From 757e7cd496ea9da0b94829076c6ce7d3733f0d1c Mon Sep 17 00:00:00 2001 From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Mon, 9 Oct 2023 12:34:25 +0200 Subject: [PATCH 01/12] Soft deprecate bang methods with warning --- lib/meilisearch/client.rb | 5 ++ lib/meilisearch/index.rb | 35 +++++++++++ lib/meilisearch/utils.rb | 107 ++++++++++++++++++--------------- spec/meilisearch/utils_spec.rb | 61 ++++++++++++++----- 4 files changed, 145 insertions(+), 63 deletions(-) diff --git a/lib/meilisearch/client.rb b/lib/meilisearch/client.rb index e6b49186..5b2d9625 100644 --- a/lib/meilisearch/client.rb +++ b/lib/meilisearch/client.rb @@ -41,6 +41,11 @@ def create_index(index_uid, options = {}) # Synchronous version of create_index. # Waits for the task to be achieved, be careful when using it. def create_index!(index_uid, options = {}) + Utils.soft_deprecate( + 'Client#create_index!', + "client.create_index('#{index_uid}').await" + ) + task = create_index(index_uid, options) wait_for_task(task['taskUid']) end diff --git a/lib/meilisearch/index.rb b/lib/meilisearch/index.rb index d0f7df0f..27dd34e9 100644 --- a/lib/meilisearch/index.rb +++ b/lib/meilisearch/index.rb @@ -92,6 +92,11 @@ def add_documents(documents, primary_key = nil) alias add_or_replace_documents add_documents def add_documents!(documents, primary_key = nil) + Utils.soft_deprecate( + 'Index#add_documents!', + 'index.add_documents(...).await' + ) + task = add_documents(documents, primary_key) wait_for_task(task['taskUid']) end @@ -130,6 +135,11 @@ def update_documents(documents, primary_key = nil) alias add_or_update_documents update_documents def update_documents!(documents, primary_key = nil) + Utils.soft_deprecate( + 'Index#update_documents!', + 'index.update_documents(...).await' + ) + task = update_documents(documents, primary_key) wait_for_task(task['taskUid']) end @@ -144,6 +154,11 @@ def add_documents_in_batches(documents, batch_size = 1000, primary_key = nil) end def 
add_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) + Utils.soft_deprecate( + 'Index#add_documents_in_batches!', + 'index.add_documents_in_batches(...).await' + ) + tasks = add_documents_in_batches(documents, batch_size, primary_key) responses = [] tasks.each do |task_obj| @@ -161,6 +176,11 @@ def update_documents_in_batches(documents, batch_size = 1000, primary_key = nil) end def update_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) + Utils.soft_deprecate( + 'Index#update_documents_in_batches!', + 'index.update_documents_in_batches(...).await' + ) + tasks = update_documents_in_batches(documents, batch_size, primary_key) responses = [] tasks.each do |task_obj| @@ -192,6 +212,11 @@ def delete_documents(options = {}) alias delete_multiple_documents delete_documents def delete_documents!(documents_ids) + Utils.soft_deprecate( + 'Index#delete_documents!', + 'index.delete_documents(...).await' + ) + task = delete_documents(documents_ids) wait_for_task(task['taskUid']) end @@ -204,6 +229,11 @@ def delete_document(document_id) alias delete_one_document delete_document def delete_document!(document_id) + Utils.soft_deprecate( + 'Index#delete_document!', + 'index.delete_document(...).await' + ) + task = delete_document(document_id) wait_for_task(task['taskUid']) end @@ -214,6 +244,11 @@ def delete_all_documents end def delete_all_documents! 
+ Utils.soft_deprecate( + 'Index#delete_all_documents!', + 'index.delete_all_documents(...).await' + ) + task = delete_all_documents wait_for_task(task['taskUid']) end diff --git a/lib/meilisearch/utils.rb b/lib/meilisearch/utils.rb index 83adfac0..f376222b 100644 --- a/lib/meilisearch/utils.rb +++ b/lib/meilisearch/utils.rb @@ -4,70 +4,81 @@ module MeiliSearch module Utils SNAKE_CASE = /[^a-zA-Z0-9]+(.)/ - def self.transform_attributes(body) - case body - when Array - body.map { |item| transform_attributes(item) } - when Hash - warn_on_non_conforming_attribute_names(body) - parse(body) - else - body + class << self + attr_writer :logger + + def logger + @logger ||= Logger.new($stdout) + end + + def soft_deprecate(subject, replacement) + logger.warn("[meilisearch-ruby] #{subject} is DEPRECATED, please use #{replacement} instead.") end - end - def self.parse(body) - body - .transform_keys(&:to_s) - .transform_keys do |key| - key.include?('_') ? key.downcase.gsub(SNAKE_CASE, &:upcase).gsub('_', '') : key + def transform_attributes(body) + case body + when Array + body.map { |item| transform_attributes(item) } + when Hash + warn_on_non_conforming_attribute_names(body) + parse(body) + else + body end - end + end - def self.filter(original_options, allowed_params = []) - original_options.transform_keys(&:to_sym).slice(*allowed_params) - end + def filter(original_options, allowed_params = []) + original_options.transform_keys(&:to_sym).slice(*allowed_params) + end - def self.parse_query(original_options, allowed_params = []) - only_allowed_params = filter(original_options, allowed_params) + def parse_query(original_options, allowed_params = []) + only_allowed_params = filter(original_options, allowed_params) - Utils.transform_attributes(only_allowed_params).then do |body| - body.transform_values do |v| - v.respond_to?(:join) ? v.join(',') : v.to_s + Utils.transform_attributes(only_allowed_params).then do |body| + body.transform_values do |v| + v.respond_to?(:join) ? 
v.join(',') : v.to_s + end end end - end - def self.message_builder(current_message, method_name) - "#{current_message}\nHint: It might not be working because maybe you're not up " \ - "to date with the Meilisearch version that `#{method_name}` call requires." - end + def version_error_handler(method_name) + yield if block_given? + rescue MeiliSearch::ApiError => e + message = message_builder(e.http_message, method_name) - def self.version_error_handler(method_name) - yield if block_given? - rescue MeiliSearch::ApiError => e - message = message_builder(e.http_message, method_name) + raise MeiliSearch::ApiError.new(e.http_code, message, e.http_body) + rescue StandardError => e + raise e.class, message_builder(e.message, method_name) + end - raise MeiliSearch::ApiError.new(e.http_code, message, e.http_body) - rescue StandardError => e - raise e.class, message_builder(e.message, method_name) - end + def warn_on_non_conforming_attribute_names(body) + return if body.nil? - def self.warn_on_non_conforming_attribute_names(body) - return if body.nil? + non_snake_case = body.keys.grep_v(/^[a-z0-9_]+$/) + return if non_snake_case.empty? - non_snake_case = body.keys.grep_v(/^[a-z0-9_]+$/) - return if non_snake_case.empty? + message = <<~MSG + [meilisearch-ruby] Attributes will be expected to be snake_case in future versions. + [meilisearch-ruby] Non-conforming attributes: #{non_snake_case.join(', ')} + MSG - message = <<~MSG - Attributes will be expected to be snake_case in future versions of Meilisearch Ruby. + logger.warn(message) + end - Non-conforming attributes: #{non_snake_case.join(', ')} - MSG + private - warn(message) - end + def parse(body) + body + .transform_keys(&:to_s) + .transform_keys do |key| + key.include?('_') ? 
key.downcase.gsub(SNAKE_CASE, &:upcase).gsub('_', '') : key + end + end - private_class_method :parse, :message_builder + def message_builder(current_message, method_name) + "#{current_message}\nHint: It might not be working because maybe you're not up " \ + "to date with the Meilisearch version that `#{method_name}` call requires." + end + end end end diff --git a/spec/meilisearch/utils_spec.rb b/spec/meilisearch/utils_spec.rb index ffeea67a..b555010e 100644 --- a/spec/meilisearch/utils_spec.rb +++ b/spec/meilisearch/utils_spec.rb @@ -1,6 +1,30 @@ # frozen_string_literal: true +require 'logger' RSpec.describe MeiliSearch::Utils do + let(:logger) { instance_double(Logger, warn: nil) } + + describe '.soft_deprecate' do + before(:each) { described_class.logger = logger } + after(:each) { described_class.logger = nil } + + it 'outputs a warning' do + described_class.soft_deprecate('footballs', 'snowballs') + expect(logger).to have_received(:warn) + end + + it 'does not throw an error' do + expect do + described_class.soft_deprecate('footballs', 'snowballs') + end.not_to raise_error + end + + it 'includes relevant information' do + described_class.soft_deprecate('footballs', 'snowballs') + expect(logger).to have_received(:warn).with(a_string_including('footballs', 'snowballs')) + end + end + describe '.parse_query' do it 'transforms arrays into strings' do data = described_class.parse_query({ array: [1, 2, 3], other: 'string' }, [:array, :other]) @@ -22,6 +46,9 @@ end describe '.transform_attributes' do + before(:each) { described_class.logger = logger } + after(:each) { described_class.logger = nil } + it 'transforms snake_case into camelCased keys' do data = described_class.transform_attributes({ index_name: 'books', @@ -49,9 +76,10 @@ it 'warns when using camelCase' do attrs = { distinctAttribute: 'title' } - expect do - described_class.transform_attributes(attrs) - end.to output(include('Attributes will be expected to be snake_case', 'distinctAttribute')).to_stderr 
+ described_class.transform_attributes(attrs) + + expect(logger).to have_received(:warn) + .with(a_string_including('Attributes will be expected to be snake_case', 'distinctAttribute')) end it 'warns when using camelCase in an array' do @@ -60,9 +88,10 @@ { 'indexUid' => 'books', 'q' => 'prince' } ] - expect do - described_class.transform_attributes(attrs) - end.to output(include('Attributes will be expected to be snake_case', 'indexUid')).to_stderr + described_class.transform_attributes(attrs) + + expect(logger).to have_received(:warn) + .with(a_string_including('Attributes will be expected to be snake_case', 'indexUid')) end end @@ -115,28 +144,30 @@ end describe '.warn_on_non_conforming_attribute_names' do + before(:each) { described_class.logger = logger } + after(:each) { described_class.logger = nil } + it 'warns when using camelCase attributes' do attrs = { attributesToHighlight: ['field'] } + described_class.warn_on_non_conforming_attribute_names(attrs) - expect do - described_class.warn_on_non_conforming_attribute_names(attrs) - end.to output(include('Attributes will be expected to be snake_case', 'attributesToHighlight')).to_stderr + expect(logger).to have_received(:warn) + .with(a_string_including('Attributes will be expected to be snake_case', 'attributesToHighlight')) end it 'warns when using a mixed case' do attrs = { distinct_ATTribute: 'title' } + described_class.warn_on_non_conforming_attribute_names(attrs) - expect do - described_class.warn_on_non_conforming_attribute_names(attrs) - end.to output(include('Attributes will be expected to be snake_case', 'distinct_ATTribute')).to_stderr + expect(logger).to have_received(:warn) + .with(a_string_including('Attributes will be expected to be snake_case', 'distinct_ATTribute')) end it 'does not warn when using snake_case' do attrs = { q: 'query', attributes_to_highlight: ['field'] } + described_class.warn_on_non_conforming_attribute_names(attrs) - expect do - 
described_class.warn_on_non_conforming_attribute_names(attrs) - end.not_to output.to_stderr + expect(logger).not_to have_received(:warn) end end end From 19fa8f446bab391716bea2df99e39aa3badb1a64 Mon Sep 17 00:00:00 2001 From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Mon, 9 Oct 2023 12:37:01 +0200 Subject: [PATCH 02/12] Update specs to use new await syntax --- spec/meilisearch/client/indexes_spec.rb | 66 +++++++++++++------ spec/meilisearch/client/requests_spec.rb | 2 +- spec/meilisearch/client/tasks_spec.rb | 4 +- spec/meilisearch/index/base_spec.rb | 28 ++++---- spec/meilisearch/index/documents_spec.rb | 48 +++++++------- .../index/search/attributes_to_crop_spec.rb | 2 +- .../meilisearch/index/search/vector_search.rb | 4 +- spec/meilisearch/index/settings_spec.rb | 28 ++++---- 8 files changed, 104 insertions(+), 78 deletions(-) diff --git a/spec/meilisearch/client/indexes_spec.rb b/spec/meilisearch/client/indexes_spec.rb index e05a2c08..664db7cb 100644 --- a/spec/meilisearch/client/indexes_spec.rb +++ b/spec/meilisearch/client/indexes_spec.rb @@ -16,17 +16,43 @@ expect(index.primary_key).to be_nil end - it 'creates an index synchronously' do - task = client.create_index!('books') + context 'synchronously' do + context 'using ! 
method' do + before { allow(Utils).to receive(:soft_deprecate).and_return(nil) } - expect(task['type']).to eq('indexCreation') - expect(task['status']).to eq('succeeded') + it 'creates an index' do + task = client.create_index!('books') - index = client.fetch_index('books') + expect(task['type']).to eq('indexCreation') + expect(task['status']).to eq('succeeded') - expect(index).to be_a(MeiliSearch::Index) - expect(index.uid).to eq('books') - expect(index.primary_key).to be_nil + index = client.fetch_index('books') + + expect(index).to be_a(MeiliSearch::Index) + expect(index.uid).to eq('books') + expect(index.primary_key).to be_nil + end + + it 'warns about deprecation' do + client.create_index!('books') + expect(Utils).to have_received(:soft_deprecate).with('Client#create_index!', a_string_matching(/books/)) + end + end + + context 'using await syntax' do + it 'creates an index' do + task = client.create_index('books').await + + expect(task['type']).to eq('indexCreation') + expect(task['status']).to eq('succeeded') + + index = client.fetch_index('books') + + expect(index).to be_a(MeiliSearch::Index) + expect(index.uid).to eq('books') + expect(index.primary_key).to be_nil + end + end end end @@ -46,7 +72,7 @@ end it 'creates an index synchronously' do - task = client.create_index!('books', primary_key: 'reference_code') + task = client.create_index('books', primary_key: 'reference_code').await expect(task['type']).to eq('indexCreation') expect(task['status']).to eq('succeeded') @@ -96,8 +122,8 @@ context 'when an index with a given uid already exists' do it 'returns a failing task' do - initial_task = client.create_index!('books') - last_task = client.create_index!('books') + initial_task = client.create_index('books').await + last_task = client.create_index('books').await expect(initial_task['type']).to eq('indexCreation') expect(last_task['type']).to eq('indexCreation') @@ -118,7 +144,7 @@ describe '#indexes' do it 'returns MeiliSearch::Index objects' do - 
client.create_index!('books') + client.create_index('books').await index = client.indexes['results'].first @@ -126,7 +152,7 @@ end it 'gets a list of indexes' do - ['books', 'colors', 'artists'].each { |name| client.create_index!(name) } + ['books', 'colors', 'artists'].each { |name| client.create_index(name).await } indexes = client.indexes['results'] @@ -137,7 +163,7 @@ end it 'paginates indexes list with limit and offset' do - ['books', 'colors', 'artists'].each { |name| client.create_index!(name) } + ['books', 'colors', 'artists'].each { |name| client.create_index(name).await } indexes = client.indexes(limit: 1, offset: 2) @@ -151,7 +177,7 @@ describe '#raw_indexes' do it 'returns raw indexes' do - client.create_index!('index') + client.create_index('index').await response = client.raw_indexes['results'].first @@ -160,7 +186,7 @@ end it 'gets a list of raw indexes' do - ['books', 'colors', 'artists'].each { |name| client.create_index!(name) } + ['books', 'colors', 'artists'].each { |name| client.create_index(name).await } indexes = client.raw_indexes['results'] @@ -173,7 +199,7 @@ describe '#fetch_index' do it 'fetches index by uid' do - client.create_index!('books', primary_key: 'reference_code') + client.create_index('books', primary_key: 'reference_code').await fetched_index = client.fetch_index('books') @@ -186,7 +212,7 @@ describe '#fetch_raw_index' do it 'fetch a specific index raw Hash response based on uid' do - client.create_index!('books', primary_key: 'reference_code') + client.create_index('books', primary_key: 'reference_code').await index = client.fetch_index('books') raw_response = index.fetch_raw_info @@ -202,7 +228,7 @@ describe '#index' do it 'returns an index object with the provided uid' do - client.create_index!('books', primary_key: 'reference_code') + client.create_index('books', primary_key: 'reference_code').await # this index is in memory, without metadata from server index = client.index('books') @@ -219,7 +245,7 @@ describe 
'#delete_index' do context 'when the index exists' do it 'deletes the index' do - client.create_index!('books') + client.create_index('books').await task = client.delete_index('books') expect(task['type']).to eq('indexDeletion') diff --git a/spec/meilisearch/client/requests_spec.rb b/spec/meilisearch/client/requests_spec.rb index 4d551315..35c01bea 100644 --- a/spec/meilisearch/client/requests_spec.rb +++ b/spec/meilisearch/client/requests_spec.rb @@ -10,7 +10,7 @@ end it 'parses options when they are in a snake_case' do - client.create_index!(key, primary_key: key) + client.create_index(key, primary_key: key).await index = client.fetch_index(key) expect(index.uid).to eq(key) diff --git a/spec/meilisearch/client/tasks_spec.rb b/spec/meilisearch/client/tasks_spec.rb index 8e42d9de..5d646b3a 100644 --- a/spec/meilisearch/client/tasks_spec.rb +++ b/spec/meilisearch/client/tasks_spec.rb @@ -5,7 +5,7 @@ let(:enqueued_task_keys) { ['uid', 'indexUid', 'status', 'type', 'enqueuedAt'] } let(:succeeded_task_keys) { [*enqueued_task_keys, 'details', 'duration', 'startedAt', 'finishedAt'] } - let!(:doc_addition_task) { index.add_documents!(documents) } + let!(:doc_addition_task) { index.add_documents(documents).await } let(:task_uid) { doc_addition_task['uid'] } it 'gets a task of an index' do @@ -123,7 +123,7 @@ describe '#client.wait_for_task' do it 'waits for task with default values' do - task = index.add_documents!(documents) + task = index.add_documents(documents).await task = client.wait_for_task(task['taskUid']) expect(task).to be_a(Hash) diff --git a/spec/meilisearch/index/base_spec.rb b/spec/meilisearch/index/base_spec.rb index 026c843a..4d588b15 100644 --- a/spec/meilisearch/index/base_spec.rb +++ b/spec/meilisearch/index/base_spec.rb @@ -2,7 +2,7 @@ RSpec.describe MeiliSearch::Index do it 'fetch the info of the index' do - client.create_index!('books') + client.create_index('books').await index = client.fetch_index('books') expect(index).to be_a(MeiliSearch::Index) 
@@ -15,7 +15,7 @@ end it 'fetch the raw Hash info of the index' do - client.create_index!('books', primary_key: 'reference_number') + client.create_index('books', primary_key: 'reference_number').await raw_index = client.fetch_raw_index('books') @@ -29,7 +29,7 @@ end it 'get primary-key of index if null' do - client.create_index!('index_without_primary_key') + client.create_index('index_without_primary_key').await index = client.fetch_index('index_without_primary_key') expect(index.primary_key).to be_nil @@ -37,7 +37,7 @@ end it 'get primary-key of index if it exists' do - client.create_index!('index_with_prirmary_key', primary_key: 'primary_key') + client.create_index('index_with_prirmary_key', primary_key: 'primary_key').await index = client.fetch_index('index_with_prirmary_key') expect(index.primary_key).to eq('primary_key') @@ -45,14 +45,14 @@ end it 'get uid of index' do - client.create_index!('uid') + client.create_index('uid').await index = client.fetch_index('uid') expect(index.uid).to eq('uid') end it 'updates primary-key of index if not defined before' do - client.create_index!('uid') + client.create_index('uid').await task = client.index('uid').update(primary_key: 'new_primary_key') expect(task['type']).to eq('indexUpdate') @@ -70,7 +70,7 @@ end it 'updates primary-key of index if has been defined before but there is not docs' do - client.create_index!('books', primary_key: 'reference_number') + client.create_index('books', primary_key: 'reference_number').await task = client.index('books').update(primary_key: 'international_standard_book_number') expect(task['type']).to eq('indexUpdate') @@ -89,7 +89,7 @@ it 'returns a failing task if primary-key is already defined' do index = client.index('uid') - index.add_documents!({ id: 1, title: 'My Title' }) + index.add_documents({ id: 1, title: 'My Title' }).await task = index.update(primary_key: 'new_primary_key') expect(task['type']).to eq('indexUpdate') @@ -107,7 +107,7 @@ } new_client = 
MeiliSearch::Client.new(URL, MASTER_KEY, options) - new_client.create_index!('books') + new_client.create_index('books').await index = new_client.fetch_index('books') expect(index.options).to eq({ max_retries: 1, timeout: 2, convert_body?: true }) @@ -135,7 +135,7 @@ } new_client = MeiliSearch::Client.new(URL, MASTER_KEY, options) - new_client.create_index!('books') + new_client.create_index('books').await index = new_client.fetch_index('books') expect(index.options).to eq(options.merge({ convert_body?: true })) @@ -155,7 +155,7 @@ end it 'deletes index' do - client.create_index!('uid') + client.create_index('uid').await task = client.index('uid').delete expect(task['type']).to eq('indexDeletion') @@ -165,7 +165,7 @@ end it 'fails to manipulate index object after deletion' do - client.create_index!('uid') + client.create_index('uid').await task = client.index('uid').delete expect(task['type']).to eq('indexDeletion') @@ -177,7 +177,7 @@ end it 'works with method aliases' do - client.create_index!('uid', primary_key: 'primary_key') + client.create_index('uid', primary_key: 'primary_key').await index = client.fetch_index('uid') expect(index.method(:fetch_primary_key) == index.method(:get_primary_key)).to be_truthy @@ -187,7 +187,7 @@ context 'with snake_case options' do it 'does the request with camelCase attributes' do - client.create_index!('uid') + client.create_index('uid').await task = client.index('uid').update(primary_key: 'new_primary_key') expect(task['type']).to eq('indexUpdate') diff --git a/spec/meilisearch/index/documents_spec.rb b/spec/meilisearch/index/documents_spec.rb index b4dab437..ec426f2a 100644 --- a/spec/meilisearch/index/documents_spec.rb +++ b/spec/meilisearch/index/documents_spec.rb @@ -107,7 +107,7 @@ end it 'adds documents synchronously (as an array of documents)' do - task = index.add_documents!(documents) + task = index.add_documents(documents).await expect(task).to have_key('status') expect(task['status']).not_to eql('enqueued') @@ 
-116,7 +116,7 @@ end it 'adds document batches synchronously (as an array of documents)' do - task = index.add_documents_in_batches!(documents, 5) + task = index.add_documents_in_batches(documents, 5).await expect(task).to be_a(Array) expect(task.count).to eq(2) # 2 batches, since we start with 5 < documents.count <= 10 documents task.each do |task_object| @@ -129,13 +129,13 @@ end it 'infers order of fields' do - index.add_documents!(documents) + index.add_documents(documents).await task = index.document(1) expect(task.keys).to eq(['objectId', 'title', 'comment']) end it 'slices response fields' do - index.add_documents!(documents) + index.add_documents(documents).await task = index.document(1, fields: ['title']) @@ -143,7 +143,7 @@ end it 'infers primary-key attribute' do - index.add_documents!(documents) + index.add_documents(documents).await expect(index.fetch_primary_key).to eq('objectId') end @@ -158,10 +158,10 @@ it 'adds only one document to index (as an hash of one document)' do new_doc = { objectId: 30, title: 'Hamlet' } - client.create_index!('books') + client.create_index('books').await new_index = client.index('books') expect do - new_index.add_documents!(new_doc) + new_index.add_documents(new_doc).await expect(new_index.document(30)['title']).to eq('Hamlet') end.to(change { new_index.documents['results'].length }.by(1)) @@ -169,10 +169,10 @@ it 'adds only one document synchronously to index (as an hash of one document)' do new_doc = { objectId: 30, title: 'Hamlet' } - client.create_index!('books') + client.create_index('books').await new_index = client.index('books') expect do - task = new_index.add_documents!(new_doc) + task = new_index.add_documents(new_doc).await expect(task).to have_key('status') expect(task['status']).to eq('succeeded') @@ -181,14 +181,14 @@ end it 'fails to add document with bad primary-key format' do - index.add_documents!(documents) + index.add_documents(documents).await task = index.add_documents(objectId: 'toto et titi', 
title: 'Unknown') client.wait_for_task(task['taskUid']) expect(index.task(task['taskUid'])['status']).to eq('failed') end it 'fails to add document with no primary-key' do - index.add_documents!(documents) + index.add_documents(documents).await task = index.add_documents(id: 0, title: 'Unknown') client.wait_for_task(task['taskUid']) expect(index.task(task['taskUid'])['status']).to eq('failed') @@ -198,10 +198,10 @@ enable_vector_store(true) new_doc = { objectId: 123, _vectors: [0.1, 0.2, 0.3] } - client.create_index!('vector_test') + client.create_index('vector_test').await new_index = client.index('vector_test') expect do - new_index.add_documents!(new_doc) + new_index.add_documents(new_doc).await end.to(change { new_index.documents['results'].length }.by(1)) expect(new_index.document(123)).to have_key('_vectors') expect(new_index.document(123)['_vectors']).to be_a(Array) @@ -270,7 +270,7 @@ end describe 'updating documents' do - before { index.add_documents!(documents) } + before { index.add_documents(documents).await } it 'updates documents in index (as an array of documents)' do id1 = 123 @@ -297,7 +297,7 @@ { objectId: id1, title: 'Sense and Sensibility' }, { objectId: id2, title: 'The Little Prince' } ] - task = index.update_documents!(updated_documents) + task = index.update_documents(updated_documents).await expect(task).to have_key('status') expect(task['status']).not_to eql('enqueued') @@ -318,7 +318,7 @@ { objectId: id1, title: 'Sense and Sensibility' }, { objectId: id2, title: 'The Little Prince' } ] - task = index.update_documents_in_batches!(updated_documents, 1) + task = index.update_documents_in_batches(updated_documents, 1).await expect(task).to be_a(Array) expect(task.count).to eq(2) # 2 batches, since we have two items with batch size 1 task.each do |task_object| @@ -351,7 +351,7 @@ it 'updates one document synchronously in index (as an hash of one document)' do id = 123 updated_document = { objectId: id, title: 'Emma' } - task = 
index.update_documents!(updated_document) + task = index.update_documents(updated_document).await expect(task).to have_key('status') expect(task['status']).not_to eql('enqueued') @@ -389,7 +389,7 @@ end describe 'deleting documents' do - before { index.add_documents!(documents) } + before { index.add_documents(documents).await } it 'deletes one document from index' do id = 456 @@ -402,7 +402,7 @@ it 'deletes one document synchronously from index' do id = 456 - task = index.delete_document!(id) + task = index.delete_document(id).await expect(task).to have_key('status') expect(task['status']).not_to eql('enqueued') @@ -449,7 +449,7 @@ it 'deletes one document synchronously from index (with delete-batch route)' do id = 2 expect do - task = index.delete_documents!(id) + task = index.delete_documents(id).await expect(task['status']).not_to eql('enqueued') expect(task['status']).to eql('succeeded') @@ -469,7 +469,7 @@ it 'deletes one document synchronously from index (with delete-batch route as an array of one uid)' do id = 123 expect do - task = index.delete_documents!([id]) + task = index.delete_documents([id]).await expect(task['status']).not_to eql('enqueued') expect(task['status']).to eql('succeeded') @@ -488,7 +488,7 @@ it 'deletes multiples documents synchronously from index' do docs_to_delete = [1, 4] expect do - task = index.delete_documents!(docs_to_delete) + task = index.delete_documents(docs_to_delete).await expect(task['status']).not_to eql('enqueued') expect(task['status']).to eql('succeeded') @@ -543,7 +543,7 @@ end it 'does not take into account the new primary key' do - index.add_documents!(documents, 'unique') + index.add_documents(documents, 'unique').await task = index.update_documents({ unique: 3, id: 1, @@ -591,7 +591,7 @@ end it 'Impossible to push docs if the pk is missing' do - task = index.add_documents!(documents) + task = index.add_documents(documents).await update = index.task(task['uid']) expect(update['status']).to eq('failed') 
expect(update['error']['code']).to eq('index_primary_key_no_candidate_found') diff --git a/spec/meilisearch/index/search/attributes_to_crop_spec.rb b/spec/meilisearch/index/search/attributes_to_crop_spec.rb index 652f60c6..0e5c201b 100644 --- a/spec/meilisearch/index/search/attributes_to_crop_spec.rb +++ b/spec/meilisearch/index/search/attributes_to_crop_spec.rb @@ -10,7 +10,7 @@ } end - before { index.add_documents!(document) } + before { index.add_documents(document).await } it 'searches with default cropping params' do response = index.search('galaxy', attributes_to_crop: ['*'], crop_length: 6) diff --git a/spec/meilisearch/index/search/vector_search.rb b/spec/meilisearch/index/search/vector_search.rb index 3b1f8564..b3d756cf 100644 --- a/spec/meilisearch/index/search/vector_search.rb +++ b/spec/meilisearch/index/search/vector_search.rb @@ -10,9 +10,9 @@ { objectId: 2, _vectors: [0.5, 3, 1], title: 'And Your Bird Can Sing' } ] - client.create_index!('vector_test_search') + client.create_index('vector_test_search').await new_index = client.index('vector_test_search') - new_index.add_documents!(documents) + new_index.add_documents(documents).await expect(new_index.search('q', vector: [0, 1, 2])['hits']).not_to be_empty end diff --git a/spec/meilisearch/index/settings_spec.rb b/spec/meilisearch/index/settings_spec.rb index 488c6eab..c9fce495 100644 --- a/spec/meilisearch/index/settings_spec.rb +++ b/spec/meilisearch/index/settings_spec.rb @@ -39,7 +39,7 @@ context 'On global settings routes' do let(:index) { client.index(uid) } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default values of settings' do settings = index.settings @@ -112,7 +112,7 @@ let(:ranking_rules) { ['title:asc', 'words', 'typo'] } let(:wrong_ranking_rules) { ['title:asc', 'typos'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default values of ranking rules' do settings = index.ranking_rules @@ -163,7 
+163,7 @@ let(:distinct_attribute) { 'title' } it 'gets default values of distinct attribute' do - client.create_index!(uid) + client.create_index(uid).await settings = index.distinct_attribute expect(settings).to be_nil @@ -207,7 +207,7 @@ let(:index) { client.index(uid) } let(:searchable_attributes) { ['title', 'description'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default values of searchable attributes' do settings = index.searchable_attributes @@ -254,7 +254,7 @@ let(:index) { client.index(uid) } let(:displayed_attributes) { ['title', 'description'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default values of displayed attributes' do settings = index.displayed_attributes @@ -308,7 +308,7 @@ } end - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets an empty hash of synonyms by default' do settings = index.synonyms @@ -374,7 +374,7 @@ let(:stop_words_array) { ['the', 'of'] } let(:stop_words_string) { 'a' } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets an empty array when there is no stop-words' do settings = index.stop_words @@ -448,7 +448,7 @@ let(:index) { client.index(uid) } let(:filterable_attributes) { ['title', 'description'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default values of filterable attributes' do settings = index.filterable_attributes @@ -496,7 +496,7 @@ let(:index) { client.index(uid) } let(:sortable_attributes) { ['title', 'description'] } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default values of sortable attributes' do settings = index.sortable_attributes @@ -543,7 +543,7 @@ context 'Index with primary-key' do let(:index) { client.index(uid) } - before { client.create_index!(uid, primary_key: 'id') } + before { client.create_index(uid, primary_key: 
'id').await } it 'gets the default values of settings' do settings = index.settings @@ -651,7 +651,7 @@ context 'Aliases' do let(:index) { client.index(uid) } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'works with method aliases' do expect(index.method(:settings) == index.method(:get_settings)).to be_truthy @@ -675,7 +675,7 @@ def update_synonyms(index, synonyms) let(:index) { client.index(uid) } let(:pagination) { { maxTotalHits: 3141 } } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default values of pagination' do settings = index.pagination.transform_keys(&:to_sym) @@ -739,7 +739,7 @@ def update_synonyms(index, synonyms) } end - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default typo tolerance settings' do settings = index.typo_tolerance @@ -769,7 +769,7 @@ def update_synonyms(index, synonyms) let(:index) { client.index(uid) } let(:default_faceting) { { maxValuesPerFacet: 100, sortFacetValuesBy: { '*' => 'alpha' } } } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'gets default values of faceting' do settings = index.faceting.transform_keys(&:to_sym) From 78f7311a8cf916f5efa2a35022a4964d713f1d1e Mon Sep 17 00:00:00 2001 From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Mon, 9 Oct 2023 14:54:06 +0200 Subject: [PATCH 03/12] Write Model::Task and its tests --- .rubocop_todo.yml | 10 +- lib/meilisearch.rb | 1 + lib/meilisearch/client.rb | 4 +- lib/meilisearch/index.rb | 45 +- lib/meilisearch/models/task.rb | 158 ++++++ lib/meilisearch/utils.rb | 14 + .../index/search/attributes_to_crop_spec.rb | 2 +- spec/meilisearch/models/task_spec.rb | 521 ++++++++++++++++++ spec/meilisearch/utils_spec.rb | 1 - 9 files changed, 728 insertions(+), 28 deletions(-) create mode 100644 lib/meilisearch/models/task.rb create mode 100644 spec/meilisearch/models/task_spec.rb diff --git 
a/.rubocop_todo.yml b/.rubocop_todo.yml index 734c395b..afda34df 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -1,21 +1,21 @@ # This configuration was generated by # `rubocop --auto-gen-config` -# on 2024-01-16 21:52:52 UTC using RuboCop version 1.50.2. +# on 2024-02-16 17:57:26 UTC using RuboCop version 1.50.2. # The point is for the user to remove these configuration records # one by one as the offenses are removed from the code base. # Note that changes in the inspected code, or installation of new # versions of RuboCop, may require this file to be generated again. -# Offense count: 55 +# Offense count: 64 # Configuration parameters: CountComments, CountAsOne, AllowedMethods, AllowedPatterns. # AllowedMethods: refine Metrics/BlockLength: - Max: 694 + Max: 693 -# Offense count: 2 +# Offense count: 4 # Configuration parameters: CountComments, CountAsOne. Metrics/ClassLength: - Max: 373 + Max: 401 # Offense count: 1 # Configuration parameters: Max, CountKeywordArgs. diff --git a/lib/meilisearch.rb b/lib/meilisearch.rb index 2ba15581..f7a58e49 100644 --- a/lib/meilisearch.rb +++ b/lib/meilisearch.rb @@ -2,6 +2,7 @@ require 'meilisearch/version' require 'meilisearch/utils' +require 'meilisearch/models/task' require 'meilisearch/http_request' require 'meilisearch/multi_search' require 'meilisearch/tenant_token' diff --git a/lib/meilisearch/client.rb b/lib/meilisearch/client.rb index 5b2d9625..ff682687 100644 --- a/lib/meilisearch/client.rb +++ b/lib/meilisearch/client.rb @@ -35,7 +35,9 @@ def indexes(options = {}) def create_index(index_uid, options = {}) body = Utils.transform_attributes(options.merge(uid: index_uid)) - http_post '/indexes', body + response = http_post '/indexes', body + + Model::Task.new(response, task_endpoint) end # Synchronous version of create_index. 
diff --git a/lib/meilisearch/index.rb b/lib/meilisearch/index.rb index 27dd34e9..eeaa4ea7 100644 --- a/lib/meilisearch/index.rb +++ b/lib/meilisearch/index.rb @@ -86,7 +86,9 @@ def documents(options = {}) def add_documents(documents, primary_key = nil) documents = [documents] if documents.is_a?(Hash) - http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact + response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact + + Model::Task.new(response, task_endpoint) end alias replace_documents add_documents alias add_or_replace_documents add_documents @@ -130,7 +132,9 @@ def add_documents_csv(documents, primary_key = nil, delimiter = nil) def update_documents(documents, primary_key = nil) documents = [documents] if documents.is_a?(Hash) - http_put "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact + response = http_put "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact + + Model::Task.new(response, task_endpoint) end alias add_or_update_documents update_documents @@ -146,11 +150,9 @@ def update_documents!(documents, primary_key = nil) alias add_or_update_documents! update_documents! 
def add_documents_in_batches(documents, batch_size = 1000, primary_key = nil) - tasks = [] - documents.each_slice(batch_size) do |batch| - tasks.append(add_documents(batch, primary_key)) + documents.each_slice(batch_size).map do |batch| + add_documents(batch, primary_key) end - tasks end def add_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) @@ -168,11 +170,9 @@ def add_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) end def update_documents_in_batches(documents, batch_size = 1000, primary_key = nil) - tasks = [] - documents.each_slice(batch_size) do |batch| - tasks.append(update_documents(batch, primary_key)) + documents.each_slice(batch_size).map do |batch| + update_documents(batch, primary_key) end - tasks end def update_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) @@ -198,15 +198,17 @@ def update_documents_in_batches!(documents, batch_size = 1000, primary_key = nil # Returns a Task object. def delete_documents(options = {}) Utils.version_error_handler(__method__) do - if options.is_a?(Hash) && options.key?(:filter) - http_post "/indexes/#{@uid}/documents/delete", options - else - # backwards compatibility: - # expect to be a array or/number/string to send alongside as documents_ids. - options = [options] unless options.is_a?(Array) + response = if options.is_a?(Hash) && options.key?(:filter) + http_post "/indexes/#{@uid}/documents/delete", options + else + # backwards compatibility: + # expect to be a array or/number/string to send alongside as documents_ids. 
+ options = [options] unless options.is_a?(Array) - http_post "/indexes/#{@uid}/documents/delete-batch", options - end + http_post "/indexes/#{@uid}/documents/delete-batch", options + end + + Model::Task.new(response, task_endpoint) end end alias delete_multiple_documents delete_documents @@ -224,7 +226,9 @@ def delete_documents!(documents_ids) def delete_document(document_id) encode_document = URI.encode_www_form_component(document_id) - http_delete "/indexes/#{@uid}/documents/#{encode_document}" + response = http_delete "/indexes/#{@uid}/documents/#{encode_document}" + + Model::Task.new(response, task_endpoint) end alias delete_one_document delete_document @@ -240,7 +244,8 @@ def delete_document!(document_id) alias delete_one_document! delete_document! def delete_all_documents - http_delete "/indexes/#{@uid}/documents" + response = http_delete "/indexes/#{@uid}/documents" + Model::Task.new(response, task_endpoint) end def delete_all_documents! diff --git a/lib/meilisearch/models/task.rb b/lib/meilisearch/models/task.rb new file mode 100644 index 00000000..336d2cc0 --- /dev/null +++ b/lib/meilisearch/models/task.rb @@ -0,0 +1,158 @@ +# frozen_string_literal: true + +require 'forwardable' + +module MeiliSearch + module Model + class Task + extend Forwardable + + # Maintain backwards compatibility with task hash return type + def_delegators :metadata, :[], :dig, :keys, :key?, :has_key? + + attr_reader :metadata + + def initialize(metadata_hash, task_endpoint) + self.metadata = metadata_hash + validate_required_fields! metadata + + @task_endpoint = task_endpoint + end + + def uid + @metadata['taskUid'] + end + + def type + @metadata['type'] + end + + def status + @metadata['status'] + end + + def enqueued? + refresh if status_enqueued? + + status_enqueued? + end + + def processing? + refresh if status_processing? || status_enqueued? + + status_processing? + end + + def unfinished? + refresh if status_processing? || status_enqueued? + + status_processing? 
|| status_enqueued? + end + alias waiting? unfinished? + + def finished? + !unfinished? + end + + def succeeded? + Utils.warn_on_unfinished_task(self) if unfinished? + + status == 'succeeded' + end + + def failed? + Utils.warn_on_unfinished_task(self) if unfinished? + + status == 'failed' + end + + def cancelled? + Utils.warn_on_unfinished_task(self) if unfinished? + + status_cancelled? + end + + def deleted? + refresh unless @deleted + + !!@deleted + end + + def error + @metadata['error'] + end + + def refresh(with: nil) + self.metadata = with || @task_endpoint.task(uid) + + self + rescue MeiliSearch::ApiError => e + raise e unless e.http_code == 404 + + @deleted = true + + self + end + + def await(timeout_in_ms = 5000, interval_in_ms = 50) + refresh with: @task_endpoint.wait_for_task(uid, timeout_in_ms, interval_in_ms) unless finished? + + self + end + + def cancel + return true if status_cancelled? + return false if status_finished? + + @task_endpoint.cancel_tasks(uids: [uid]).await + + cancelled? + end + + def delete + return false unless status_finished? + + @task_endpoint.delete_tasks(uids: [uid]).await + + deleted? + end + + def to_h + @metadata + end + alias to_hash to_h + + private + + def validate_required_fields!(task_hash) + raise ArgumentError, 'Cannot instantiate a task without an ID' unless task_hash['taskUid'] + raise ArgumentError, 'Cannot instantiate a task without a type' unless task_hash['type'] + raise ArgumentError, 'Cannot instantiate a task without a status' unless task_hash['status'] + end + + def status_enqueued? + status == 'enqueued' + end + + def status_processing? + status == 'processing' + end + + def status_finished? + ['succeeded', 'failed', 'cancelled'].include? status + end + + def status_cancelled? 
+ status == 'cancelled' + end + + def metadata=(metadata) + @metadata = metadata + + uid = @metadata['taskUid'] || @metadata['uid'] + @metadata['uid'] = uid + @metadata['taskUid'] = uid + end + end + end +end diff --git a/lib/meilisearch/utils.rb b/lib/meilisearch/utils.rb index f376222b..b6652b27 100644 --- a/lib/meilisearch/utils.rb +++ b/lib/meilisearch/utils.rb @@ -1,5 +1,7 @@ # frozen_string_literal: true +require 'logger' + module MeiliSearch module Utils SNAKE_CASE = /[^a-zA-Z0-9]+(.)/ @@ -15,6 +17,18 @@ def soft_deprecate(subject, replacement) logger.warn("[meilisearch-ruby] #{subject} is DEPRECATED, please use #{replacement} instead.") end + def warn_on_unfinished_task(task_uid) + message = <<~UNFINISHED_TASK_WARNING + [meilisearch-ruby] Task #{task_uid}'s finished state (succeeded?/failed?/cancelled?) is being checked before finishing. + [meilisearch-ruby] Tasks in meilisearch are processed in the background asynchronously. + [meilisearch-ruby] Please use the #finished? method to check if the task is finished or the #await method to wait for the task to finish. 
+ UNFINISHED_TASK_WARNING + + message.lines.each do |line| + logger.warn(line) + end + end + def transform_attributes(body) case body when Array diff --git a/spec/meilisearch/index/search/attributes_to_crop_spec.rb b/spec/meilisearch/index/search/attributes_to_crop_spec.rb index 0e5c201b..71d70bd9 100644 --- a/spec/meilisearch/index/search/attributes_to_crop_spec.rb +++ b/spec/meilisearch/index/search/attributes_to_crop_spec.rb @@ -53,7 +53,7 @@ expect(response['hits'].first['_formatted']['description']).to eq('…Guide to the Galaxy is a…') end - it 'does a placehodler search with attributes to crop' do + it 'does a placeholder search with attributes to crop' do response = index.search('', { attributes_to_crop: ['description'], crop_length: 5 }) expect(response['hits'].first).to have_key('_formatted') expect(response['hits'].first['description']).to eq(document[:description]) diff --git a/spec/meilisearch/models/task_spec.rb b/spec/meilisearch/models/task_spec.rb new file mode 100644 index 00000000..3138b479 --- /dev/null +++ b/spec/meilisearch/models/task_spec.rb @@ -0,0 +1,521 @@ +# frozen_string_literal: true + +describe MeiliSearch::Model::Task do + let(:new_index_uid) { random_uid } + let(:task_hash) { client.http_post '/indexes', { 'uid' => new_index_uid } } + let(:endpoint) { MeiliSearch::Task.new(URL, MASTER_KEY, client.options) } + + subject { described_class.new task_hash, endpoint } + + let(:enqueued_endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + let(:enqueued_task) { described_class.new task_hash, enqueued_endpoint } + + let(:processing_endpoint) { instance_double(MeiliSearch::Task, task: task_hash.update('status' => 'processing')) } + let(:processing_task) { described_class.new task_hash, processing_endpoint } + + let(:logger) { instance_double(Logger, warn: nil) } + before { MeiliSearch::Utils.logger = logger } + after { MeiliSearch::Utils.logger = nil } + + describe '.initialize' do + it 'requires a uid in the task hash' do + 
task_hash.delete 'taskUid' + + expect { subject }.to raise_error(ArgumentError) + end + + it 'requires a type in the task hash' do + task_hash.delete 'type' + + expect { subject }.to raise_error(ArgumentError) + end + + it 'requires a status in the task hash' do + task_hash.delete 'status' + + expect { subject }.to raise_error(ArgumentError) + end + + it 'sets "taskUid" key when given a "uid"' do + expect(subject).to have_key('uid') + end + + it 'sets "uid" key when given a "taskUid"' do + task_hash['uid'] = task_hash.delete 'taskUid' + + expect(subject).to have_key('taskUid') + end + end + + describe 'forwarding' do + it 'allows for direct reading internal hash' do + subject + + task_hash.each do |key, value| + expect(subject[key]).to eq(value) + end + end + end + + describe '#enqueued?' do + context 'if the task is processing' do + before { task_hash['status'] = 'processing' } + + it 'returns false' do + expect(subject).not_to be_enqueued + end + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.enqueued? + expect(subject).not_to have_received(:refresh) + end + end + + context 'if the task has succeeded' do + before { task_hash['status'] = 'succeeded' } + + it 'returns false' do + expect(subject).not_to be_enqueued + end + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.enqueued? + expect(subject).not_to have_received(:refresh) + end + end + + context 'if the task has failed' do + before { task_hash['status'] = 'failed' } + + it 'returns false' do + expect(subject).not_to be_enqueued + end + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.enqueued? 
+ expect(subject).not_to have_received(:refresh) + end + end + + it 'returns true if the task is enqueued' do + expect(enqueued_task).to be_enqueued + end + + context 'if the task has succeeded but not updated' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it 'refreshes and returns false' do + expect(subject).not_to be_enqueued + end + end + end + + describe '#processing?' do + context 'if the task has succeeded' do + before { task_hash['status'] = 'succeeded' } + + it 'returns false' do + expect(subject).not_to be_processing + end + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.processing? + expect(subject).not_to have_received(:refresh) + end + end + + context 'if the task has failed' do + before { task_hash['status'] = 'failed' } + + it 'returns false' do + expect(subject).not_to be_processing + end + + it 'does not refresh the task' do + allow(subject).to receive(:refresh) + subject.processing? + expect(subject).not_to have_received(:refresh) + end + end + + it 'returns false if the task has not begun to process' do + expect(enqueued_task).not_to be_processing + end + + it 'returns true if the task is processing' do + expect(processing_task).to be_processing + end + + context 'if the task has begun processing but has not updated' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash.merge('status' => 'processing')) } + + it 'refreshes and returns true' do + expect(subject).to be_processing + end + end + + context 'if the task has succeeded but not updated' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it 'refreshes and returns false' do + expect(subject).not_to be_enqueued + end + end + end + + describe '#unfinished?' 
do + it 'returns false if the task has succeeded' do + task_hash['status'] = 'succeeded' + expect(subject).not_to be_unfinished + end + + it 'returns false if the task has failed' do + task_hash['status'] = 'failed' + expect(subject).not_to be_unfinished + end + + it 'returns true if the task is enqueued' do + expect(enqueued_task).to be_unfinished + end + + it 'returns true if the task is processing' do + expect(processing_task).to be_unfinished + end + + context 'if the task has succeeded but not updated' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it 'refreshes and returns false' do + expect(subject).not_to be_unfinished + end + end + end + + describe '#finished?' do + it 'returns true if the task has succeeded' do + task_hash['status'] = 'succeeded' + expect(subject).to be_finished + end + + it 'returns true if the task has failed' do + task_hash['status'] = 'failed' + expect(subject).to be_finished + end + + it 'returns false if the task is enqueued' do + expect(enqueued_task).not_to be_finished + end + + it 'returns false if the task is processing' do + expect(processing_task).not_to be_finished + end + + context 'if the task has succeeded but not updated' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it 'refreshes and returns true' do + expect(subject).to be_finished + end + end + end + + describe '#failed?' 
do + it 'returns false if the task has succeeded or been cancelled' do + task_hash['status'] = 'succeeded' + expect(subject).not_to be_failed + task_hash['status'] = 'cancelled' + expect(subject).not_to be_failed + end + + it 'returns true if the task has failed' do + task_hash['status'] = 'failed' + expect(subject).to be_failed + end + + context 'if the task is not finished' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + + it 'returns false' do + expect(subject).not_to be_failed + end + + it 'warns that the task is not finished' do + subject.failed? + + expect(logger).to have_received(:warn).with(a_string_including('checked before finishing')) + end + end + + context 'if the task has failed but not updated' do + let(:failed_task_hash) { task_hash.merge('status' => 'failed') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: failed_task_hash) } + + it 'refreshes and returns true' do + expect(subject).to be_failed + end + end + end + + describe '#succeeded?' do + it 'returns true if the task has succeeded' do + task_hash['status'] = 'succeeded' + expect(subject).to be_succeeded + end + + it 'returns false if the task has failed or been cancelled' do + task_hash['status'] = 'failed' + expect(subject).not_to be_succeeded + task_hash['status'] = 'cancelled' + expect(subject).not_to be_succeeded + end + + context 'if the task is not finished' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + + it 'returns false' do + expect(subject).not_to be_succeeded + end + + it 'warns that the task is not finished' do + subject.succeeded? 
+ + expect(logger).to have_received(:warn).with(a_string_including('checked before finishing')) + end + end + + context 'if the task has succeeded but not updated' do + let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } + + it 'refreshes and returns true' do + expect(subject).to be_succeeded + end + end + end + + describe '#cancelled?' do + it 'returns false if the task has succeeded or failed' do + task_hash['status'] = 'succeeded' + expect(subject).not_to be_cancelled + task_hash['status'] = 'failed' + expect(subject).not_to be_cancelled + end + + it 'returns true if the task has been cancelled' do + task_hash['status'] = 'cancelled' + expect(subject).to be_cancelled + end + + context 'if the task is not finished' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + + it 'returns false' do + expect(subject).not_to be_cancelled + end + + it 'warns that the task is not finished' do + subject.cancelled? + + expect(logger).to have_received(:warn).with(a_string_including('checked before finishing')) + end + end + + context 'if the task has failed but not updated' do + let(:cancelled_task_hash) { task_hash.merge('status' => 'cancelled') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: cancelled_task_hash) } + + it 'refreshes and returns true' do + expect(subject).to be_cancelled + end + end + end + + describe '#deleted?' 
do + let(:not_found_error) { MeiliSearch::ApiError.new(404, '', '') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } + + it 'returns false if the task can be found' do + expect(subject.deleted?).to be(false) # don't just return nil + expect(subject).not_to be_deleted + end + + context 'when it was deleted earlier' do + let(:endpoint) { instance_double(MeiliSearch::Task) } + + before do + allow(endpoint).to receive(:task) { raise not_found_error } + subject.refresh + end + + it 'does not check again' do + subject.deleted? + expect(endpoint).to have_received(:task).once + end + + it 'returns true' do + expect(subject).to be_deleted + end + end + + it 'refreshes and returns true when it is no longer in instance' do + allow(endpoint).to receive(:task) { raise not_found_error } + expect(subject).to be_deleted + end + end + + describe '#cancel' do + context 'if the task is still not finished' do + let(:cancellation_task) { instance_double(described_class, await: nil) } + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, cancel_tasks: cancellation_task) } + + it 'sends a request to cancel itself' do + subject.cancel + expect(endpoint).to have_received(:cancel_tasks) + end + + it 'returns true if the cancellation succeeds' do + task_hash['status'] = 'cancelled' + expect(subject.cancel).to be(true) + end + + it 'returns false if the cancellation fails' do + task_hash['status'] = 'succeeded' + expect(subject.cancel).to be(false) + end + end + + context 'if the task is already finished' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, cancel_tasks: nil) } + before { task_hash['status'] = 'succeeded' } + + it 'sends no request' do + subject.cancel + expect(endpoint).not_to have_received(:cancel_tasks) + end + + it 'returns false' do + expect(subject.cancel).to be(false) + end + end + + context 'if the task is already cancelled' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, 
cancel_tasks: nil) } + before { task_hash['status'] = 'cancelled' } + + it 'sends no request' do + subject.cancel + expect(endpoint).not_to have_received(:cancel_tasks) + end + + it 'returns true' do + expect(subject.cancel).to be(true) + end + end + end + + describe '#delete' do + let(:deletion_task) { instance_double(described_class, await: nil) } + let(:endpoint) { instance_double(MeiliSearch::Task, delete_tasks: deletion_task) } + + context 'if the task is unfinished' do + it 'makes no request' do + subject.delete + expect(endpoint).not_to have_received(:delete_tasks) + end + + it 'returns false' do + expect(subject.delete).to be(false) + end + end + + context 'if the task is finished' do + before do + task_hash['status'] = 'failed' + not_found_error = MeiliSearch::ApiError.new(404, '', '') + allow(endpoint).to receive(:task) { raise not_found_error } + end + + it 'makes a deletion request' do + subject.delete + expect(endpoint).to have_received(:delete_tasks) + end + + it 'returns true' do + expect(subject.delete).to be(true) + end + end + end + + describe '#refresh' do + let(:changed_task) { task_hash.merge('status' => 'succeeded', 'error' => 'Done too well') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: changed_task) } + + it 'calls endpoint to update task' do + expect { subject.refresh }.to change { subject['status'] }.from('enqueued').to('succeeded') + .and(change { subject['error'] }.from(nil).to('Done too well')) + end + end + + describe '#await' do + let(:changed_task) { task_hash.merge('status' => 'succeeded', 'error' => 'Done too well') } + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, wait_for_task: changed_task) } + + context 'if the task is not yet completed' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, wait_for_task: changed_task) } + + it 'waits if the task is yet not completed' do + expect { subject.await }.to change { subject['status'] }.from('enqueued').to('succeeded') + 
.and(change { subject['error'] }.from(nil).to('Done too well')) + end + + it 'returns itself for method chaining' do + expect(subject.await).to be(subject) + end + end + + context 'if the task is already completed' do + let(:endpoint) { instance_double(MeiliSearch::Task, task: changed_task, wait_for_task: changed_task) } + + it 'does not contact the instance' do + subject.refresh + subject.await + + expect(endpoint).to have_received(:task).once + expect(endpoint).not_to have_received(:wait_for_task) + end + end + end + + describe '#error' do + let(:error) do + { 'message' => "Index `#{new_index_uid}` already exists.", + 'code' => 'index_already_exists', + 'type' => 'invalid_request', + 'link' => 'https://docs.meilisearch.com/errors#index_already_exists' } + end + + before { task_hash.merge!('error' => error, 'status' => 'failed') } + + it 'returns errors' do + expect(subject.error).to match(error) + end + end + + describe '#to_h' do + it 'returns the underlying task hash' do + expect(subject.to_h).to be(task_hash) + end + + it 'is aliased as #to_hash' do + expect(subject.to_hash).to be(subject.to_h) + end + end +end diff --git a/spec/meilisearch/utils_spec.rb b/spec/meilisearch/utils_spec.rb index b555010e..08da5e8e 100644 --- a/spec/meilisearch/utils_spec.rb +++ b/spec/meilisearch/utils_spec.rb @@ -1,5 +1,4 @@ # frozen_string_literal: true -require 'logger' RSpec.describe MeiliSearch::Utils do let(:logger) { instance_double(Logger, warn: nil) } From 4608813a1cdaaed236780275b74322d8dc2071a4 Mon Sep 17 00:00:00 2001 From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Wed, 11 Oct 2023 14:23:39 +0200 Subject: [PATCH 04/12] Update some Index methods to use Model::Task Refactor all doc methods & specs with Model::Task Update global settings methods to use Model::Task Update ranking_rules methods to use Model::Task Update distinct attribute methods with Model::Task Update searchable attr methods to use Model::Task Update displayed attrs methods with 
Model::Task Update synonoms methods to use Model::Task Update stop words methods to use Model::Task Update filterable attrs methods to use Model::Task Update sortable attrs methods to use Model::Task --- lib/meilisearch/index.rb | 88 +-- lib/meilisearch/models/task.rb | 2 + spec/meilisearch/client/indexes_spec.rb | 4 +- spec/meilisearch/index/documents_spec.rb | 771 ++++++++++++----------- spec/meilisearch/index/settings_spec.rb | 591 ++++++++--------- 5 files changed, 729 insertions(+), 727 deletions(-) diff --git a/lib/meilisearch/index.rb b/lib/meilisearch/index.rb index eeaa4ea7..78fb022a 100644 --- a/lib/meilisearch/index.rb +++ b/lib/meilisearch/index.rb @@ -99,22 +99,25 @@ def add_documents!(documents, primary_key = nil) 'index.add_documents(...).await' ) - task = add_documents(documents, primary_key) - wait_for_task(task['taskUid']) + add_documents(documents, primary_key).await end alias replace_documents! add_documents! alias add_or_replace_documents! add_documents! def add_documents_json(documents, primary_key = nil) options = { convert_body?: false } - http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options + response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options + + Model::Task.new(response, task_endpoint) end alias replace_documents_json add_documents_json alias add_or_replace_documents_json add_documents_json def add_documents_ndjson(documents, primary_key = nil) options = { headers: { 'Content-Type' => 'application/x-ndjson' }, convert_body?: false } - http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options + response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options + + Model::Task.new(response, task_endpoint) end alias replace_documents_ndjson add_documents_ndjson alias add_or_replace_documents_ndjson add_documents_ndjson @@ -122,10 +125,12 @@ def 
add_documents_ndjson(documents, primary_key = nil) def add_documents_csv(documents, primary_key = nil, delimiter = nil) options = { headers: { 'Content-Type' => 'text/csv' }, convert_body?: false } - http_post "/indexes/#{@uid}/documents", documents, { + response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key, csvDelimiter: delimiter }.compact, options + + Model::Task.new(response, task_endpoint) end alias replace_documents_csv add_documents_csv alias add_or_replace_documents_csv add_documents_csv @@ -144,8 +149,7 @@ def update_documents!(documents, primary_key = nil) 'index.update_documents(...).await' ) - task = update_documents(documents, primary_key) - wait_for_task(task['taskUid']) + update_documents(documents, primary_key).await end alias add_or_update_documents! update_documents! @@ -181,12 +185,7 @@ def update_documents_in_batches!(documents, batch_size = 1000, primary_key = nil 'index.update_documents_in_batches(...).await' ) - tasks = update_documents_in_batches(documents, batch_size, primary_key) - responses = [] - tasks.each do |task_obj| - responses.append(wait_for_task(task_obj['taskUid'])) - end - responses + update_documents_in_batches(documents, batch_size, primary_key).each(&:await) end # Public: Delete documents from an index @@ -219,8 +218,7 @@ def delete_documents!(documents_ids) 'index.delete_documents(...).await' ) - task = delete_documents(documents_ids) - wait_for_task(task['taskUid']) + delete_documents(documents_ids).await end alias delete_multiple_documents! delete_documents! @@ -238,8 +236,7 @@ def delete_document!(document_id) 'index.delete_document(...).await' ) - task = delete_document(document_id) - wait_for_task(task['taskUid']) + delete_document(document_id).await end alias delete_one_document! delete_document! @@ -254,8 +251,7 @@ def delete_all_documents! 
'index.delete_all_documents(...).await' ) - task = delete_all_documents - wait_for_task(task['taskUid']) + delete_all_documents.await end ### SEARCH @@ -328,12 +324,14 @@ def settings alias get_settings settings def update_settings(settings) - http_patch "/indexes/#{@uid}/settings", Utils.transform_attributes(settings) + response = http_patch "/indexes/#{@uid}/settings", Utils.transform_attributes(settings) + MeiliSearch::Model::Task.new(response, task_endpoint) end alias settings= update_settings def reset_settings - http_delete "/indexes/#{@uid}/settings" + response = http_delete "/indexes/#{@uid}/settings" + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - RANKING RULES @@ -344,12 +342,14 @@ def ranking_rules alias get_ranking_rules ranking_rules def update_ranking_rules(ranking_rules) - http_put "/indexes/#{@uid}/settings/ranking-rules", ranking_rules + response = http_put "/indexes/#{@uid}/settings/ranking-rules", ranking_rules + MeiliSearch::Model::Task.new(response, task_endpoint) end alias ranking_rules= update_ranking_rules def reset_ranking_rules - http_delete "/indexes/#{@uid}/settings/ranking-rules" + response = http_delete "/indexes/#{@uid}/settings/ranking-rules" + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - SYNONYMS @@ -360,12 +360,14 @@ def synonyms alias get_synonyms synonyms def update_synonyms(synonyms) - http_put "/indexes/#{@uid}/settings/synonyms", synonyms + response = http_put "/indexes/#{@uid}/settings/synonyms", synonyms + MeiliSearch::Model::Task.new(response, task_endpoint) end alias synonyms= update_synonyms def reset_synonyms - http_delete "/indexes/#{@uid}/settings/synonyms" + response = http_delete "/indexes/#{@uid}/settings/synonyms" + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - STOP-WORDS @@ -377,12 +379,14 @@ def stop_words def update_stop_words(stop_words) body = stop_words.nil? || stop_words.is_a?(Array) ? 
stop_words : [stop_words] - http_put "/indexes/#{@uid}/settings/stop-words", body + response = http_put "/indexes/#{@uid}/settings/stop-words", body + MeiliSearch::Model::Task.new(response, task_endpoint) end alias stop_words= update_stop_words def reset_stop_words - http_delete "/indexes/#{@uid}/settings/stop-words" + response = http_delete "/indexes/#{@uid}/settings/stop-words" + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - DINSTINCT ATTRIBUTE @@ -393,12 +397,14 @@ def distinct_attribute alias get_distinct_attribute distinct_attribute def update_distinct_attribute(distinct_attribute) - http_put "/indexes/#{@uid}/settings/distinct-attribute", distinct_attribute + response = http_put "/indexes/#{@uid}/settings/distinct-attribute", distinct_attribute + MeiliSearch::Model::Task.new(response, task_endpoint) end alias distinct_attribute= update_distinct_attribute def reset_distinct_attribute - http_delete "/indexes/#{@uid}/settings/distinct-attribute" + response = http_delete "/indexes/#{@uid}/settings/distinct-attribute" + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - SEARCHABLE ATTRIBUTES @@ -409,12 +415,14 @@ def searchable_attributes alias get_searchable_attributes searchable_attributes def update_searchable_attributes(searchable_attributes) - http_put "/indexes/#{@uid}/settings/searchable-attributes", searchable_attributes + response = http_put "/indexes/#{@uid}/settings/searchable-attributes", searchable_attributes + MeiliSearch::Model::Task.new(response, task_endpoint) end alias searchable_attributes= update_searchable_attributes def reset_searchable_attributes - http_delete "/indexes/#{@uid}/settings/searchable-attributes" + response = http_delete "/indexes/#{@uid}/settings/searchable-attributes" + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - DISPLAYED ATTRIBUTES @@ -425,12 +433,14 @@ def displayed_attributes alias get_displayed_attributes displayed_attributes def 
update_displayed_attributes(displayed_attributes) - http_put "/indexes/#{@uid}/settings/displayed-attributes", displayed_attributes + response = http_put "/indexes/#{@uid}/settings/displayed-attributes", displayed_attributes + MeiliSearch::Model::Task.new(response, task_endpoint) end alias displayed_attributes= update_displayed_attributes def reset_displayed_attributes - http_delete "/indexes/#{@uid}/settings/displayed-attributes" + response = http_delete "/indexes/#{@uid}/settings/displayed-attributes" + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - FILTERABLE ATTRIBUTES @@ -441,12 +451,14 @@ def filterable_attributes alias get_filterable_attributes filterable_attributes def update_filterable_attributes(filterable_attributes) - http_put "/indexes/#{@uid}/settings/filterable-attributes", filterable_attributes + response = http_put "/indexes/#{@uid}/settings/filterable-attributes", filterable_attributes + Model::Task.new(response, task_endpoint) end alias filterable_attributes= update_filterable_attributes def reset_filterable_attributes - http_delete "/indexes/#{@uid}/settings/filterable-attributes" + response = http_delete "/indexes/#{@uid}/settings/filterable-attributes" + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - SORTABLE ATTRIBUTES @@ -457,12 +469,14 @@ def sortable_attributes alias get_sortable_attributes sortable_attributes def update_sortable_attributes(sortable_attributes) - http_put "/indexes/#{@uid}/settings/sortable-attributes", sortable_attributes + response = http_put "/indexes/#{@uid}/settings/sortable-attributes", sortable_attributes + MeiliSearch::Model::Task.new(response, task_endpoint) end alias sortable_attributes= update_sortable_attributes def reset_sortable_attributes - http_delete "/indexes/#{@uid}/settings/sortable-attributes" + response = http_delete "/indexes/#{@uid}/settings/sortable-attributes" + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - PAGINATION 
diff --git a/lib/meilisearch/models/task.rb b/lib/meilisearch/models/task.rb index 336d2cc0..4a17516a 100644 --- a/lib/meilisearch/models/task.rb +++ b/lib/meilisearch/models/task.rb @@ -59,12 +59,14 @@ def succeeded? status == 'succeeded' end + alias has_succeeded? succeeded? def failed? Utils.warn_on_unfinished_task(self) if unfinished? status == 'failed' end + alias has_failed? failed? def cancelled? Utils.warn_on_unfinished_task(self) if unfinished? diff --git a/spec/meilisearch/client/indexes_spec.rb b/spec/meilisearch/client/indexes_spec.rb index 664db7cb..dd92abe9 100644 --- a/spec/meilisearch/client/indexes_spec.rb +++ b/spec/meilisearch/client/indexes_spec.rb @@ -18,7 +18,7 @@ context 'synchronously' do context 'using ! method' do - before { allow(Utils).to receive(:soft_deprecate).and_return(nil) } + before { allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) } it 'creates an index' do task = client.create_index!('books') @@ -35,7 +35,7 @@ it 'warns about deprecation' do client.create_index!('books') - expect(Utils).to have_received(:soft_deprecate).with('Client#create_index!', a_string_matching(/books/)) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate).with('Client#create_index!', a_string_matching(/books/)) end end diff --git a/spec/meilisearch/index/documents_spec.rb b/spec/meilisearch/index/documents_spec.rb index ec426f2a..9d564c6b 100644 --- a/spec/meilisearch/index/documents_spec.rb +++ b/spec/meilisearch/index/documents_spec.rb @@ -16,155 +16,160 @@ ] end - describe 'adding documents' do - it 'adds documents (as a array of documents)' do - task = index.add_documents(documents) - - expect(task['type']).to eq('documentAdditionOrUpdate') - client.wait_for_task(task['taskUid']) - expect(index.documents['results'].count).to eq(documents.count) - end + let(:documents_with_string_keys) { documents.map { |doc| doc.transform_keys(&:to_s) } } + + describe '#add_documents' do + context 'passed an array of documents' do + it 
'adds documents' do + task = index.add_documents(documents) + expect(task.type).to eq('documentAdditionOrUpdate') + task.await + expect(index.documents['results']).to contain_exactly(*documents_with_string_keys) + end - it 'keeps the structure of the original documents' do - docs = [ - { object_id: 123, my_title: 'Pride and Prejudice', 'my-comment': 'A great book' } - ] + it 'keeps the structure of the original documents' do + doc = { object_id: 123, my_title: 'Pride and Prejudice', 'my-comment': 'A great book' } + index.add_documents([doc]).await - task = index.add_documents(docs) - client.wait_for_task(task['taskUid']) + expect(index.documents['results'].first.keys).to eq(doc.keys.map(&:to_s)) + end - expect(index.documents['results'].first.keys).to eq(docs.first.keys.map(&:to_s)) - end + it 'adds JSON documents' do + documents = <<~JSON + [ + { "objectRef": 123, "title": "Pride and Prejudice", "comment": "A great book" }, + { "objectRef": 456, "title": "Le Petit Prince", "comment": "A french book" }, + { "objectRef": 1, "title": "Alice In Wonderland", "comment": "A weird book" }, + { "objectRef": 1344, "title": "The Hobbit", "comment": "An awesome book" }, + { "objectRef": 4, "title": "Harry Potter and the Half-Blood Prince", "comment": "The best book" } + ] + JSON + index.add_documents_json(documents, 'objectRef').await + + expect(index.documents['results'].count).to eq(5) + end - it 'adds JSON documents (as a array of documents)' do - documents = <<~JSON - [ - { "objectRef": 123, "title": "Pride and Prejudice", "comment": "A great book" }, - { "objectRef": 456, "title": "Le Petit Prince", "comment": "A french book" }, - { "objectRef": 1, "title": "Alice In Wonderland", "comment": "A weird book" }, - { "objectRef": 1344, "title": "The Hobbit", "comment": "An awesome book" }, + it 'adds NDJSON documents' do + documents = <<~NDJSON + { "objectRef": 123, "title": "Pride and Prejudice", "comment": "A great book" } + { "objectRef": 456, "title": "Le Petit Prince", 
"comment": "A french book" } + { "objectRef": 1, "title": "Alice In Wonderland", "comment": "A weird book" } { "objectRef": 4, "title": "Harry Potter and the Half-Blood Prince", "comment": "The best book" } - ] - JSON - response = index.add_documents_json(documents, 'objectRef') - - index.wait_for_task(response['taskUid']) - expect(index.documents['results'].count).to eq(5) - end + NDJSON + index.add_documents_ndjson(documents, 'objectRef').await - it 'adds NDJSON documents (as a array of documents)' do - documents = <<~NDJSON - { "objectRef": 123, "title": "Pride and Prejudice", "comment": "A great book" } - { "objectRef": 456, "title": "Le Petit Prince", "comment": "A french book" } - { "objectRef": 1, "title": "Alice In Wonderland", "comment": "A weird book" } - { "objectRef": 4, "title": "Harry Potter and the Half-Blood Prince", "comment": "The best book" } - NDJSON - response = index.add_documents_ndjson(documents, 'objectRef') + expect(index.documents['results'].count).to eq(4) + end - index.wait_for_task(response['taskUid']) - expect(index.documents['results'].count).to eq(4) - end + it 'adds CSV documents' do + documents = <<~CSV + "objectRef:number","title:string","comment:string" + "1239","Pride and Prejudice","A great book" + "4569","Le Petit Prince","A french book" + "49","Harry Potter and the Half-Blood Prince","The best book" + CSV + index.add_documents_csv(documents, 'objectRef').await - it 'adds CSV documents (as a array of documents)' do - documents = <<~CSV - "objectRef:number","title:string","comment:string" - "1239","Pride and Prejudice","A great book" - "4569","Le Petit Prince","A french book" - "49","Harry Potter and the Half-Blood Prince","The best book" - CSV - response = index.add_documents_csv(documents, 'objectRef') + expect(index.documents['results'].count).to eq(3) + end - index.wait_for_task(response['taskUid']) - expect(index.documents['results'].count).to eq(3) - end + it 'adds CSV documents with different separator' do + documents = 
<<~CSV + "objectRef:number"|"title:string"|"comment:string" + "1239"|"Pride and Prejudice"|"A great book" + "4569"|"Le Petit Prince"|"A french book" + "49"|"Harry Potter and the Half-Blood Prince"|"The best book" + CSV + + index.add_documents_csv(documents, 'objectRef', '|').await + + expect(index.documents['results'].count).to eq(3) + expect(index.documents['results'][1]).to match( + 'objectRef' => 4569, + 'title' => 'Le Petit Prince', + 'comment' => 'A french book' + ) + end - it 'adds CSV documents (as an array of documents with a different separator)' do - documents = <<~CSV - "objectRef:number"|"title:string"|"comment:string" - "1239"|"Pride and Prejudice"|"A great book" - "4569"|"Le Petit Prince"|"A french book" - "49"|"Harry Potter and the Half-Blood Prince"|"The best book" - CSV + it 'infers order of fields' do + index.add_documents(documents).await + task = index.document(1) + expect(task.keys).to eq(['objectId', 'title', 'comment']) + end - response = index.add_documents_csv(documents, 'objectRef', '|') - index.wait_for_task(response['taskUid']) + it 'slices response fields' do + index.add_documents(documents).await - expect(index.documents['results'].count).to eq(3) - expect(index.documents['results'][1]['objectRef']).to eq(4569) - expect(index.documents['results'][1]['title']).to eq('Le Petit Prince') - expect(index.documents['results'][1]['comment']).to eq('A french book') - end + document = index.document(1, fields: ['title']) - it 'adds documents in a batch (as a array of documents)' do - task = index.add_documents_in_batches(documents, 5) - expect(task).to be_a(Array) - expect(task.count).to eq(2) # 2 batches, since we start with 5 < documents.count <= 10 documents - expect(task[0]).to have_key('taskUid') - task.each do |task_object| - client.wait_for_task(task_object['taskUid']) + expect(document.keys).to eq(['title']) end - expect(index.documents['results'].count).to eq(documents.count) - end - it 'adds documents synchronously (as an array of 
documents)' do - task = index.add_documents(documents).await + it 'infers primary-key attribute' do + index.add_documents(documents).await + expect(index.fetch_primary_key).to eq('objectId') + end - expect(task).to have_key('status') - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - expect(index.documents['results'].count).to eq(documents.count) - end + it 'creates the index during document addition' do + new_index = client.index('books') + new_index.add_documents(documents).await - it 'adds document batches synchronously (as an array of documents)' do - task = index.add_documents_in_batches(documents, 5).await - expect(task).to be_a(Array) - expect(task.count).to eq(2) # 2 batches, since we start with 5 < documents.count <= 10 documents - task.each do |task_object| - expect(task_object).to have_key('uid') - expect(task_object).to have_key('status') - expect(task_object['status']).not_to eql('enqueued') - expect(task_object['status']).to eql('succeeded') + expect(client.index('books').fetch_primary_key).to eq('objectId') + expect(client.index('books').documents['results'].count).to eq(documents.count) end - expect(index.documents['results'].count).to eq(documents.count) end - it 'infers order of fields' do - index.add_documents(documents).await - task = index.document(1) - expect(task.keys).to eq(['objectId', 'title', 'comment']) - end + it 'adds documents in a batch (as a array of documents)' do + tasks = index.add_documents_in_batches(documents, 5) + expect(tasks).to contain_exactly(a_kind_of(MeiliSearch::Model::Task), + a_kind_of(MeiliSearch::Model::Task)) + tasks.each(&:await) + expect(index.documents['results']).to contain_exactly(*documents_with_string_keys) + end + + context 'given a single document' do + it 'adds only one document to index (as an hash of one document)' do + new_doc = { objectId: 30, title: 'Hamlet' } + client.create_index('books').await + new_index = client.index('books') + expect do + 
new_index.add_documents(new_doc).await + end.to(change { new_index.documents['results'].length }.by(1)) - it 'slices response fields' do - index.add_documents(documents).await + expect(new_index.document(30)['title']).to eq('Hamlet') + end - task = index.document(1, fields: ['title']) + it 'fails to add document with bad primary-key format' do + index.add_documents(documents).await + task = index.add_documents(objectId: 'toto et titi', title: 'Unknown').await + expect(task).to have_failed + end - expect(task.keys).to eq(['title']) - end + it 'fails to add document with no primary-key' do + index.add_documents(documents).await + task = index.add_documents(id: 0, title: 'Unknown').await + expect(task).to have_failed + end - it 'infers primary-key attribute' do - index.add_documents(documents).await - expect(index.fetch_primary_key).to eq('objectId') + it 'allows the user to store vectors' do + enable_vector_store(true) + new_doc = { objectId: 123, _vectors: [0.1, 0.2, 0.3] } + client.create_index('vector_test').await + new_index = client.index('vector_test') + new_index.add_documents(new_doc).await + expect(new_index.document(123)['_vectors']).to include(0.1) + end end + end - it 'create the index during document addition' do - new_index = client.index('books') - task = new_index.add_documents(documents) - - new_index.wait_for_task(task['taskUid']) - expect(client.index('books').fetch_primary_key).to eq('objectId') - expect(client.index('books').documents['results'].count).to eq(documents.count) - end + describe '#add_documents!' 
do + before { allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) } - it 'adds only one document to index (as an hash of one document)' do - new_doc = { objectId: 30, title: 'Hamlet' } - client.create_index('books').await - new_index = client.index('books') - expect do - new_index.add_documents(new_doc).await + it 'adds documents synchronously (as an array of documents)' do + task = index.add_documents!(documents) - expect(new_index.document(30)['title']).to eq('Hamlet') - end.to(change { new_index.documents['results'].length }.by(1)) + expect(task).to be_finished + expect(index.documents['results'].count).to eq(documents.count) end it 'adds only one document synchronously to index (as an hash of one document)' do @@ -180,198 +185,120 @@ end.to(change { new_index.documents['results'].length }.by(1)) end - it 'fails to add document with bad primary-key format' do - index.add_documents(documents).await - task = index.add_documents(objectId: 'toto et titi', title: 'Unknown') - client.wait_for_task(task['taskUid']) - expect(index.task(task['taskUid'])['status']).to eq('failed') + it 'warns about deprecation' do + index.add_documents!(documents) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#add_documents!', a_string_including('await')) end + end - it 'fails to add document with no primary-key' do - index.add_documents(documents).await - task = index.add_documents(id: 0, title: 'Unknown') - client.wait_for_task(task['taskUid']) - expect(index.task(task['taskUid'])['status']).to eq('failed') - end + describe '#add_documents_in_batches!' 
do + before { allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) } - it 'allows the user to store vectors' do - enable_vector_store(true) + it 'adds document batches synchronously (as an array of documents)' do + task = index.add_documents_in_batches(documents, 5).each(&:await) + expect(task).to be_a(Array) + expect(task.count).to eq(2) # 2 batches, since we start with 5 < documents.count <= 10 documents + task.each do |task_object| + expect(task_object).to have_key('uid') + expect(task_object).to have_key('status') + expect(task_object['status']).not_to eql('enqueued') + expect(task_object['status']).to eql('succeeded') + end + expect(index.documents['results'].count).to eq(documents.count) + end - new_doc = { objectId: 123, _vectors: [0.1, 0.2, 0.3] } - client.create_index('vector_test').await - new_index = client.index('vector_test') - expect do - new_index.add_documents(new_doc).await - end.to(change { new_index.documents['results'].length }.by(1)) - expect(new_index.document(123)).to have_key('_vectors') - expect(new_index.document(123)['_vectors']).to be_a(Array) - expect(new_index.document(123)['_vectors'].first).to be_a(Float) - expect(new_index.document(123)['_vectors'].first).to eq(0.1) + it 'warns about deprecation' do + index.add_documents_in_batches!(documents, 5) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#add_documents_in_batches!', a_string_including('await')) end end - describe 'accessing documents' do - before do - index.add_documents(documents) + describe '#document' do + before { index.add_documents(documents).await } - task = index.update_filterable_attributes(['title', 'objectId']) - client.wait_for_task(task['taskUid']) + it 'gets one document from its primary-key' do + expect(index.document(123)).to include( + 'title' => 'Pride and Prejudice', + 'comment' => 'A great book' + ) end + end - it 'gets one document from its primary-key' do - task = index.document(123) - expect(task).to 
be_a(Hash) - expect(task['title']).to eq('Pride and Prejudice') - expect(task['comment']).to eq('A great book') + describe '#documents' do + before do + index.add_documents(documents).await + index.update_filterable_attributes(['title', 'objectId']).await end it 'browses documents' do docs = index.documents['results'] - - expect(docs).to be_a(Array) - expect(docs.size).to eq(documents.count) - expected_titles = documents.map { |doc| doc[:title] } - expect(docs.map { |doc| doc['title'] }).to contain_exactly(*expected_titles) + expect(docs).to contain_exactly(*documents_with_string_keys) end it 'browses documents with query parameters' do docs = index.documents(offset: 2, limit: 5)['results'] - expect(docs).to be_a(Array) expect(docs.size).to eq(5) - expect(docs.first['objectId']).to eq(index.documents['results'][2]['objectId']) + expect(docs.first).to eq(index.documents['results'][2]) end it 'browses documents with fields' do docs = index.documents(fields: ['title'])['results'] - expect(docs).to be_a(Array) - expect(docs.first.keys).to eq(['title']) + expect(docs).to include(a_hash_including('title')) + expect(docs).not_to include(a_hash_including('comment')) end it 'retrieves documents by filters' do docs = index.documents(filter: 'objectId > 400')['results'] - expect(docs).to be_a(Array) - expect(docs.first).to eq({ - 'objectId' => 456, - 'title' => 'Le Petit Prince', - 'comment' => 'A french book' - }) + expect(docs).to include('objectId' => 456, + 'title' => 'Le Petit Prince', + 'comment' => 'A french book') end it 'retrieves documents by filters & other parameters' do docs = index.documents(fields: ['title'], filter: 'objectId > 100')['results'] - expect(docs).to be_a(Array) - expect(docs.size).to eq(3) - expect(docs.first.keys).to eq(['title']) + expect(docs).to contain_exactly( + { 'title' => a_kind_of(String) }, + { 'title' => a_kind_of(String) }, + { 'title' => a_kind_of(String) } + ) end end - describe 'updating documents' do + describe '#update_documents' 
do before { index.add_documents(documents).await } - it 'updates documents in index (as an array of documents)' do - id1 = 123 - id2 = 456 - updated_documents = [ - { objectId: id1, title: 'Sense and Sensibility' }, - { objectId: id2, title: 'The Little Prince' } - ] - task = index.update_documents(updated_documents) - client.wait_for_task(task['taskUid']) - doc1 = index.document(id1) - doc2 = index.document(id2) - expect(index.documents['results'].count).to eq(documents.count) - expect(doc1['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id1 }[:title]) - expect(doc1['comment']).to eq(documents.detect { |doc| doc[:objectId] == id1 }[:comment]) - expect(doc2['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id2 }[:title]) - expect(doc2['comment']).to eq(documents.detect { |doc| doc[:objectId] == id2 }[:comment]) - end + it 'updates multiple documents in index' do + index.update_documents( + [{ objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' }] + ).await - it 'updates documents synchronously in index (as an array of documents)' do - id1 = 123 - id2 = 456 - updated_documents = [ - { objectId: id1, title: 'Sense and Sensibility' }, - { objectId: id2, title: 'The Little Prince' } - ] - task = index.update_documents(updated_documents).await - - expect(task).to have_key('status') - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - doc1 = index.document(id1) - doc2 = index.document(id2) expect(index.documents['results'].count).to eq(documents.count) - expect(doc1['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id1 }[:title]) - expect(doc1['comment']).to eq(documents.detect { |doc| doc[:objectId] == id1 }[:comment]) - expect(doc2['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id2 }[:title]) - expect(doc2['comment']).to eq(documents.detect { |doc| doc[:objectId] == id2 }[:comment]) + 
expect(index.document(123)).to include('objectId' => 123, 'title' => 'Sense and Sensibility') + expect(index.document(456)).to include('objectId' => 456, 'title' => 'The Little Prince') end - it 'updates documents synchronously in index in batches (as an array of documents)' do - id1 = 123 - id2 = 456 - updated_documents = [ - { objectId: id1, title: 'Sense and Sensibility' }, - { objectId: id2, title: 'The Little Prince' } - ] - task = index.update_documents_in_batches(updated_documents, 1).await - expect(task).to be_a(Array) - expect(task.count).to eq(2) # 2 batches, since we have two items with batch size 1 - task.each do |task_object| - expect(task_object).to have_key('uid') - expect(task_object).to have_key('status') - expect(task_object['status']).not_to eql('enqueued') - expect(task_object['status']).to eql('succeeded') - end - doc1 = index.document(id1) - doc2 = index.document(id2) - expect(index.documents['results'].count).to eq(documents.count) - expect(doc1['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id1 }[:title]) - expect(doc1['comment']).to eq(documents.detect { |doc| doc[:objectId] == id1 }[:comment]) - expect(doc2['title']).to eq(updated_documents.detect { |doc| doc[:objectId] == id2 }[:title]) - expect(doc2['comment']).to eq(documents.detect { |doc| doc[:objectId] == id2 }[:comment]) - end - - it 'updates one document in index (as an hash of one document)' do - id = 123 - updated_document = { objectId: id, title: 'Emma' } - task = index.update_documents(updated_document) - client.wait_for_task(task['taskUid']) + it 'updates a single document in index' do + index.update_documents({ objectId: 123, title: 'Emma' }).await expect(index.documents['results'].count).to eq(documents.count) - new_doc = index.document(id) - expect(new_doc['title']).to eq(updated_document[:title]) - expect(new_doc['comment']).to eq(documents.detect { |doc| doc[:objectId] == id }[:comment]) - end - - it 'updates one document synchronously in index (as an 
hash of one document)' do - id = 123 - updated_document = { objectId: id, title: 'Emma' } - task = index.update_documents(updated_document).await - - expect(task).to have_key('status') - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - expect(index.documents['results'].count).to eq(documents.count) - new_doc = index.document(id) - expect(new_doc['title']).to eq(updated_document[:title]) - expect(new_doc['comment']).to eq(documents.detect { |doc| doc[:objectId] == id }[:comment]) + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Emma') end it 'update a document with new fields' do - id = 2 - doc = { objectId: id, note: '8/10' } - task = index.update_documents(doc) - client.wait_for_task(task['taskUid']) + doc = { objectId: 2, note: '8/10' } + old_title = 'Le Rouge et le Noir' + + index.update_documents(doc).await expect(index.documents['results'].count).to eq(documents.count) - new_document = index.document(id) - expect(new_document['title']).to eq(documents.detect { |d| d[:objectId] == id }[:title]) - expect(new_document).to have_key('note') + expect(index.document(2)).to include('title' => old_title, 'note' => '8/10') end it 'replaces document' do @@ -388,16 +315,114 @@ end end - describe 'deleting documents' do + describe '#update_documents!' 
do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) + end + + it 'updates multiple documents synchronously' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + expect(index.update_documents!(updated_documents)).to be_succeeded + + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Sense and Sensibility') + expect(index.document(456)).to include('objectId' => 456, 'title' => 'The Little Prince') + end + + it 'updates a single document synchronously' do + updated_document = { objectId: 123, title: 'Emma' } + + expect(index.update_documents!(updated_document)).to be_succeeded + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Emma') + end + + it 'warns about deprecation' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + index.update_documents!(updated_documents) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#update_documents!', a_string_including('await')) + end + end + + describe '#update_documents_in_batches' do + before { index.add_documents(documents).await } + + it 'updates documents in index in batches' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + index.update_documents_in_batches(updated_documents, 1).each(&:await) + + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Sense and Sensibility') + expect(index.document(456)).to include('objectId' => 456, 'title' => 'The Little Prince') + end + end + + describe '#update_documents_in_batches!' 
do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) + end + + it 'updates documents synchronously in index in batches (as an array of documents)' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + expect(index.update_documents_in_batches!(updated_documents, 1)) + .to contain_exactly(be_succeeded, be_succeeded) + + expect(index.document(123)).to include('objectId' => 123, 'title' => 'Sense and Sensibility') + expect(index.document(456)).to include('objectId' => 456, 'title' => 'The Little Prince') + end + + it 'warns about deprecation' do + updated_documents = [ + { objectId: 123, title: 'Sense and Sensibility' }, + { objectId: 456, title: 'The Little Prince' } + ] + + index.update_documents_in_batches!(updated_documents, 1) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#update_documents_in_batches!', a_string_including('await')) + end + end + + describe '#delete_document' do before { index.add_documents(documents).await } it 'deletes one document from index' do id = 456 - task = index.delete_document(id) - client.wait_for_task(task['taskUid']) + index.delete_document(id).await - expect(index.documents['results'].size).to eq(documents.count - 1) + expect(index.documents['results']).not_to include(a_hash_including('id' => 456)) + end + + it 'does nothing when trying to delete a document which does not exist' do + id = 111 expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error + expect do + task = index.delete_document(id) + client.wait_for_task(task['taskUid']) + end.not_to(change { index.documents['results'].size }) + end + end + + describe '#delete_document!' 
do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) end it 'deletes one document synchronously from index' do @@ -411,122 +436,116 @@ expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error end - it 'does nothing when trying to delete a document which does not exist' do - id = 111 - expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error - expect do - task = index.delete_document(id) - client.wait_for_task(task['taskUid']) - end.not_to(change { index.documents['results'].size }) + it 'warns about deprecation' do + index.delete_document!(2) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#delete_document!', a_string_including('await')) end + end + + describe '#delete_documents' do + before { index.add_documents(documents).await } - it 'deletes one document from index (with delete-batch route)' do + it 'deletes a single document' do id = 2 expect do - task = index.delete_documents(id) - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(-1)) + index.delete_documents(id).await + end.to change { index.documents['results'].size }.by(-1) expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error end - it 'deletes documents based on filter from index (with delete route)' do - expect do - index.update_filterable_attributes(['objectId']) - task = index.delete_documents(filter: ['objectId > 0']) - - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(-documents.size)) + it 'deletes documents based on filter from index' do + index.update_filterable_attributes(['objectId']) + index.delete_documents(filter: ['objectId > 0']).await + expect(index.documents['results']).to be_empty end - it 'ignores filter even when documents_ids is empty (with delete-batch route)' do + it 'ignores filters when documents_ids is empty' do expect do 
- task = index.delete_documents(filter: ['objectId > 0']) - - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(0)) + index.delete_documents(filter: ['objectId > 0']).await + end.not_to(change { index.documents['results'] }) end - it 'deletes one document synchronously from index (with delete-batch route)' do - id = 2 + it 'deletes multiple documents from index' do + docs_to_delete = [1, 4] expect do - task = index.delete_documents(id).await - - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - end.to(change { index.documents['results'].size }.by(-1)) - expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error + index.delete_documents(docs_to_delete).await + end.to change { index.documents['results'].size }.by(-2) end + end - it 'deletes one document from index (with delete-batch route as an array of one uid)' do - id = 123 - expect do - task = index.delete_documents([id]) - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(-1)) - expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error + describe '#delete_documents!' 
do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) end - it 'deletes one document synchronously from index (with delete-batch route as an array of one uid)' do - id = 123 - expect do - task = index.delete_documents([id]).await + it 'deletes a single document' do + id = 2 - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - end.to(change { index.documents['results'].size }.by(-1)) - expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error + expect(index.delete_documents!(id)).to be_succeeded + expect(index.documents['results']).not_to include(a_hash_including('id' => 2)) end - it 'deletes multiples documents from index' do + it 'deletes multiple documents' do docs_to_delete = [1, 4] - expect do - task = index.delete_documents(docs_to_delete) - client.wait_for_task(task['taskUid']) - end.to(change { index.documents['results'].size }.by(-2)) - end + expect(index.delete_documents!(docs_to_delete)).to be_succeeded - it 'deletes multiples documents synchronously from index' do - docs_to_delete = [1, 4] - expect do - task = index.delete_documents(docs_to_delete).await + expect(index.documents['results']).not_to include( + a_hash_including('id' => 1), + a_hash_including('id' => 4) + ) + end - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') - end.to(change { index.documents['results'].size }.by(-2)) + it 'warns about deprecation' do + index.delete_documents!([2]) + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#delete_documents!', a_string_including('await')) end + end + + describe '#delete_all_documents' do + before { index.add_documents(documents).await } it 'clears all documents from index' do - expect do - task = index.delete_all_documents - client.wait_for_task(task['taskUid']) - expect(index.documents['results']).to be_empty - end.to(change { 
index.documents['results'].size }.from(documents.size).to(0)) + expect(index.documents['results']).not_to be_empty + index.delete_all_documents.await + expect(index.documents['results']).to be_empty end + end - it 'clears all documents synchronously from index' do - task = index.delete_all_documents! + describe '#delete_all_documents!' do + before do + index.add_documents(documents).await + allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) + end - expect(task).to have_key('status') - expect(task['status']).not_to eql('enqueued') - expect(task['status']).to eql('succeeded') + it 'clears all documents synchronously from index' do + expect(index.documents['results']).not_to be_empty + expect(index.delete_all_documents!).to be_succeeded expect(index.documents['results']).to be_empty - expect(index.documents['results'].size).to eq(0) + end + + it 'warns about deprecation' do + index.delete_all_documents! + expect(MeiliSearch::Utils).to have_received(:soft_deprecate) + .with('Index#delete_all_documents!', a_string_including('await')) end end it 'works with method aliases' do - expect(index.method(:document) == index.method(:get_document)).to be_truthy - expect(index.method(:document) == index.method(:get_one_document)).to be_truthy - expect(index.method(:documents) == index.method(:get_documents)).to be_truthy - expect(index.method(:add_documents) == index.method(:add_or_replace_documents)).to be_truthy - expect(index.method(:add_documents) == index.method(:replace_documents)).to be_truthy - expect(index.method(:update_documents) == index.method(:add_or_update_documents)).to be_truthy - expect(index.method(:delete_documents) == index.method(:delete_multiple_documents)).to be_truthy - expect(index.method(:delete_document) == index.method(:delete_one_document)).to be_truthy + expect(index.method(:document)).to eq index.method(:get_document) + expect(index.method(:document)).to eq index.method(:get_one_document) + expect(index.method(:documents)).to eq 
index.method(:get_documents) +      expect(index.method(:add_documents)).to eq index.method(:add_or_replace_documents) +      expect(index.method(:add_documents)).to eq index.method(:replace_documents) +      expect(index.method(:update_documents)).to eq index.method(:add_or_update_documents) +      expect(index.method(:delete_documents)).to eq index.method(:delete_multiple_documents) +      expect(index.method(:delete_document)).to eq index.method(:delete_one_document)      end    end -  context 'Right primary-key added when pushing documents' do +  context 'when the right primary key is passed' do      let(:documents) do        [          { unique: 1, id: 1, title: 'Pride and Prejudice', comment: 'A great book' }, @@ -535,14 +554,12 @@        ]      end -    it 'adds documents and the primary-key' do -      task = index.add_documents(documents, 'unique') -      expect(task).to be_a(Hash) -      client.wait_for_task(task['taskUid']) +    it 'adds documents and the primary key' do +      index.add_documents(documents, 'unique').await        expect(index.fetch_primary_key).to eq('unique')      end -    it 'does not take into account the new primary key' do +    it 'fails to add tasks with a different primary key' do        index.add_documents(documents, 'unique').await        task = index.update_documents({                                        unique: 3, @@ -550,65 +567,65 @@                                        title: 'The Red and the Black'                                      }, 'id') -      task = client.wait_for_task(task['taskUid']) - -      expect(task['status']).to eq('failed') -      expect(task['type']).to eq('documentAdditionOrUpdate') -      expect(task['error']['code']).to eq('index_primary_key_already_exists') +      expect(task.await).to be_failed +      expect(task.type).to eq('documentAdditionOrUpdate') +      expect(task.error['code']).to eq('index_primary_key_already_exists')      end    end -  context 'Wrong primary-key (attribute does not exist) when pushing documents' do +  context 'when passed a non-existent attribute as primary key' do      let(:documents) do      { unique: 3, id: 1, title: 'Le Rouge et le Noir' }    end -    it 'does not add the primary key and the documents either' do -      task = index.update_documents(documents, 
'objectId') - client.wait_for_task(task['taskUid']) + it 'fails to add the documents and the primary key' do + task = index.update_documents(documents, 'objectId').await + expect(task).to be_failed expect(index.fetch_primary_key).to be_nil - expect(index.task(task['taskUid'])['status']).to eq('failed') end end - context 'Wrong primary-key (attribute bad formatted) when pushing documents' do + context 'when the specified primary key field is of an unsupported type' do let(:documents) do { id: 1, title: 'Le Rouge et le Noir' } end - it 'does not add the primary key and the documents either' do - task = index.add_documents(documents, 'title') - client.wait_for_task(task['taskUid']) + it 'fails to add the primary key and the documents' do + task = index.add_documents(documents, 'title').await + expect(task).to be_failed expect(index.fetch_primary_key).to be_nil - expect(index.task(task['taskUid'])['status']).to eq('failed') - expect(index.documents['results'].count).to eq(0) + expect(index.documents['results']).to be_empty end end - context 'Impossible to infer the primary-key' do + context 'when it is not possible to infer the primary key' do let(:documents) do { title: 'Le Rouge et le Noir' } end - it 'Impossible to push docs if the pk is missing' do + it 'fails to add documents' do task = index.add_documents(documents).await - update = index.task(task['uid']) - expect(update['status']).to eq('failed') - expect(update['error']['code']).to eq('index_primary_key_no_candidate_found') + expect(task).to be_failed + expect(task.error['code']).to eq('index_primary_key_no_candidate_found') end end - context 'Impossible to update primary-key if already given during index creation' do + context 'when the primary key was specified on the index' do + let(:index) do + uid = random_uid + client.create_index uid, primary_key: 'id' + client.index(uid) + end + let(:documents) do { id: 1, unique: 1, title: 'Le Rouge et le Noir' } end - it 'adds the documents anyway' do + it 'fails to 
add documents with another primary key' do task = index.add_documents(documents, 'unique') - expect(task).to be_a(Hash) - client.wait_for_task(task['taskUid']) - expect(index.fetch_primary_key).to eq('unique') - expect(index.documents['results'].count).to eq(1) + task.await + expect(index.fetch_primary_key).to eq('id') + expect(index.documents['results']).to be_empty end end end diff --git a/spec/meilisearch/index/settings_spec.rb b/spec/meilisearch/index/settings_spec.rb index c9fce495..02d8abe8 100644 --- a/spec/meilisearch/index/settings_spec.rb +++ b/spec/meilisearch/index/settings_spec.rb @@ -41,264 +41,264 @@ before { client.create_index(uid).await } - it 'gets default values of settings' do - settings = index.settings - expect(settings).to be_a(Hash) - expect(settings.keys).to include(*settings_keys) - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['searchableAttributes']).to eq(default_searchable_attributes) - expect(settings['displayedAttributes']).to eq(default_displayed_attributes) - expect(settings['stopWords']).to eq([]) - expect(settings['synonyms']).to eq({}) - expect(settings['pagination'].transform_keys(&:to_sym)).to eq(default_pagination) - expect(settings['filterableAttributes']).to eq([]) - expect(settings['sortableAttributes']).to eq([]) - expect(settings['proximityPrecision']).to eq(default_proximity_precision) + it '#settings gets default values of settings' do + expect(index.settings).to include( + 'rankingRules' => default_ranking_rules, + 'distinctAttribute' => nil, + 'searchableAttributes' => default_searchable_attributes, + 'displayedAttributes' => default_displayed_attributes, + 'stopWords' => [], + 'synonyms' => {}, + 'pagination' => default_pagination.transform_keys(&:to_s), + 'filterableAttributes' => [], + 'sortableAttributes' => [], + 'dictionary' => [], + 'separatorTokens' => [], + 'nonSeparatorTokens' => [], + 'proximityPrecision' => 
default_proximity_precision + ) end - it 'updates multiples settings at the same time' do - task = index.update_settings( - ranking_rules: ['title:asc', 'typo'], - distinct_attribute: 'title' - ) + describe '#update_settings' do + it 'updates multiples settings at the same time' do + task = index.update_settings( + ranking_rules: ['title:asc', 'typo'], + distinct_attribute: 'title' + ) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.settings - expect(settings['rankingRules']).to eq(['title:asc', 'typo']) - expect(settings['distinctAttribute']).to eq('title') - expect(settings['stopWords']).to be_empty - end + expect(task.type).to eq('settingsUpdate') + task.await - it 'updates one setting without reset the others' do - task = index.update_settings(stop_words: ['the']) + expect(index.settings).to include( + 'rankingRules' => ['title:asc', 'typo'], + 'distinctAttribute' => 'title', + 'stopWords' => [] + ) + end - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.settings - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['stopWords']).to eq(['the']) - expect(settings['synonyms']).to be_empty + it 'updates one setting without touching the others' do + task = index.update_settings(stop_words: ['the']) + + expect(task.type).to eq('settingsUpdate') + task.await + + expect(index.settings).to include( + 'rankingRules' => default_ranking_rules, + 'distinctAttribute' => nil, + 'stopWords' => ['the'], + 'synonyms' => {} + ) + end end - it 'resets all settings' do - task = index.update_settings( + it '#reset_settings resets all settings' do + index.update_settings( ranking_rules: ['title:asc', 'typo'], distinct_attribute: 'title', stop_words: ['the', 'a'], synonyms: { wow: ['world of warcraft'] }, proximity_precision: 'byAttribute' - ) - client.wait_for_task(task['taskUid']) + ).await 
task = index.reset_settings - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - settings = index.settings - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['stopWords']).to be_empty - expect(settings['synonyms']).to be_empty - expect(settings['proximityPrecision']).to eq(default_proximity_precision) + expect(task.type).to eq('settingsUpdate') + task.await + + expect(index.settings).to include( + 'rankingRules' => default_ranking_rules, + 'distinctAttribute' => nil, + 'stopWords' => [], + 'synonyms' => {}, + 'proximityPrecision' => default_proximity_precision + ) end end - context 'On ranking-rules sub-routes' do + context 'On ranking rules' do let(:index) { client.index(uid) } let(:ranking_rules) { ['title:asc', 'words', 'typo'] } let(:wrong_ranking_rules) { ['title:asc', 'typos'] } before { client.create_index(uid).await } - it 'gets default values of ranking rules' do - settings = index.ranking_rules - expect(settings).to eq(default_ranking_rules) + it '#ranking_rules gets default values of ranking rules' do + expect(index.ranking_rules).to eq(default_ranking_rules) end - it 'updates ranking rules' do - task = index.update_ranking_rules(ranking_rules) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - expect(index.ranking_rules).to eq(ranking_rules) - end + describe '#update_ranking_rules' do + it 'updates ranking rules' do + task = index.update_ranking_rules(ranking_rules) + expect(task.type).to eq('settingsUpdate') + task.await - it 'updates ranking rules at null' do - task = index.update_ranking_rules(ranking_rules) - client.wait_for_task(task['taskUid']) + expect(index.ranking_rules).to eq(ranking_rules) + end - task = index.update_ranking_rules(nil) + it 'resets ranking rules when passed nil' do + index.update_ranking_rules(ranking_rules).await + task = index.update_ranking_rules(nil) - 
expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.ranking_rules).to eq(default_ranking_rules) - end + expect(index.ranking_rules).to eq(default_ranking_rules) + end - it 'fails when updating with wrong ranking rules name' do - expect do - index.update_ranking_rules(wrong_ranking_rules) - end.to raise_meilisearch_api_error_with(400, 'invalid_settings_ranking_rules', 'invalid_request') + it 'fails when updating with wrong ranking rules name' do + expect do + index.update_ranking_rules(wrong_ranking_rules) + end.to raise_meilisearch_api_error_with(400, 'invalid_settings_ranking_rules', 'invalid_request') + end end - it 'resets ranking rules' do - task = index.update_ranking_rules(ranking_rules) - client.wait_for_task(task['taskUid']) - + it '#reset_ranking_rules resets ranking rules' do + index.update_ranking_rules(ranking_rules).await task = index.reset_ranking_rules - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await expect(index.ranking_rules).to eq(default_ranking_rules) end end - context 'On distinct-attribute sub-routes' do + context 'On distinct attribute' do let(:index) { client.index(uid) } let(:distinct_attribute) { 'title' } - it 'gets default values of distinct attribute' do - client.create_index(uid).await - settings = index.distinct_attribute + before { client.create_index(uid).await } - expect(settings).to be_nil + it '#distinct_attribute gets default values of distinct attribute' do + expect(index.distinct_attribute).to be_nil end - it 'updates distinct attribute' do - task = index.update_distinct_attribute(distinct_attribute) + describe '#update_distinct_attribute' do + it 'updates distinct attribute' do + task = index.update_distinct_attribute(distinct_attribute) + expect(task.type).to eq('settingsUpdate') + task.await - expect(task['type']).to 
eq('settingsUpdate') -      client.wait_for_task(task['taskUid']) +        expect(index.distinct_attribute).to eq(distinct_attribute) +      end -      expect(index.distinct_attribute).to eq(distinct_attribute) -    end +      it 'resets distinct attributes when passed nil' do +        task = index.update_distinct_attribute(distinct_attribute) +        expect(task.type).to eq('settingsUpdate') +        task.await -    it 'updates distinct attribute at null' do -      task = index.update_distinct_attribute(distinct_attribute) -      expect(task['type']).to eq('settingsUpdate') -      client.wait_for_task(task['taskUid']) +        task = index.update_distinct_attribute(nil) +        expect(task.type).to eq('settingsUpdate') +        task.await -      task = index.update_distinct_attribute(nil) -      expect(task['type']).to eq('settingsUpdate') -      client.wait_for_task(task['taskUid']) - -      expect(index.distinct_attribute).to be_nil +        expect(index.distinct_attribute).to be_nil +      end      end -    it 'resets distinct attribute' do +    it '#reset_distinct_attribute resets distinct attribute' do        task = index.update_distinct_attribute(distinct_attribute) -      expect(task['type']).to eq('settingsUpdate') -      client.wait_for_task(task['taskUid']) +      expect(task.type).to eq('settingsUpdate') +      task.await        task = index.reset_distinct_attribute -      expect(task['type']).to eq('settingsUpdate') -      client.wait_for_task(task['taskUid']) +      expect(task.type).to eq('settingsUpdate') +      task.await        expect(index.distinct_attribute).to be_nil      end    end -  context 'On searchable-attributes sub-routes' do +  context 'On searchable attributes' do      let(:index) { client.index(uid) }      let(:searchable_attributes) { ['title', 'description'] }      before { client.create_index(uid).await } -    it 'gets default values of searchable attributes' do -      settings = index.searchable_attributes -      expect(settings).to eq(default_searchable_attributes) +    it '#searchable_attributes gets default values of searchable attributes' do +      expect(index.searchable_attributes).to eq(default_searchable_attributes)      end -    it 'updates searchable 
attributes' do - task = index.update_searchable_attributes(searchable_attributes) + describe '#update_searchable_attributes' do + it 'updates searchable attributes' do + task = index.update_searchable_attributes(searchable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - expect(index.searchable_attributes).to eq(searchable_attributes) - end + expect(index.searchable_attributes).to eq(searchable_attributes) + end - it 'updates searchable attributes at null' do - task = index.update_searchable_attributes(searchable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it 'resets searchable attributes when passed nil' do + task = index.update_searchable_attributes(searchable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - task = index.update_searchable_attributes(nil) - expect(task['type']).to eq('settingsUpdate') + task = index.update_searchable_attributes(nil) + expect(task.type).to eq('settingsUpdate') + task.await - client.wait_for_task(task['taskUid']) - - expect(index.searchable_attributes).to eq(default_searchable_attributes) + expect(index.searchable_attributes).to eq(default_searchable_attributes) + end end - it 'resets searchable attributes' do + it '#reset_searchable_attributes resets searchable attributes' do task = index.update_searchable_attributes(searchable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await task = index.reset_searchable_attributes + expect(task.type).to eq('settingsUpdate') + expect(task.await).to be_succeeded - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - expect(index.task(task['taskUid'])['status']).to eq('succeeded') expect(index.searchable_attributes).to eq(default_searchable_attributes) end end - context 'On 
displayed-attributes sub-routes' do + context 'On displayed attributes' do let(:index) { client.index(uid) } let(:displayed_attributes) { ['title', 'description'] } before { client.create_index(uid).await } - it 'gets default values of displayed attributes' do - settings = index.displayed_attributes - expect(settings).to eq(default_displayed_attributes) + it '#displayed_attributes gets default values of displayed attributes' do + expect(index.displayed_attributes).to eq(default_displayed_attributes) end - it 'updates displayed attributes' do - task = index.update_displayed_attributes(displayed_attributes) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + describe '#update_displayed_attributes' do + it 'updates displayed attributes' do + task = index.update_displayed_attributes(displayed_attributes) - expect(index.displayed_attributes).to contain_exactly(*displayed_attributes) - end + expect(task.type).to eq('settingsUpdate') + task.await - it 'updates displayed attributes at null' do - task = index.update_displayed_attributes(displayed_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(index.displayed_attributes).to contain_exactly(*displayed_attributes) + end - task = index.update_displayed_attributes(nil) + it 'resets displayed attributes when passed nil' do + task = index.update_displayed_attributes(displayed_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.update_displayed_attributes(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.displayed_attributes).to eq(default_displayed_attributes) + expect(index.displayed_attributes).to eq(default_displayed_attributes) + end end - it 'resets displayed attributes' do + it '#reset_displayed_attributes resets displayed attributes' do task = 
index.update_displayed_attributes(displayed_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await task = index.reset_displayed_attributes + expect(task.type).to eq('settingsUpdate') + expect(task.await).to be_succeeded - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - expect(index.task(task['taskUid'])['status']).to eq('succeeded') expect(index.displayed_attributes).to eq(default_displayed_attributes) end end - context 'On synonyms sub-routes' do + context 'On synonyms' do let(:index) { client.index(uid) } let(:synonyms) do { @@ -310,232 +310,207 @@ before { client.create_index(uid).await } - it 'gets an empty hash of synonyms by default' do - settings = index.synonyms - expect(settings).to be_a(Hash) - expect(settings).to be_empty - end - - it 'returns an uid when updating' do - task = index.update_synonyms(synonyms) - expect(task).to be_a(Hash) + describe '#synonyms' do + it 'gets an empty hash of synonyms by default' do + expect(index.synonyms).to eq({}) + end - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it 'gets all the synonyms' do + index.update_synonyms(synonyms).await + expect(index.synonyms).to match( + 'wow' => ['world of warcraft'], + 'wolverine' => ['xmen', 'logan'], + 'logan' => ['wolverine', 'xmen'] + ) + end end - it 'gets all the synonyms' do - update_synonyms(index, synonyms) - settings = index.synonyms - expect(settings).to be_a(Hash) - expect(settings.count).to eq(3) - expect(settings.keys).to contain_exactly('wow', 'wolverine', 'logan') - expect(settings['wow']).to be_a(Array) - expect(settings['wow']).to eq(['world of warcraft']) - end + describe '#update_synonyms' do + it 'overwrites all existing synonyms' do + index.update_synonyms(synonyms).await + index.update_synonyms(hp: ['harry potter'], 'harry potter': ['hp']).await - it 'overwrites all synonyms when 
updating' do - update_synonyms(index, synonyms) - update_synonyms(index, hp: ['harry potter'], 'harry potter': ['hp']) - synonyms = index.synonyms - expect(synonyms).to be_a(Hash) - expect(synonyms.count).to eq(2) - expect(synonyms.keys).to contain_exactly('hp', 'harry potter') - expect(synonyms['hp']).to be_a(Array) - expect(synonyms['hp']).to eq(['harry potter']) - end + expect(index.synonyms).to match( + 'hp' => ['harry potter'], 'harry potter' => ['hp'] + ) + end - it 'updates synonyms at null' do - update_synonyms(index, synonyms) + it 'resets synonyms when passed nil' do + index.update_synonyms(synonyms).await + expect(index.synonyms).not_to be_empty - expect do - update_synonyms(index, nil) - end.to(change { index.synonyms.length }.from(3).to(0)) + index.update_synonyms(nil).await + expect(index.synonyms).to eq({}) + end end - it 'deletes all the synonyms' do - update_synonyms(index, synonyms) - - expect do - task = index.reset_synonyms - - expect(task).to be_a(Hash) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it '#reset_synonyms deletes all the synonyms' do + index.update_synonyms(synonyms).await + expect(index.synonyms).not_to be_empty - expect(index.synonyms).to be_a(Hash) - end.to(change { index.synonyms.length }.from(3).to(0)) + index.reset_synonyms.await + expect(index.synonyms).to eq({}) end end - context 'On stop-words sub-routes' do + context 'On stop words' do let(:index) { client.index(uid) } let(:stop_words_array) { ['the', 'of'] } let(:stop_words_string) { 'a' } before { client.create_index(uid).await } - it 'gets an empty array when there is no stop-words' do - settings = index.stop_words - expect(settings).to be_a(Array) - expect(settings).to be_empty - end - - it 'updates stop-words when the body is valid (as an array)' do - task = index.update_stop_words(stop_words_array) - expect(task).to be_a(Hash) + describe '#stop_words' do + it 'gets an empty array when there is no stop-words' do + 
expect(index.stop_words).to eq([]) + end - expect(task['type']).to eq('settingsUpdate') - end + it 'gets list of stop-words' do + task = index.update_stop_words(stop_words_array) + expect(task.type).to eq('settingsUpdate') + task.await - it 'gets list of stop-words' do - task = index.update_stop_words(stop_words_array) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.stop_words - expect(settings).to be_a(Array) - expect(settings).to contain_exactly(*stop_words_array) + expect(index.stop_words).to contain_exactly(*stop_words_array) + end end - it 'updates stop-words when the body is valid (as single string)' do - task = index.update_stop_words(stop_words_string) - expect(task).to be_a(Hash) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - sw = index.stop_words - expect(sw).to be_a(Array) - expect(sw).to contain_exactly(stop_words_string) - end + describe '#update_stop_words' do + it 'updates stop words when passed an array' do + index.update_stop_words(stop_words_array).await + expect(index.stop_words).to contain_exactly(*stop_words_array) + end - it 'updates stop-words at null' do - task = index.update_stop_words(stop_words_string) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it 'updates stop-words when passed a string' do + index.update_stop_words(stop_words_string).await + expect(index.stop_words).to contain_exactly(stop_words_string) + end - task = index.update_stop_words(nil) + it 'resets stop words when passed nil' do + task = index.update_stop_words(stop_words_string) + expect(task.type).to eq('settingsUpdate') + task.await - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.update_stop_words(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.stop_words).to be_empty - end + expect(index.stop_words).to be_empty + end - it 'returns an error when 
the body is invalid' do - expect do - index.update_stop_words(test: 'test') - end.to raise_meilisearch_api_error_with(400, 'invalid_settings_stop_words', 'invalid_request') + it 'raises an error when the body is invalid' do + expect do + index.update_stop_words(test: 'test') + end.to raise_meilisearch_api_error_with(400, 'invalid_settings_stop_words', 'invalid_request') + end end - it 'resets stop-words' do + it '#reset_stop_words resets stop-words' do task = index.update_stop_words(stop_words_string) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await - task = index.reset_stop_words - expect(task).to be_a(Hash) + expect(index.stop_words).to contain_exactly(stop_words_string) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.reset_stop_words + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.stop_words).to be_a(Array) - expect(index.stop_words).to be_empty + expect(index.stop_words).to eq([]) end end - context 'On filterable-attributes sub-routes' do + context 'On filterable attributes' do let(:index) { client.index(uid) } let(:filterable_attributes) { ['title', 'description'] } before { client.create_index(uid).await } - it 'gets default values of filterable attributes' do - settings = index.filterable_attributes - expect(settings).to be_a(Array) - expect(settings).to be_empty + it '#filterable_attributes gets default values of filterable attributes' do + expect(index.filterable_attributes).to eq([]) end - it 'updates filterable attributes' do - task = index.update_filterable_attributes(filterable_attributes) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - expect(index.filterable_attributes).to contain_exactly(*filterable_attributes) - end + describe '#update_filterable_attributes' do + it 'updates filterable attributes' do + task = 
index.update_filterable_attributes(filterable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - it 'updates filterable attributes at null' do - task = index.update_filterable_attributes(filterable_attributes) + expect(index.filterable_attributes).to contain_exactly(*filterable_attributes) + end - expect(task['type']).to eq('settingsUpdate') + it 'resets filterable attributes when passed nil' do + task = index.update_filterable_attributes(filterable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await + expect(index.filterable_attributes).to contain_exactly(*filterable_attributes) - task = index.update_filterable_attributes(nil) + task = index.update_filterable_attributes(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - expect(index.filterable_attributes).to be_empty + expect(index.filterable_attributes).to be_empty + end end - it 'resets filterable attributes' do + it '#reset_filterable_attributes resets filterable attributes' do task = index.update_filterable_attributes(filterable_attributes) - - expect(task['type']).to eq('settingsUpdate') + expect(task.type).to eq('settingsUpdate') + task.await + expect(index.filterable_attributes).to contain_exactly(*filterable_attributes) task = index.reset_filterable_attributes + expect(task.type).to eq('settingsUpdate') + task.await - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - expect(index.task(task['taskUid'])['status']).to eq('succeeded') expect(index.filterable_attributes).to be_empty end end - context 'On sortable-attributes sub-routes' do + context 'On sortable attributes' do let(:index) { client.index(uid) } let(:sortable_attributes) { ['title', 'description'] } before { client.create_index(uid).await } it 'gets default values of sortable attributes' do - settings = index.sortable_attributes - expect(settings).to be_a(Array) - 
expect(settings).to be_empty + expect(index.sortable_attributes).to eq([]) end - it 'updates sortable attributes' do - task = index.update_sortable_attributes(sortable_attributes) + describe '#update_sortable_attributes' do + it 'updates sortable attributes' do + task = index.update_sortable_attributes(sortable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - client.wait_for_task(task['taskUid']) - expect(task['type']).to eq('settingsUpdate') - expect(index.sortable_attributes).to contain_exactly(*sortable_attributes) - end + expect(index.sortable_attributes).to contain_exactly(*sortable_attributes) + end - it 'updates sortable attributes at null' do - task = index.update_sortable_attributes(sortable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + it 'resets sortable attributes when passed nil' do + task = index.update_sortable_attributes(sortable_attributes) + expect(task.type).to eq('settingsUpdate') + task.await - task = index.update_sortable_attributes(nil) + expect(index.sortable_attributes).to contain_exactly(*sortable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.update_sortable_attributes(nil) + expect(task.type).to eq('settingsUpdate') + task.await - expect(index.sortable_attributes).to be_empty + expect(index.sortable_attributes).to be_empty + end end it 'resets sortable attributes' do task = index.update_sortable_attributes(sortable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('settingsUpdate') + task.await - task = index.reset_sortable_attributes + expect(index.sortable_attributes).to contain_exactly(*sortable_attributes) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) + task = index.reset_sortable_attributes + expect(task.type).to eq('settingsUpdate') + task.await - 
expect(index.task(task['taskUid'])['status']).to eq('succeeded') expect(index.sortable_attributes).to be_empty end end @@ -665,12 +640,6 @@ end end - def update_synonyms(index, synonyms) - task = index.update_synonyms(synonyms) - - client.wait_for_task(task['taskUid']) - end - context 'On pagination sub-routes' do let(:index) { client.index(uid) } let(:pagination) { { maxTotalHits: 3141 } } From b1bc1a0120920315fc6181ee0645b2754e59e358 Mon Sep 17 00:00:00 2001 From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Thu, 12 Oct 2023 15:48:55 +0200 Subject: [PATCH 05/12] Remove redundant tests --- spec/meilisearch/index/settings_spec.rb | 124 ++---------------------- 1 file changed, 8 insertions(+), 116 deletions(-) diff --git a/spec/meilisearch/index/settings_spec.rb b/spec/meilisearch/index/settings_spec.rb index 02d8abe8..afad362f 100644 --- a/spec/meilisearch/index/settings_spec.rb +++ b/spec/meilisearch/index/settings_spec.rb @@ -515,128 +515,20 @@ end end - context 'Index with primary-key' do - let(:index) { client.index(uid) } - - before { client.create_index(uid, primary_key: 'id').await } - - it 'gets the default values of settings' do - settings = index.settings - expect(settings).to be_a(Hash) - expect(settings.keys).to include(*settings_keys) - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['searchableAttributes']).to eq(default_searchable_attributes) - expect(settings['displayedAttributes']).to eq(default_displayed_attributes) - expect(settings['stopWords']).to eq([]) - expect(settings['synonyms']).to eq({}) - end - - it 'updates multiples settings at the same time' do - task = index.update_settings( - ranking_rules: ['title:asc', 'typo'], - distinct_attribute: 'title' - ) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.settings - expect(settings['rankingRules']).to eq(['title:asc', 'typo']) - 
expect(settings['distinctAttribute']).to eq('title') - expect(settings['stopWords']).to be_empty - end - - it 'updates one setting without reset the others' do - task = index.update_settings(stop_words: ['the']) - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - settings = index.settings - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['stopWords']).to eq(['the']) - expect(settings['synonyms']).to be_empty - end - - it 'resets all settings' do - task = index.update_settings( - ranking_rules: ['title:asc', 'typo'], - distinct_attribute: 'title', - stop_words: ['the'], - synonyms: { - wow: ['world of warcraft'] - } - ) - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - task = index.reset_settings - - expect(task['type']).to eq('settingsUpdate') - client.wait_for_task(task['taskUid']) - - settings = index.settings - expect(settings['rankingRules']).to eq(default_ranking_rules) - expect(settings['distinctAttribute']).to be_nil - expect(settings['stopWords']).to be_empty - expect(settings['synonyms']).to be_empty - end - end - - context 'Manipulation of searchable/displayed attributes with the primary-key' do - let(:index) { client.index(random_uid) } - - it 'does not add document when there is no primary-key' do - task = index.add_documents(title: 'Test') - task = client.wait_for_task(task['taskUid']) - - expect(task.keys).to include('error') - expect(task['error']['code']).to eq('index_primary_key_no_candidate_found') - end - - it 'adds documents when there is a primary-key' do - task = index.add_documents(objectId: 1, title: 'Test') - - client.wait_for_task(task['taskUid']) - expect(index.documents['results'].count).to eq(1) - end - - it 'resets searchable/displayed attributes' do - task = index.update_displayed_attributes(['title', 'description']) - client.wait_for_task(task['taskUid']) - task = 
index.update_searchable_attributes(['title']) - - client.wait_for_task(task['taskUid']) - - task = index.reset_displayed_attributes - - client.wait_for_task(task['taskUid']) - expect(index.task(task['taskUid'])['status']).to eq('succeeded') - - task = index.reset_searchable_attributes - - client.wait_for_task(task['taskUid']) - expect(index.task(task['taskUid'])['status']).to eq('succeeded') - - expect(index.displayed_attributes).to eq(['*']) - expect(index.searchable_attributes).to eq(['*']) - end - end - context 'Aliases' do let(:index) { client.index(uid) } before { client.create_index(uid).await } it 'works with method aliases' do - expect(index.method(:settings) == index.method(:get_settings)).to be_truthy - expect(index.method(:ranking_rules) == index.method(:get_ranking_rules)).to be_truthy - expect(index.method(:distinct_attribute) == index.method(:get_distinct_attribute)).to be_truthy - expect(index.method(:searchable_attributes) == index.method(:get_searchable_attributes)).to be_truthy - expect(index.method(:displayed_attributes) == index.method(:get_displayed_attributes)).to be_truthy - expect(index.method(:synonyms) == index.method(:get_synonyms)).to be_truthy - expect(index.method(:stop_words) == index.method(:get_stop_words)).to be_truthy - expect(index.method(:filterable_attributes) == index.method(:get_filterable_attributes)).to be_truthy + expect(index.method(:settings)).to eq index.method(:get_settings) + expect(index.method(:ranking_rules)).to eq index.method(:get_ranking_rules) + expect(index.method(:distinct_attribute)).to eq index.method(:get_distinct_attribute) + expect(index.method(:searchable_attributes)).to eq index.method(:get_searchable_attributes) + expect(index.method(:displayed_attributes)).to eq index.method(:get_displayed_attributes) + expect(index.method(:synonyms)).to eq index.method(:get_synonyms) + expect(index.method(:stop_words)).to eq index.method(:get_stop_words) + expect(index.method(:filterable_attributes)).to eq 
index.method(:get_filterable_attributes) end end From 4d7c1a36d6058ce357d1a02b7c04b7340e6c1fe0 Mon Sep 17 00:00:00 2001 From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Thu, 12 Oct 2023 15:54:21 +0200 Subject: [PATCH 06/12] Update more Index methods with Model::Task Update typo tolerance methods with Model::Task Update faceting settings methods with Model::Task Update user dict methods to use Model::Task Update (non) separator methods to use Model::Task Update Index#update and #delete to use Model::Task --- .rubocop_todo.yml | 8 +- lib/meilisearch/index.rb | 49 +++--- spec/meilisearch/client/indexes_spec.rb | 4 +- spec/meilisearch/index/base_spec.rb | 30 ++-- spec/meilisearch/index/documents_spec.rb | 12 +- spec/meilisearch/index/settings_spec.rb | 198 ++++++++++------------- 6 files changed, 137 insertions(+), 164 deletions(-) diff --git a/.rubocop_todo.yml b/.rubocop_todo.yml index afda34df..4a6eac57 100644 --- a/.rubocop_todo.yml +++ b/.rubocop_todo.yml @@ -1,21 +1,21 @@ # This configuration was generated by # `rubocop --auto-gen-config` -# on 2024-02-16 17:57:26 UTC using RuboCop version 1.50.2. +# on 2024-02-16 18:01:53 UTC using RuboCop version 1.50.2. # The point is for the user to remove these configuration records # one by one as the offenses are removed from the code base. # Note that changes in the inspected code, or installation of new # versions of RuboCop, may require this file to be generated again. -# Offense count: 64 +# Offense count: 63 # Configuration parameters: CountComments, CountAsOne, AllowedMethods, AllowedPatterns. # AllowedMethods: refine Metrics/BlockLength: - Max: 693 + Max: 581 # Offense count: 4 # Configuration parameters: CountComments, CountAsOne. Metrics/ClassLength: - Max: 401 + Max: 421 # Offense count: 1 # Configuration parameters: Max, CountKeywordArgs. 
diff --git a/lib/meilisearch/index.rb b/lib/meilisearch/index.rb index 78fb022a..7ef1a2bb 100644 --- a/lib/meilisearch/index.rb +++ b/lib/meilisearch/index.rb @@ -30,13 +30,15 @@ def fetch_raw_info end def update(body) - http_patch indexes_path(id: @uid), Utils.transform_attributes(body) + response = http_patch indexes_path(id: @uid), Utils.transform_attributes(body) + MeiliSearch::Model::Task.new(response, task_endpoint) end alias update_index update def delete - http_delete indexes_path(id: @uid) + response = http_delete indexes_path(id: @uid) + MeiliSearch::Model::Task.new(response, task_endpoint) end alias delete_index delete @@ -165,12 +167,7 @@ def add_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) 'index.add_documents_in_batches(...).await' ) - tasks = add_documents_in_batches(documents, batch_size, primary_key) - responses = [] - tasks.each do |task_obj| - responses.append(wait_for_task(task_obj['taskUid'])) - end - responses + add_documents_in_batches(documents, batch_size, primary_key).each(&:await) end def update_documents_in_batches(documents, batch_size = 1000, primary_key = nil) @@ -487,12 +484,14 @@ def pagination alias get_pagination pagination def update_pagination(pagination) - http_patch "/indexes/#{@uid}/settings/pagination", pagination + response = http_patch "/indexes/#{@uid}/settings/pagination", pagination + MeiliSearch::Model::Task.new(response, task_endpoint) end alias pagination= update_sortable_attributes def reset_pagination - http_delete "/indexes/#{@uid}/settings/pagination" + response = http_delete "/indexes/#{@uid}/settings/pagination" + MeiliSearch::Model::Task.new(response, task_endpoint) end def typo_tolerance @@ -502,12 +501,14 @@ def typo_tolerance def update_typo_tolerance(typo_tolerance_attributes) attributes = Utils.transform_attributes(typo_tolerance_attributes) - http_patch("/indexes/#{@uid}/settings/typo-tolerance", attributes) + response = http_patch("/indexes/#{@uid}/settings/typo-tolerance", 
attributes) + MeiliSearch::Model::Task.new(response, task_endpoint) end alias typo_tolerance= update_typo_tolerance def reset_typo_tolerance - http_delete("/indexes/#{@uid}/settings/typo-tolerance") + response = http_delete("/indexes/#{@uid}/settings/typo-tolerance") + MeiliSearch::Model::Task.new(response, task_endpoint) end def faceting @@ -517,12 +518,14 @@ def faceting def update_faceting(faceting_attributes) attributes = Utils.transform_attributes(faceting_attributes) - http_patch("/indexes/#{@uid}/settings/faceting", attributes) + response = http_patch("/indexes/#{@uid}/settings/faceting", attributes) + MeiliSearch::Model::Task.new(response, task_endpoint) end alias faceting= update_faceting def reset_faceting - http_delete("/indexes/#{@uid}/settings/faceting") + response = http_delete("/indexes/#{@uid}/settings/faceting") + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - DICTIONARY @@ -533,11 +536,13 @@ def dictionary def update_dictionary(dictionary_attributes) attributes = Utils.transform_attributes(dictionary_attributes) - http_put("/indexes/#{@uid}/settings/dictionary", attributes) + response = http_put("/indexes/#{@uid}/settings/dictionary", attributes) + MeiliSearch::Model::Task.new(response, task_endpoint) end def reset_dictionary - http_delete("/indexes/#{@uid}/settings/dictionary") + response = http_delete("/indexes/#{@uid}/settings/dictionary") + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - SEPARATOR TOKENS @@ -547,11 +552,13 @@ def separator_tokens def update_separator_tokens(separator_tokens_attributes) attributes = Utils.transform_attributes(separator_tokens_attributes) - http_put("/indexes/#{@uid}/settings/separator-tokens", attributes) + response = http_put("/indexes/#{@uid}/settings/separator-tokens", attributes) + MeiliSearch::Model::Task.new(response, task_endpoint) end def reset_separator_tokens - http_delete("/indexes/#{@uid}/settings/separator-tokens") + response = 
http_delete("/indexes/#{@uid}/settings/separator-tokens") + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - NON SEPARATOR TOKENS @@ -562,11 +569,13 @@ def non_separator_tokens def update_non_separator_tokens(non_separator_tokens_attributes) attributes = Utils.transform_attributes(non_separator_tokens_attributes) - http_put("/indexes/#{@uid}/settings/non-separator-tokens", attributes) + response = http_put("/indexes/#{@uid}/settings/non-separator-tokens", attributes) + MeiliSearch::Model::Task.new(response, task_endpoint) end def reset_non_separator_tokens - http_delete("/indexes/#{@uid}/settings/non-separator-tokens") + response = http_delete("/indexes/#{@uid}/settings/non-separator-tokens") + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SETTINGS - PROXIMITY PRECISION diff --git a/spec/meilisearch/client/indexes_spec.rb b/spec/meilisearch/client/indexes_spec.rb index dd92abe9..069a6893 100644 --- a/spec/meilisearch/client/indexes_spec.rb +++ b/spec/meilisearch/client/indexes_spec.rb @@ -35,7 +35,9 @@ it 'warns about deprecation' do client.create_index!('books') - expect(MeiliSearch::Utils).to have_received(:soft_deprecate).with('Client#create_index!', a_string_matching(/books/)) + expect(MeiliSearch::Utils) + .to have_received(:soft_deprecate) + .with('Client#create_index!', a_string_including('books')) end end diff --git a/spec/meilisearch/index/base_spec.rb b/spec/meilisearch/index/base_spec.rb index 4d588b15..f4c1508e 100644 --- a/spec/meilisearch/index/base_spec.rb +++ b/spec/meilisearch/index/base_spec.rb @@ -55,8 +55,8 @@ client.create_index('uid').await task = client.index('uid').update(primary_key: 'new_primary_key') - expect(task['type']).to eq('indexUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexUpdate') + task.await index = client.fetch_index('uid') expect(index).to be_a(MeiliSearch::Index) @@ -73,8 +73,8 @@ client.create_index('books', primary_key: 'reference_number').await task 
= client.index('books').update(primary_key: 'international_standard_book_number') - expect(task['type']).to eq('indexUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexUpdate') + task.await index = client.fetch_index('books') expect(index).to be_a(MeiliSearch::Index) @@ -92,11 +92,11 @@ index.add_documents({ id: 1, title: 'My Title' }).await task = index.update(primary_key: 'new_primary_key') - expect(task['type']).to eq('indexUpdate') - achieved_task = client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexUpdate') - expect(achieved_task['status']).to eq('failed') - expect(achieved_task['error']['code']).to eq('index_primary_key_already_exists') + task.await + expect(task).to be_failed + expect(task.error['code']).to eq('index_primary_key_already_exists') end it 'supports options' do @@ -158,9 +158,9 @@ client.create_index('uid').await task = client.index('uid').delete - expect(task['type']).to eq('indexDeletion') - achieved_task = client.wait_for_task(task['taskUid']) - expect(achieved_task['status']).to eq('succeeded') + expect(task.type).to eq('indexDeletion') + task.await + expect(task).to be_succeeded expect { client.fetch_index('uid') }.to raise_index_not_found_meilisearch_api_error end @@ -168,8 +168,8 @@ client.create_index('uid').await task = client.index('uid').delete - expect(task['type']).to eq('indexDeletion') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexDeletion') + task.await index = client.index('uid') expect { index.fetch_primary_key }.to raise_index_not_found_meilisearch_api_error @@ -190,8 +190,8 @@ client.create_index('uid').await task = client.index('uid').update(primary_key: 'new_primary_key') - expect(task['type']).to eq('indexUpdate') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexUpdate') + task.await index = client.fetch_index('uid') expect(index).to be_a(MeiliSearch::Index) diff --git a/spec/meilisearch/index/documents_spec.rb 
b/spec/meilisearch/index/documents_spec.rb index 9d564c6b..26642740 100644 --- a/spec/meilisearch/index/documents_spec.rb +++ b/spec/meilisearch/index/documents_spec.rb @@ -195,16 +195,8 @@ describe '#add_documents_in_batches!' do before { allow(MeiliSearch::Utils).to receive(:soft_deprecate).and_return(nil) } - it 'adds document batches synchronously (as an array of documents)' do - task = index.add_documents_in_batches(documents, 5).each(&:await) - expect(task).to be_a(Array) - expect(task.count).to eq(2) # 2 batches, since we start with 5 < documents.count <= 10 documents - task.each do |task_object| - expect(task_object).to have_key('uid') - expect(task_object).to have_key('status') - expect(task_object['status']).not_to eql('enqueued') - expect(task_object['status']).to eql('succeeded') - end + it 'adds document batches synchronously' do + expect(index.add_documents_in_batches!(documents, 5)).to contain_exactly(be_succeeded, be_succeeded) expect(index.documents['results'].count).to eq(documents.count) end diff --git a/spec/meilisearch/index/settings_spec.rb b/spec/meilisearch/index/settings_spec.rb index afad362f..9ca7cbd3 100644 --- a/spec/meilisearch/index/settings_spec.rb +++ b/spec/meilisearch/index/settings_spec.rb @@ -532,43 +532,38 @@ end end - context 'On pagination sub-routes' do + context 'On pagination' do let(:index) { client.index(uid) } let(:pagination) { { maxTotalHits: 3141 } } + let(:pagination_with_string_keys) { pagination.transform_keys(&:to_s) } before { client.create_index(uid).await } - it 'gets default values of pagination' do - settings = index.pagination.transform_keys(&:to_sym) - - expect(settings).to eq(default_pagination) - end - - it 'updates pagination' do - task = index.update_pagination(pagination) - client.wait_for_task(task['taskUid']) - - expect(index.pagination.transform_keys(&:to_sym)).to eq(pagination) + it '#pagination gets default values of pagination' do + expect(index.pagination).to 
eq(default_pagination.transform_keys(&:to_s)) end - it 'updates pagination at null' do - task = index.update_pagination(pagination) - client.wait_for_task(task['taskUid']) + describe '#update_pagination' do + it 'updates pagination' do + index.update_pagination(pagination).await + expect(index.pagination).to eq(pagination_with_string_keys) + end - task = index.update_pagination(nil) - client.wait_for_task(task['taskUid']) + it 'resets pagination when passed nil' do + index.update_pagination(pagination).await + expect(index.pagination).to eq(pagination_with_string_keys) - expect(index.pagination.transform_keys(&:to_sym)).to eq(default_pagination) + index.update_pagination(nil).await + expect(index.pagination).to eq(default_pagination.transform_keys(&:to_s)) + end end - it 'resets pagination' do - task = index.update_pagination(pagination) - client.wait_for_task(task['taskUid']) + it '#reset_pagination resets pagination' do + index.update_pagination(pagination).await + expect(index.pagination).to eq(pagination_with_string_keys) - task = index.reset_pagination - client.wait_for_task(task['taskUid']) - - expect(index.pagination.transform_keys(&:to_sym)).to eq(default_pagination) + index.reset_pagination.await + expect(index.pagination).to eq(default_pagination.transform_keys(&:to_s)) end end @@ -602,26 +597,21 @@ before { client.create_index(uid).await } - it 'gets default typo tolerance settings' do - settings = index.typo_tolerance - - expect(settings).to eq(default_typo_tolerance) + it '#typo_tolerance gets default typo tolerance settings' do + expect(index.typo_tolerance).to eq(default_typo_tolerance) end - it 'updates typo tolerance settings' do - update_task = index.update_typo_tolerance(new_typo_tolerance) - client.wait_for_task(update_task['taskUid']) + it '#update_typo_tolerance updates typo tolerance settings' do + index.update_typo_tolerance(new_typo_tolerance).await expect(index.typo_tolerance).to 
eq(MeiliSearch::Utils.transform_attributes(new_typo_tolerance)) end - it 'resets typo tolerance settings' do - update_task = index.update_typo_tolerance(new_typo_tolerance) - client.wait_for_task(update_task['taskUid']) - - reset_task = index.reset_typo_tolerance - client.wait_for_task(reset_task['taskUid']) + it '#reset_typo_tolerance resets typo tolerance settings' do + index.update_typo_tolerance(new_typo_tolerance).await + expect(index.typo_tolerance).to eq(MeiliSearch::Utils.transform_attributes(new_typo_tolerance)) + index.reset_typo_tolerance.await expect(index.typo_tolerance).to eq(default_typo_tolerance) end end @@ -629,122 +619,102 @@ context 'On faceting' do let(:index) { client.index(uid) } let(:default_faceting) { { maxValuesPerFacet: 100, sortFacetValuesBy: { '*' => 'alpha' } } } + let(:default_faceting_with_string_keys) { default_faceting.transform_keys(&:to_s) } before { client.create_index(uid).await } - it 'gets default values of faceting' do - settings = index.faceting.transform_keys(&:to_sym) - - expect(settings.keys).to include(*default_faceting.keys) + it '#faceting gets default values of faceting' do + expect(index.faceting).to eq(default_faceting_with_string_keys) end - it 'updates faceting' do - update_task = index.update_faceting({ 'max_values_per_facet' => 333 }) - client.wait_for_task(update_task['taskUid']) - - expect(index.faceting['maxValuesPerFacet']).to eq(333) - expect(index.faceting.transform_keys(&:to_sym).keys).to include(*default_faceting.keys) - end + describe '#update_faceting' do + it 'updates faceting' do + index.update_faceting({ 'max_values_per_facet' => 333 }).await + new_faceting = default_faceting_with_string_keys.merge('maxValuesPerFacet' => 333) - it 'updates faceting at null' do - update_task = index.update_faceting({ 'max_values_per_facet' => 444 }) - client.wait_for_task(update_task['taskUid']) + expect(index.faceting).to eq(new_faceting) + end - update_task = index.update_faceting(nil) - 
client.wait_for_task(update_task['taskUid']) + it 'resets faceting when passed nil' do + index.update_faceting({ 'max_values_per_facet' => 333 }).await + new_faceting = default_faceting_with_string_keys.merge('maxValuesPerFacet' => 333) + expect(index.faceting).to eq(new_faceting) - expect(index.faceting.transform_keys(&:to_sym).keys).to include(*default_faceting.keys) + index.update_faceting(nil).await + expect(index.faceting).to eq(default_faceting_with_string_keys) + end end - it 'resets faceting' do - update_task = index.update_faceting({ 'max_values_per_facet' => 444 }) - client.wait_for_task(update_task['taskUid']) - - reset_task = index.reset_faceting - client.wait_for_task(reset_task['taskUid']) + it '#reset_faceting resets faceting' do + index.update_faceting({ 'max_values_per_facet' => 333 }).await + new_faceting = default_faceting_with_string_keys.merge('maxValuesPerFacet' => 333) + expect(index.faceting).to eq(new_faceting) - expect(index.faceting.transform_keys(&:to_sym).keys).to include(*default_faceting.keys) + index.reset_faceting.await + expect(index.faceting).to eq(default_faceting_with_string_keys) end end context 'On user-defined dictionary' do let(:index) { client.index(uid) } - before { client.create_index!(uid) } + before { client.create_index(uid).await } it 'has no default value' do - settings = index.dictionary - - expect(settings).to be_empty + expect(index.dictionary).to eq([]) end - it 'updates dictionary' do - update_task = index.update_dictionary(['J. R. R.', 'W. E. B.']) - client.wait_for_task(update_task['taskUid']) - + it '#update_dictionary updates dictionary' do + index.update_dictionary(['J. R. R.', 'W. E. B.']).await expect(index.dictionary).to contain_exactly('J. R. R.', 'W. E. B.') end - it 'resets dictionary' do - update_task = index.update_dictionary(['J. R. R.', 'W. E. 
B.']) - client.wait_for_task(update_task['taskUid']) - - reset_task = index.reset_dictionary - client.wait_for_task(reset_task['taskUid']) + it '#reset_dictionary resets dictionary' do + index.update_dictionary(['J. R. R.', 'W. E. B.']).await + expect(index.dictionary).to contain_exactly('J. R. R.', 'W. E. B.') - expect(index.dictionary).to be_empty + index.reset_dictionary.await + expect(index.dictionary).to eq([]) end end context 'On separator tokens' do let(:index) { client.index(uid) } - before { client.create_index!(uid) } - - describe 'separator_tokens' do - it 'has no default value' do - expect(index.separator_tokens).to be_empty - end - - it 'updates separator tokens' do - update_task = index.update_separator_tokens ['|', '…'] - client.wait_for_task(update_task['taskUid']) - - expect(index.separator_tokens).to contain_exactly('|', '…') - end - - it 'resets separator tokens' do - update_task = index.update_separator_tokens ['|', '…'] - client.wait_for_task(update_task['taskUid']) + before { client.create_index(uid).await } - reset_task = index.reset_separator_tokens - client.wait_for_task(reset_task['taskUid']) + it '#separator_tokens has no default value' do + expect(index.separator_tokens).to eq([]) + end - expect(index.separator_tokens).to be_empty - end + it '#update_separator_tokens updates separator tokens' do + index.update_separator_tokens(['|', '…']).await + expect(index.separator_tokens).to contain_exactly('|', '…') end - describe '#non_separator_tokens' do - it 'has no default value' do - expect(index.non_separator_tokens).to be_empty - end + it '#reset_separator_tokens resets separator tokens' do + index.update_separator_tokens(['|', '…']).await + expect(index.separator_tokens).to contain_exactly('|', '…') - it 'updates non separator tokens' do - update_task = index.update_non_separator_tokens ['@', '#'] - client.wait_for_task(update_task['taskUid']) + index.reset_separator_tokens.await + expect(index.separator_tokens).to eq([]) + end - 
expect(index.non_separator_tokens).to contain_exactly('@', '#') - end + it '#non_separator_tokens has no default value' do + expect(index.non_separator_tokens).to eq([]) + end - it 'resets non separator tokens' do - update_task = index.update_non_separator_tokens ['@', '#'] - client.wait_for_task(update_task['taskUid']) + it '#update_non_separator_tokens updates non separator tokens' do + index.update_non_separator_tokens(['@', '#']).await + expect(index.non_separator_tokens).to contain_exactly('@', '#') + end - reset_task = index.reset_non_separator_tokens - client.wait_for_task(reset_task['taskUid']) + it '#reset_non_separator_tokens resets non separator tokens' do + index.update_non_separator_tokens(['@', '#']).await + expect(index.non_separator_tokens).to contain_exactly('@', '#') - expect(index.non_separator_tokens).to be_empty - end + index.reset_non_separator_tokens.await + expect(index.non_separator_tokens).to eq([]) end describe '#proximity_precision' do From 45bb7ce536f2be157cb73d1918a825a4d2885d0b Mon Sep 17 00:00:00 2001 From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Thu, 12 Oct 2023 17:11:58 +0200 Subject: [PATCH 07/12] Refactor specs with Task#await syntax Replace calls to wait_for_task in specs which I had missed somehow Remove now-redundant TaskHelpers Update some Client methods&specs with Model::Task Add Model::Task to Client#dump and other specs --- lib/meilisearch/client.rb | 9 +++--- spec/meilisearch/client/dumps_spec.rb | 8 +---- spec/meilisearch/client/indexes_spec.rb | 32 ++++++++----------- spec/meilisearch/client/multi_search_spec.rb | 3 +- spec/meilisearch/client/token_spec.rb | 5 +-- spec/meilisearch/index/documents_spec.rb | 6 ++-- .../index/search/facets_distribution_spec.rb | 3 +- spec/meilisearch/index/search/filter_spec.rb | 5 +-- .../index/search/multi_params_spec.rb | 11 ++----- .../index/search/nested_fields_spec.rb | 10 +++--- spec/meilisearch/index/search/offset_spec.rb | 3 +- 
spec/meilisearch/index/search/q_spec.rb | 19 ++++++----- spec/meilisearch/index/search/sort_spec.rb | 21 ++++++------ spec/spec_helper.rb | 1 - spec/support/books_contexts.rb | 15 ++------- spec/support/task_helpers.rb | 9 ------ 16 files changed, 57 insertions(+), 103 deletions(-) delete mode 100644 spec/support/task_helpers.rb diff --git a/lib/meilisearch/client.rb b/lib/meilisearch/client.rb index ff682687..50a5aca0 100644 --- a/lib/meilisearch/client.rb +++ b/lib/meilisearch/client.rb @@ -16,7 +16,8 @@ def raw_indexes(options = {}) def swap_indexes(*options) mapped_array = options.map { |arr| { indexes: arr } } - http_post '/swap-indexes', mapped_array + response = http_post '/swap-indexes', mapped_array + MeiliSearch::Model::Task.new(response, task_endpoint) end def indexes(options = {}) @@ -48,8 +49,7 @@ def create_index!(index_uid, options = {}) "client.create_index('#{index_uid}').await" ) - task = create_index(index_uid, options) - wait_for_task(task['taskUid']) + create_index(index_uid, options).await end def delete_index(index_uid) @@ -125,7 +125,8 @@ def stats ### DUMPS def create_dump - http_post '/dumps' + response = http_post '/dumps' + MeiliSearch::Model::Task.new(response, task_endpoint) end ### SNAPSHOTS diff --git a/spec/meilisearch/client/dumps_spec.rb b/spec/meilisearch/client/dumps_spec.rb index e4606b5a..03a108cc 100644 --- a/spec/meilisearch/client/dumps_spec.rb +++ b/spec/meilisearch/client/dumps_spec.rb @@ -2,12 +2,6 @@ RSpec.describe 'MeiliSearch::Client - Dumps' do it 'creates a new dump' do - response = client.create_dump - expect(response).to be_a(Hash) - expect(response['taskUid']).to_not be_nil - expect(response['status']).to_not be_nil - expect(response['status']).to eq('enqueued') - response = client.wait_for_task(response['taskUid']) - expect(response['status']).to eq('succeeded') + expect(client.create_dump.await).to be_succeeded end end diff --git a/spec/meilisearch/client/indexes_spec.rb 
b/spec/meilisearch/client/indexes_spec.rb index 069a6893..4e7067a1 100644 --- a/spec/meilisearch/client/indexes_spec.rb +++ b/spec/meilisearch/client/indexes_spec.rb @@ -5,12 +5,10 @@ context 'without a primary key' do it 'creates an index' do task = client.create_index('books') + expect(task.type).to eq('indexCreation') + task.await - expect(task['type']).to eq('indexCreation') - - client.wait_for_task(task['taskUid']) index = client.fetch_index('books') - expect(index).to be_a(MeiliSearch::Index) expect(index.uid).to eq('books') expect(index.primary_key).to be_nil @@ -23,8 +21,8 @@ it 'creates an index' do task = client.create_index!('books') - expect(task['type']).to eq('indexCreation') - expect(task['status']).to eq('succeeded') + expect(task.type).to eq('indexCreation') + expect(task).to be_succeeded index = client.fetch_index('books') @@ -62,11 +60,10 @@ it 'creates an index' do task = client.create_index('books', primary_key: 'reference_code') - expect(task['type']).to eq('indexCreation') + expect(task.type).to eq('indexCreation') + task.await - client.wait_for_task(task['taskUid']) index = client.fetch_index('books') - expect(index).to be_a(MeiliSearch::Index) expect(index.uid).to eq('books') expect(index.primary_key).to eq('reference_code') @@ -90,8 +87,8 @@ context 'when primary key option in snake_case' do it 'creates an index' do task = client.create_index('books', primary_key: 'reference_code') - expect(task['type']).to eq('indexCreation') - client.wait_for_task(task['taskUid']) + expect(task.type).to eq('indexCreation') + task.await index = client.fetch_index('books') expect(index).to be_a(MeiliSearch::Index) @@ -109,11 +106,10 @@ uid: 'publications' ) - expect(task['type']).to eq('indexCreation') + expect(task.type).to eq('indexCreation') + task.await - client.wait_for_task(task['taskUid']) index = client.fetch_index('books') - expect(index).to be_a(MeiliSearch::Index) expect(index.uid).to eq('books') expect(index.primary_key).to eq('reference_code') 
@@ -252,9 +248,9 @@ expect(task['type']).to eq('indexDeletion') - achieved_task = client.wait_for_task(task['taskUid']) + task.await - expect(achieved_task['status']).to eq('succeeded') + expect(task).to be_succeeded expect { client.fetch_index('books') }.to raise_index_not_found_meilisearch_api_error end end @@ -269,9 +265,9 @@ describe '#swap_indexes' do it 'swaps two indexes' do task = client.swap_indexes(['indexA', 'indexB'], ['indexC', 'indexD']) - task = client.wait_for_task(task['taskUid']) - expect(task['type']).to eq('indexSwap') + expect(task.type).to eq('indexSwap') + task.await expect(task['details']['swaps']).to eq([{ 'indexes' => ['indexA', 'indexB'] }, { 'indexes' => ['indexC', 'indexD'] }]) end diff --git a/spec/meilisearch/client/multi_search_spec.rb b/spec/meilisearch/client/multi_search_spec.rb index e8d52a53..4efc0aa7 100644 --- a/spec/meilisearch/client/multi_search_spec.rb +++ b/spec/meilisearch/client/multi_search_spec.rb @@ -3,8 +3,7 @@ RSpec.describe 'MeiliSearch::Client - Multiple Index Search' do before do client.create_index('books') - task = client.create_index('movies') - client.wait_for_task(task['taskUid']) + client.create_index('movies').await end it 'does a custom search with two different indexes' do diff --git a/spec/meilisearch/client/token_spec.rb b/spec/meilisearch/client/token_spec.rb index 1728557c..49b5d9ce 100644 --- a/spec/meilisearch/client/token_spec.rb +++ b/spec/meilisearch/client/token_spec.rb @@ -124,10 +124,7 @@ def initialize(api_key) context 'with search_rules definitions' do include_context 'search books with genre' - before do - filterable_task = index.update_filterable_attributes(['genre', 'objectId']) - index.wait_for_task(filterable_task['taskUid']) - end + before { index.update_filterable_attributes(['genre', 'objectId']).await } let(:adm_client) { MeiliSearch::Client.new(URL, adm_key['key']) } let(:adm_key) do diff --git a/spec/meilisearch/index/documents_spec.rb b/spec/meilisearch/index/documents_spec.rb 
index 26642740..0d05449f 100644 --- a/spec/meilisearch/index/documents_spec.rb +++ b/spec/meilisearch/index/documents_spec.rb @@ -296,9 +296,8 @@ it 'replaces document' do id = 123 new_title = 'Pride & Prejudice' - task = index.replace_documents(objectId: id, title: 'Pride & Prejudice', note: '8.5/10') + index.replace_documents(objectId: id, title: 'Pride & Prejudice', note: '8.5/10').await - client.wait_for_task(task['taskUid']) expect(index.documents['results'].count).to eq(documents.count) doc = index.document(id) expect(doc['title']).to eq(new_title) @@ -405,8 +404,7 @@ id = 111 expect { index.document(id) }.to raise_document_not_found_meilisearch_api_error expect do - task = index.delete_document(id) - client.wait_for_task(task['taskUid']) + index.delete_document(id).await end.not_to(change { index.documents['results'].size }) end end diff --git a/spec/meilisearch/index/search/facets_distribution_spec.rb b/spec/meilisearch/index/search/facets_distribution_spec.rb index dbe21fb8..26d865c6 100644 --- a/spec/meilisearch/index/search/facets_distribution_spec.rb +++ b/spec/meilisearch/index/search/facets_distribution_spec.rb @@ -4,8 +4,7 @@ include_context 'search books with author, genre, year' before do - response = index.update_filterable_attributes(['genre', 'year', 'author']) - index.wait_for_task(response['taskUid']) + index.update_filterable_attributes(['genre', 'year', 'author']).await end it 'does a custom search with facets' do diff --git a/spec/meilisearch/index/search/filter_spec.rb b/spec/meilisearch/index/search/filter_spec.rb index 4fc4acdb..51ba9355 100644 --- a/spec/meilisearch/index/search/filter_spec.rb +++ b/spec/meilisearch/index/search/filter_spec.rb @@ -3,10 +3,7 @@ RSpec.describe 'MeiliSearch::Index - Filtered search' do include_context 'search books with author, genre, year' - before do - response = index.update_filterable_attributes(['genre', 'year', 'author']) - index.wait_for_task(response['taskUid']) - end + before { 
index.update_filterable_attributes(['genre', 'year', 'author']).await } it 'does a custom search with one filter' do response = index.search('le', { filter: 'genre = romance' }) diff --git a/spec/meilisearch/index/search/multi_params_spec.rb b/spec/meilisearch/index/search/multi_params_spec.rb index 13434f48..c554fa3e 100644 --- a/spec/meilisearch/index/search/multi_params_spec.rb +++ b/spec/meilisearch/index/search/multi_params_spec.rb @@ -3,10 +3,7 @@ RSpec.describe 'MeiliSearch::Index - Multi-paramaters search' do include_context 'search books with genre' - before do - response = index.update_filterable_attributes(['genre']) - index.wait_for_task(response['taskUid']) - end + before { index.update_filterable_attributes(['genre']).await } it 'does a custom search with attributes to crop, filter and attributes to highlight' do response = index.search('prince', @@ -45,8 +42,7 @@ end it 'does a custom search with filter, attributes_to_retrieve and attributes_to_highlight' do - response = index.update_filterable_attributes(['genre']) - index.wait_for_task(response['taskUid']) + index.update_filterable_attributes(['genre']).await response = index.search('prinec', { filter: ['genre = fantasy'], @@ -63,8 +59,7 @@ end it 'does a custom search with facets and limit' do - response = index.update_filterable_attributes(['genre']) - index.wait_for_task(response['taskUid']) + index.update_filterable_attributes(['genre']).await response = index.search('prinec', facets: ['genre'], limit: 1) expect(response.keys).to contain_exactly( diff --git a/spec/meilisearch/index/search/nested_fields_spec.rb b/spec/meilisearch/index/search/nested_fields_spec.rb index 711c4e2d..ab049d20 100644 --- a/spec/meilisearch/index/search/nested_fields_spec.rb +++ b/spec/meilisearch/index/search/nested_fields_spec.rb @@ -12,8 +12,8 @@ end it 'searches within index with searchableAttributes setting' do - wait_for_it index.update_searchable_attributes(['title', 'info.comment']) - wait_for_it 
index.add_documents(documents) + index.update_searchable_attributes(['title', 'info.comment']).await + index.add_documents(documents).await response = index.search('An awesome') @@ -23,9 +23,9 @@ end it 'searches within index with searchableAttributes and sortableAttributes settings' do - wait_for_it index.update_searchable_attributes(['title', 'info.comment']) - wait_for_it index.update_sortable_attributes(['info.reviewNb']) - wait_for_it index.add_documents(documents) + index.update_searchable_attributes(['title', 'info.comment']).await + index.update_sortable_attributes(['info.reviewNb']).await + index.add_documents(documents).await response = index.search('An awesome') diff --git a/spec/meilisearch/index/search/offset_spec.rb b/spec/meilisearch/index/search/offset_spec.rb index ec297bf6..4f33224a 100644 --- a/spec/meilisearch/index/search/offset_spec.rb +++ b/spec/meilisearch/index/search/offset_spec.rb @@ -16,8 +16,7 @@ end it 'does a placeholder search with an offset set to 3 and custom ranking rules' do - response = index.update_ranking_rules(['objectId:asc']) - index.wait_for_task(response['taskUid']) + index.update_ranking_rules(['objectId:asc']).await response = index.search('') response_with_offset = index.search('', offset: 3) expect(response['hits'].first['objectId']).to eq(1) diff --git a/spec/meilisearch/index/search/q_spec.rb b/spec/meilisearch/index/search/q_spec.rb index 3c7e5e0a..6791b095 100644 --- a/spec/meilisearch/index/search/q_spec.rb +++ b/spec/meilisearch/index/search/q_spec.rb @@ -38,16 +38,15 @@ end it 'does a basic search with an empty query and a custom ranking rule' do - response = index.update_ranking_rules([ - 'words', - 'typo', - 'sort', - 'proximity', - 'attribute', - 'exactness', - 'objectId:asc' - ]) - index.wait_for_task(response['taskUid']) + index.update_ranking_rules([ + 'words', + 'typo', + 'sort', + 'proximity', + 'attribute', + 'exactness', + 'objectId:asc' + ]).await response = index.search('') 
expect(response['estimatedTotalHits']).to eq(documents.count) expect(response['hits'].first['objectId']).to eq(1) diff --git a/spec/meilisearch/index/search/sort_spec.rb b/spec/meilisearch/index/search/sort_spec.rb index a196af07..efefa6ee 100644 --- a/spec/meilisearch/index/search/sort_spec.rb +++ b/spec/meilisearch/index/search/sort_spec.rb @@ -3,18 +3,17 @@ RSpec.describe 'MeiliSearch::Index - Sorted search' do include_context 'search books with author, genre, year' before do - response = index.update_sortable_attributes(['year', 'author']) - index.wait_for_task(response['taskUid']) + sortable_update = index.update_sortable_attributes(['year', 'author']) - response = index.update_ranking_rules([ - 'sort', - 'words', - 'typo', - 'proximity', - 'attribute', - 'exactness' - ]) - index.wait_for_task(response['taskUid']) + index.update_ranking_rules([ + 'sort', + 'words', + 'typo', + 'proximity', + 'attribute', + 'exactness' + ]).await + sortable_update.await end it 'does a custom search with one sort' do diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 0df0b569..5bb14253 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -146,7 +146,6 @@ # Helpers config.include IndexesHelpers - config.include TaskHelpers config.include ExceptionsHelpers config.include KeysHelpers config.include ExperimentalFeatureHelpers diff --git a/spec/support/books_contexts.rb b/spec/support/books_contexts.rb index 35b65090..83e40d2c 100644 --- a/spec/support/books_contexts.rb +++ b/spec/support/books_contexts.rb @@ -14,10 +14,7 @@ ] end - before do - response = index.add_documents(documents) - index.wait_for_task(response['taskUid']) - end + before { index.add_documents(documents).await } end RSpec.shared_context 'search books with author, genre, year' do @@ -89,10 +86,7 @@ ] end - before do - response = index.add_documents(documents) - index.wait_for_task(response['taskUid']) - end + before { index.add_documents(documents).await } end RSpec.shared_context 'search books 
with nested fields' do @@ -162,8 +156,5 @@ ] end - before do - response = index.add_documents(documents) - index.wait_for_task(response['taskUid']) - end + before { index.add_documents(documents).await } end diff --git a/spec/support/task_helpers.rb b/spec/support/task_helpers.rb deleted file mode 100644 index 6d399618..00000000 --- a/spec/support/task_helpers.rb +++ /dev/null @@ -1,9 +0,0 @@ -# frozen_string_literal: true - -module TaskHelpers - def wait_for_it(task) - raise('The param `task` does not have an taskUid key.') unless task.key?('taskUid') - - client.wait_for_task(task['taskUid']) - end -end From c5a8e92a0e58a024d976443c44ac5ccd8292213f Mon Sep 17 00:00:00 2001 From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Thu, 12 Oct 2023 17:44:35 +0200 Subject: [PATCH 08/12] Fix bad deprecation advice on 2 methods --- lib/meilisearch/index.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/meilisearch/index.rb b/lib/meilisearch/index.rb index 7ef1a2bb..2dcc47c6 100644 --- a/lib/meilisearch/index.rb +++ b/lib/meilisearch/index.rb @@ -164,7 +164,7 @@ def add_documents_in_batches(documents, batch_size = 1000, primary_key = nil) def add_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) Utils.soft_deprecate( 'Index#add_documents_in_batches!', - 'index.add_documents_in_batches(...).await' + 'index.add_documents_in_batches(...).each(&:await)' ) add_documents_in_batches(documents, batch_size, primary_key).each(&:await) @@ -179,7 +179,7 @@ def update_documents_in_batches(documents, batch_size = 1000, primary_key = nil) def update_documents_in_batches!(documents, batch_size = 1000, primary_key = nil) Utils.soft_deprecate( 'Index#update_documents_in_batches!', - 'index.update_documents_in_batches(...).await' + 'index.update_documents_in_batches(...).each(&:await)' ) update_documents_in_batches(documents, batch_size, primary_key).each(&:await) From 1b03f73bef0656eb43e2cf0f89476b3f50fc18ab Mon Sep 17 00:00:00 2001 
From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Wed, 29 Nov 2023 11:58:26 +0100 Subject: [PATCH 09/12] Fix and rename Models namespace --- lib/meilisearch/client.rb | 6 +- lib/meilisearch/index.rb | 80 ++++++++++++------------ lib/meilisearch/models/task.rb | 2 +- spec/meilisearch/index/documents_spec.rb | 4 +- spec/meilisearch/models/task_spec.rb | 2 +- 5 files changed, 47 insertions(+), 47 deletions(-) diff --git a/lib/meilisearch/client.rb b/lib/meilisearch/client.rb index 50a5aca0..1be31acc 100644 --- a/lib/meilisearch/client.rb +++ b/lib/meilisearch/client.rb @@ -17,7 +17,7 @@ def swap_indexes(*options) mapped_array = options.map { |arr| { indexes: arr } } response = http_post '/swap-indexes', mapped_array - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end def indexes(options = {}) @@ -38,7 +38,7 @@ def create_index(index_uid, options = {}) response = http_post '/indexes', body - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end # Synchronous version of create_index. 
@@ -126,7 +126,7 @@ def stats def create_dump response = http_post '/dumps' - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SNAPSHOTS diff --git a/lib/meilisearch/index.rb b/lib/meilisearch/index.rb index 2dcc47c6..351b988d 100644 --- a/lib/meilisearch/index.rb +++ b/lib/meilisearch/index.rb @@ -31,14 +31,14 @@ def fetch_raw_info def update(body) response = http_patch indexes_path(id: @uid), Utils.transform_attributes(body) - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias update_index update def delete response = http_delete indexes_path(id: @uid) - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias delete_index delete @@ -90,7 +90,7 @@ def add_documents(documents, primary_key = nil) documents = [documents] if documents.is_a?(Hash) response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias replace_documents add_documents alias add_or_replace_documents add_documents @@ -110,7 +110,7 @@ def add_documents_json(documents, primary_key = nil) options = { convert_body?: false } response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias replace_documents_json add_documents_json alias add_or_replace_documents_json add_documents_json @@ -119,7 +119,7 @@ def add_documents_ndjson(documents, primary_key = nil) options = { headers: { 'Content-Type' => 'application/x-ndjson' }, convert_body?: false } response = http_post "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact, options - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias replace_documents_ndjson 
add_documents_ndjson alias add_or_replace_documents_ndjson add_documents_ndjson @@ -132,7 +132,7 @@ def add_documents_csv(documents, primary_key = nil, delimiter = nil) csvDelimiter: delimiter }.compact, options - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias replace_documents_csv add_documents_csv alias add_or_replace_documents_csv add_documents_csv @@ -141,7 +141,7 @@ def update_documents(documents, primary_key = nil) documents = [documents] if documents.is_a?(Hash) response = http_put "/indexes/#{@uid}/documents", documents, { primaryKey: primary_key }.compact - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias add_or_update_documents update_documents @@ -204,7 +204,7 @@ def delete_documents(options = {}) http_post "/indexes/#{@uid}/documents/delete-batch", options end - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end end alias delete_multiple_documents delete_documents @@ -223,7 +223,7 @@ def delete_document(document_id) encode_document = URI.encode_www_form_component(document_id) response = http_delete "/indexes/#{@uid}/documents/#{encode_document}" - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias delete_one_document delete_document @@ -239,7 +239,7 @@ def delete_document!(document_id) def delete_all_documents response = http_delete "/indexes/#{@uid}/documents" - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end def delete_all_documents! 
@@ -322,13 +322,13 @@ def settings def update_settings(settings) response = http_patch "/indexes/#{@uid}/settings", Utils.transform_attributes(settings) - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias settings= update_settings def reset_settings response = http_delete "/indexes/#{@uid}/settings" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - RANKING RULES @@ -340,13 +340,13 @@ def ranking_rules def update_ranking_rules(ranking_rules) response = http_put "/indexes/#{@uid}/settings/ranking-rules", ranking_rules - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias ranking_rules= update_ranking_rules def reset_ranking_rules response = http_delete "/indexes/#{@uid}/settings/ranking-rules" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - SYNONYMS @@ -358,13 +358,13 @@ def synonyms def update_synonyms(synonyms) response = http_put "/indexes/#{@uid}/settings/synonyms", synonyms - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias synonyms= update_synonyms def reset_synonyms response = http_delete "/indexes/#{@uid}/settings/synonyms" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - STOP-WORDS @@ -377,13 +377,13 @@ def stop_words def update_stop_words(stop_words) body = stop_words.nil? || stop_words.is_a?(Array) ? 
stop_words : [stop_words] response = http_put "/indexes/#{@uid}/settings/stop-words", body - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias stop_words= update_stop_words def reset_stop_words response = http_delete "/indexes/#{@uid}/settings/stop-words" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - DINSTINCT ATTRIBUTE @@ -395,13 +395,13 @@ def distinct_attribute def update_distinct_attribute(distinct_attribute) response = http_put "/indexes/#{@uid}/settings/distinct-attribute", distinct_attribute - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias distinct_attribute= update_distinct_attribute def reset_distinct_attribute response = http_delete "/indexes/#{@uid}/settings/distinct-attribute" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - SEARCHABLE ATTRIBUTES @@ -413,13 +413,13 @@ def searchable_attributes def update_searchable_attributes(searchable_attributes) response = http_put "/indexes/#{@uid}/settings/searchable-attributes", searchable_attributes - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias searchable_attributes= update_searchable_attributes def reset_searchable_attributes response = http_delete "/indexes/#{@uid}/settings/searchable-attributes" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - DISPLAYED ATTRIBUTES @@ -431,13 +431,13 @@ def displayed_attributes def update_displayed_attributes(displayed_attributes) response = http_put "/indexes/#{@uid}/settings/displayed-attributes", displayed_attributes - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias displayed_attributes= update_displayed_attributes def 
reset_displayed_attributes response = http_delete "/indexes/#{@uid}/settings/displayed-attributes" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - FILTERABLE ATTRIBUTES @@ -449,13 +449,13 @@ def filterable_attributes def update_filterable_attributes(filterable_attributes) response = http_put "/indexes/#{@uid}/settings/filterable-attributes", filterable_attributes - Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias filterable_attributes= update_filterable_attributes def reset_filterable_attributes response = http_delete "/indexes/#{@uid}/settings/filterable-attributes" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - SORTABLE ATTRIBUTES @@ -467,13 +467,13 @@ def sortable_attributes def update_sortable_attributes(sortable_attributes) response = http_put "/indexes/#{@uid}/settings/sortable-attributes", sortable_attributes - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias sortable_attributes= update_sortable_attributes def reset_sortable_attributes response = http_delete "/indexes/#{@uid}/settings/sortable-attributes" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - PAGINATION @@ -485,13 +485,13 @@ def pagination def update_pagination(pagination) response = http_patch "/indexes/#{@uid}/settings/pagination", pagination - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias pagination= update_sortable_attributes def reset_pagination response = http_delete "/indexes/#{@uid}/settings/pagination" - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end def typo_tolerance @@ -502,13 +502,13 @@ def typo_tolerance def update_typo_tolerance(typo_tolerance_attributes) 
attributes = Utils.transform_attributes(typo_tolerance_attributes) response = http_patch("/indexes/#{@uid}/settings/typo-tolerance", attributes) - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias typo_tolerance= update_typo_tolerance def reset_typo_tolerance response = http_delete("/indexes/#{@uid}/settings/typo-tolerance") - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end def faceting @@ -519,13 +519,13 @@ def faceting def update_faceting(faceting_attributes) attributes = Utils.transform_attributes(faceting_attributes) response = http_patch("/indexes/#{@uid}/settings/faceting", attributes) - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end alias faceting= update_faceting def reset_faceting response = http_delete("/indexes/#{@uid}/settings/faceting") - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - DICTIONARY @@ -537,12 +537,12 @@ def dictionary def update_dictionary(dictionary_attributes) attributes = Utils.transform_attributes(dictionary_attributes) response = http_put("/indexes/#{@uid}/settings/dictionary", attributes) - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end def reset_dictionary response = http_delete("/indexes/#{@uid}/settings/dictionary") - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - SEPARATOR TOKENS @@ -553,12 +553,12 @@ def separator_tokens def update_separator_tokens(separator_tokens_attributes) attributes = Utils.transform_attributes(separator_tokens_attributes) response = http_put("/indexes/#{@uid}/settings/separator-tokens", attributes) - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end def reset_separator_tokens response = 
http_delete("/indexes/#{@uid}/settings/separator-tokens") - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - NON SEPARATOR TOKENS @@ -570,12 +570,12 @@ def non_separator_tokens def update_non_separator_tokens(non_separator_tokens_attributes) attributes = Utils.transform_attributes(non_separator_tokens_attributes) response = http_put("/indexes/#{@uid}/settings/non-separator-tokens", attributes) - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end def reset_non_separator_tokens response = http_delete("/indexes/#{@uid}/settings/non-separator-tokens") - MeiliSearch::Model::Task.new(response, task_endpoint) + Models::Task.new(response, task_endpoint) end ### SETTINGS - PROXIMITY PRECISION diff --git a/lib/meilisearch/models/task.rb b/lib/meilisearch/models/task.rb index 4a17516a..2aab0837 100644 --- a/lib/meilisearch/models/task.rb +++ b/lib/meilisearch/models/task.rb @@ -3,7 +3,7 @@ require 'forwardable' module MeiliSearch - module Model + module Models class Task extend Forwardable diff --git a/spec/meilisearch/index/documents_spec.rb b/spec/meilisearch/index/documents_spec.rb index 0d05449f..177af894 100644 --- a/spec/meilisearch/index/documents_spec.rb +++ b/spec/meilisearch/index/documents_spec.rb @@ -121,8 +121,8 @@ it 'adds documents in a batch (as a array of documents)' do tasks = index.add_documents_in_batches(documents, 5) - expect(tasks).to contain_exactly(a_kind_of(MeiliSearch::Model::Task), - a_kind_of(MeiliSearch::Model::Task)) + expect(tasks).to contain_exactly(a_kind_of(MeiliSearch::Models::Task), + a_kind_of(MeiliSearch::Models::Task)) tasks.each(&:await) expect(index.documents['results']).to contain_exactly(*documents_with_string_keys) end diff --git a/spec/meilisearch/models/task_spec.rb b/spec/meilisearch/models/task_spec.rb index 3138b479..eac6ea6c 100644 --- a/spec/meilisearch/models/task_spec.rb +++ 
b/spec/meilisearch/models/task_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -describe MeiliSearch::Model::Task do +describe MeiliSearch::Models::Task do let(:new_index_uid) { random_uid } let(:task_hash) { client.http_post '/indexes', { 'uid' => new_index_uid } } let(:endpoint) { MeiliSearch::Task.new(URL, MASTER_KEY, client.options) } From 75017dc83c6ccdf6b8daec5b791f344e50bab4a1 Mon Sep 17 00:00:00 2001 From: ellnix <103502144+ellnix@users.noreply.github.com> Date: Wed, 29 Nov 2023 13:03:47 +0100 Subject: [PATCH 10/12] Clean up and refactor Model::Task spec --- spec/meilisearch/models/task_spec.rb | 150 ++++++++++----------------- 1 file changed, 57 insertions(+), 93 deletions(-) diff --git a/spec/meilisearch/models/task_spec.rb b/spec/meilisearch/models/task_spec.rb index eac6ea6c..33f47c80 100644 --- a/spec/meilisearch/models/task_spec.rb +++ b/spec/meilisearch/models/task_spec.rb @@ -48,7 +48,7 @@ end describe 'forwarding' do - it 'allows for direct reading internal hash' do + it 'allows accessing values in the internal task hash' do subject task_hash.each do |key, value| @@ -58,12 +58,10 @@ end describe '#enqueued?' 
do - context 'if the task is processing' do + context 'when the task is processing' do before { task_hash['status'] = 'processing' } - it 'returns false' do - expect(subject).not_to be_enqueued - end + it { is_expected.not_to be_enqueued } it 'does not refresh the task' do allow(subject).to receive(:refresh) @@ -72,12 +70,10 @@ end end - context 'if the task has succeeded' do + context 'when the task has succeeded' do before { task_hash['status'] = 'succeeded' } - it 'returns false' do - expect(subject).not_to be_enqueued - end + it { is_expected.not_to be_enqueued } it 'does not refresh the task' do allow(subject).to receive(:refresh) @@ -86,12 +82,10 @@ end end - context 'if the task has failed' do + context 'when the task has failed' do before { task_hash['status'] = 'failed' } - it 'returns false' do - expect(subject).not_to be_enqueued - end + it { is_expected.not_to be_enqueued } it 'does not refresh the task' do allow(subject).to receive(:refresh) @@ -100,27 +94,23 @@ end end - it 'returns true if the task is enqueued' do + it 'returns true when the task is enqueued' do expect(enqueued_task).to be_enqueued end - context 'if the task has succeeded but not updated' do + context 'when the task has succeeded but not refreshed' do let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } - it 'refreshes and returns false' do - expect(subject).not_to be_enqueued - end + it { is_expected.not_to be_enqueued } end end describe '#processing?' 
do - context 'if the task has succeeded' do + context 'when the task has succeeded' do before { task_hash['status'] = 'succeeded' } - it 'returns false' do - expect(subject).not_to be_processing - end + it { is_expected.not_to be_processing } it 'does not refresh the task' do allow(subject).to receive(:refresh) @@ -129,12 +119,10 @@ end end - context 'if the task has failed' do + context 'when the task has failed' do before { task_hash['status'] = 'failed' } - it 'returns false' do - expect(subject).not_to be_processing - end + it { is_expected.not_to be_processing } it 'does not refresh the task' do allow(subject).to receive(:refresh) @@ -143,23 +131,21 @@ end end - it 'returns false if the task has not begun to process' do + it 'returns false when the task has not begun to process' do expect(enqueued_task).not_to be_processing end - it 'returns true if the task is processing' do + it 'returns true when the task is processing' do expect(processing_task).to be_processing end - context 'if the task has begun processing but has not updated' do + context 'when the task has begun processing but has not refreshed' do let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash.merge('status' => 'processing')) } - it 'refreshes and returns true' do - expect(subject).to be_processing - end + it { is_expected.to be_processing } end - context 'if the task has succeeded but not updated' do + context 'when the task has succeeded but not refreshed' do let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } @@ -175,55 +161,51 @@ expect(subject).not_to be_unfinished end - it 'returns false if the task has failed' do + it 'returns false when the task has failed' do task_hash['status'] = 'failed' expect(subject).not_to be_unfinished end - it 'returns true if the task is enqueued' do + it 'returns true when the task is enqueued' do expect(enqueued_task).to be_unfinished end - it 
'returns true if the task is processing' do + it 'returns true when the task is processing' do expect(processing_task).to be_unfinished end - context 'if the task has succeeded but not updated' do + context 'when the task has succeeded but not refreshed' do let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } - it 'refreshes and returns false' do - expect(subject).not_to be_unfinished - end + it { is_expected.not_to be_unfinished } end end describe '#finished?' do - it 'returns true if the task has succeeded' do + it 'returns true when the task has succeeded' do task_hash['status'] = 'succeeded' expect(subject).to be_finished end - it 'returns true if the task has failed' do + it 'returns true when the task has failed' do task_hash['status'] = 'failed' expect(subject).to be_finished end - it 'returns false if the task is enqueued' do + it 'returns false when the task is enqueued' do expect(enqueued_task).not_to be_finished end - it 'returns false if the task is processing' do + it 'returns false when the task is processing' do expect(processing_task).not_to be_finished end - context 'if the task has succeeded but not updated' do + context 'when the task has succeeded but not refreshed' do let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } - it 'refreshes and returns true' do - expect(subject).to be_finished - end + it { is_expected.to be_finished } end end @@ -240,12 +222,10 @@ expect(subject).to be_failed end - context 'if the task is not finished' do + context 'when the task is not finished' do let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } - it 'returns false' do - expect(subject).not_to be_failed - end + it { is_expected.not_to be_failed } it 'warns that the task is not finished' do subject.failed? 
@@ -254,13 +234,11 @@ end end - context 'if the task has failed but not updated' do + context 'when the task has failed but not refreshed' do let(:failed_task_hash) { task_hash.merge('status' => 'failed') } let(:endpoint) { instance_double(MeiliSearch::Task, task: failed_task_hash) } - it 'refreshes and returns true' do - expect(subject).to be_failed - end + it { is_expected.to be_failed } end end @@ -277,12 +255,10 @@ expect(subject).not_to be_succeeded end - context 'if the task is not finished' do + context 'when the task is not finished' do let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } - it 'returns false' do - expect(subject).not_to be_succeeded - end + it { is_expected.not_to be_succeeded } it 'warns that the task is not finished' do subject.succeeded? @@ -291,13 +267,11 @@ end end - context 'if the task has succeeded but not updated' do + context 'when the task has succeeded but not refreshed' do let(:successful_task_hash) { task_hash.merge('status' => 'succeeded') } let(:endpoint) { instance_double(MeiliSearch::Task, task: successful_task_hash) } - it 'refreshes and returns true' do - expect(subject).to be_succeeded - end + it { is_expected.to be_succeeded } end end @@ -314,12 +288,10 @@ expect(subject).to be_cancelled end - context 'if the task is not finished' do + context 'when the task is not finished' do let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } - it 'returns false' do - expect(subject).not_to be_cancelled - end + it { is_expected.not_to be_cancelled } it 'warns that the task is not finished' do subject.cancelled? 
@@ -328,13 +300,11 @@ end end - context 'if the task has failed but not updated' do + context 'when the task has been cancelled but not refreshed' do let(:cancelled_task_hash) { task_hash.merge('status' => 'cancelled') } let(:endpoint) { instance_double(MeiliSearch::Task, task: cancelled_task_hash) } - it 'refreshes and returns true' do - expect(subject).to be_cancelled - end + it { is_expected.to be_cancelled } end end @@ -342,12 +312,12 @@ let(:not_found_error) { MeiliSearch::ApiError.new(404, '', '') } let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash) } - it 'returns false if the task can be found' do + it 'returns false when the task can be found' do expect(subject.deleted?).to be(false) # don't just return nil expect(subject).not_to be_deleted end - context 'when it was deleted earlier' do + context 'when it was deleted prior' do let(:endpoint) { instance_double(MeiliSearch::Task) } before do @@ -360,9 +330,7 @@ expect(endpoint).to have_received(:task).once end - it 'returns true' do - expect(subject).to be_deleted - end + it { is_expected.to be_deleted } end it 'refreshes and returns true when it is no longer in instance' do @@ -372,7 +340,7 @@ end describe '#cancel' do - context 'if the task is still not finished' do + context 'when the task is still not finished' do let(:cancellation_task) { instance_double(described_class, await: nil) } let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, cancel_tasks: cancellation_task) } @@ -381,18 +349,18 @@ expect(endpoint).to have_received(:cancel_tasks) end - it 'returns true if the cancellation succeeds' do + it 'returns true when the cancellation succeeds' do task_hash['status'] = 'cancelled' expect(subject.cancel).to be(true) end - it 'returns false if the cancellation fails' do + it 'returns false when the cancellation fails' do task_hash['status'] = 'succeeded' expect(subject.cancel).to be(false) end end - context 'if the task is already finished' do + context 'when the task is 
already finished' do let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, cancel_tasks: nil) } before { task_hash['status'] = 'succeeded' } @@ -401,12 +369,10 @@ expect(endpoint).not_to have_received(:cancel_tasks) end - it 'returns false' do - expect(subject.cancel).to be(false) - end + it { is_expected.not_to be_cancelled } end - context 'if the task is already cancelled' do + context 'when the task is already cancelled' do let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, cancel_tasks: nil) } before { task_hash['status'] = 'cancelled' } @@ -415,9 +381,7 @@ expect(endpoint).not_to have_received(:cancel_tasks) end - it 'returns true' do - expect(subject.cancel).to be(true) - end + it { is_expected.to be_cancelled } end end @@ -425,7 +389,7 @@ let(:deletion_task) { instance_double(described_class, await: nil) } let(:endpoint) { instance_double(MeiliSearch::Task, delete_tasks: deletion_task) } - context 'if the task is unfinished' do + context 'when the task is unfinished' do it 'makes no request' do subject.delete expect(endpoint).not_to have_received(:delete_tasks) @@ -436,7 +400,7 @@ end end - context 'if the task is finished' do + context 'when the task is finished' do before do task_hash['status'] = 'failed' not_found_error = MeiliSearch::ApiError.new(404, '', '') @@ -468,10 +432,10 @@ let(:changed_task) { task_hash.merge('status' => 'succeeded', 'error' => 'Done too well') } let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, wait_for_task: changed_task) } - context 'if the task is not yet completed' do + context 'when the task is not yet completed' do let(:endpoint) { instance_double(MeiliSearch::Task, task: task_hash, wait_for_task: changed_task) } - it 'waits if the task is yet not completed' do + it 'waits for the task to complete' do expect { subject.await }.to change { subject['status'] }.from('enqueued').to('succeeded') .and(change { subject['error'] }.from(nil).to('Done too well')) end @@ -481,7 
+445,7 @@ end end - context 'if the task is already completed' do + context 'when the task is already completed' do let(:endpoint) { instance_double(MeiliSearch::Task, task: changed_task, wait_for_task: changed_task) } it 'does not contact the instance' do From d2f4efdbefd9d62476fe66c2ced7c0a12409466c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 03:20:15 +0000 Subject: [PATCH 11/12] Bump release-drafter/release-drafter from 5 to 6 Bumps [release-drafter/release-drafter](https://github.com/release-drafter/release-drafter) from 5 to 6. - [Release notes](https://github.com/release-drafter/release-drafter/releases) - [Commits](https://github.com/release-drafter/release-drafter/compare/v5...v6) --- updated-dependencies: - dependency-name: release-drafter/release-drafter dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/release-drafter.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 9ec8b9d6..20f2d83f 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -9,7 +9,7 @@ jobs: update_release_draft: runs-on: ubuntu-latest steps: - - uses: release-drafter/release-drafter@v5 + - uses: release-drafter/release-drafter@v6 with: config-name: release-draft-template.yml env: From c0bda9ec842e87815134320e26bd28ef51563d2e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 04:58:17 +0000 Subject: [PATCH 12/12] Update rubocop requirement from ~> 1.50.1 to ~> 1.61.0 Updates the requirements on [rubocop](https://github.com/rubocop/rubocop) to permit the latest version. 
- [Release notes](https://github.com/rubocop/rubocop/releases) - [Changelog](https://github.com/rubocop/rubocop/blob/master/CHANGELOG.md) - [Commits](https://github.com/rubocop/rubocop/compare/v1.50.1...v1.61.0) --- updated-dependencies: - dependency-name: rubocop dependency-type: direct:development ... Signed-off-by: dependabot[bot] --- Gemfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Gemfile b/Gemfile index 0261ef39..b63d82f5 100644 --- a/Gemfile +++ b/Gemfile @@ -18,5 +18,5 @@ group :development, :test do end group :development do - gem 'rubocop', '~> 1.50.1', require: false + gem 'rubocop', '~> 1.61.0', require: false end