diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 2ae9cbf..52c3bd1 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -33,7 +33,8 @@ jobs:
         run: bundle exec rspec
 
       - name: Upload coverage report
-        uses: actions/upload-artifact@v2
+        if: matrix.os == 'ubuntu-latest' # Only upload from Ubuntu
+        uses: actions/upload-artifact@v4
         with:
           name: coverage-report
          path: coverage/
\ No newline at end of file
diff --git a/Gemfile b/Gemfile
index ccb7c63..0e0c6b7 100644
--- a/Gemfile
+++ b/Gemfile
@@ -2,10 +2,7 @@
 
 source 'https://rubygems.org'
 
-# Specify your gem's dependencies in nse_data.gemspec
-gemspec
-
-# Specify your gem's dependencies in nse_data.gemspec
+# Specify your gem's dependencies in api_wrapper.gemspec
 gemspec
 
 group :development do
diff --git a/api_wrapper.gemspec b/api_wrapper.gemspec
index d0663c0..4baa712 100644
--- a/api_wrapper.gemspec
+++ b/api_wrapper.gemspec
@@ -31,9 +31,6 @@ Gem::Specification.new do |spec|
   spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
   spec.require_paths = ['lib']
 
-  # Uncomment to register a new dependency of your gem
-  # spec.add_dependency "example-gem", "~> 1.0"
-
-  # For more information and examples about making a new gem, check out our
-  # guide at: https://bundler.io/guides/creating_gem.html
+  spec.add_dependency 'faraday', '~> 2.11'
+  spec.add_dependency 'faraday-http-cache', '~> 2.5', '>= 2.5.1'
 end
diff --git a/lib/api_wrapper/cache/README.md b/lib/api_wrapper/cache/README.md
new file mode 100644
index 0000000..8a6d775
--- /dev/null
+++ b/lib/api_wrapper/cache/README.md
@@ -0,0 +1,78 @@
+
+# Caching Mechanism Usage and Customization
+
+## Overview
+
+The **ApiWrapper** gem includes a flexible caching mechanism to improve performance and reduce redundant API calls. This documentation provides an overview of how to use and customize the caching mechanism.
+
+## Cache Policy
+
+The `CachePolicy` class is responsible for managing caching behavior, including setting global TTLs, custom TTLs for specific endpoints, and controlling which endpoints should bypass the cache.
+
+### Initializing CachePolicy
+
+To use caching, you need to initialize the `CachePolicy` with a cache store and an optional global TTL:
+
+```ruby
+require 'api_wrapper/cache/cache_policy'
+require 'api_wrapper/cache/cache_store'
+
+# Initialize the cache store (e.g., in-memory cache store)
+cache_store = ApiWrapper::Cache::CacheStore.new
+
+# Initialize the CachePolicy with the cache store and a global TTL of 300 seconds (5 minutes)
+cache_policy = ApiWrapper::Cache::CachePolicy.new(cache_store, 300)
+```
+
+### Configuring Cache Policy
+#### Adding No-Cache Endpoints
+You can specify endpoints that should bypass the cache:
+```ruby
+# Add an endpoint to the no-cache list
+cache_policy.add_no_cache_endpoint('/no-cache')
+```
+
+#### Adding Custom TTLs
+You can define custom TTLs for specific endpoints:
+
+```ruby
+# Set a custom TTL of 600 seconds (10 minutes) for a specific endpoint
+cache_policy.add_custom_ttl('/custom-ttl', 600)
+```
+
+#### Fetching Data with Cache
+Use the `fetch` method to retrieve data with caching applied:
+
+```ruby
+# Fetch data for an endpoint with optional cache
+data = cache_policy.fetch('/some-endpoint') do
+  # The block should fetch fresh data if the cache is not used or is stale,
+  # e.g., perform an API call or other data retrieval operation
+  Faraday::Response.new(body: 'fresh data')
+end
+```
+## Custom Cache Stores
+You can extend the caching mechanism to support different types of cache stores. Implement a custom cache store by inheriting from the `CacheStore` base class and overriding the `read` and `write` methods.
+
+### Example Custom Cache Store
+```ruby
+class CustomCacheStore < ApiWrapper::Cache::CacheStore
+  def read(key)
+    # Implement custom read logic
+  end
+
+  def write(key, value, ttl)
+    # Implement custom write logic
+  end
+end
+```
+### Using a Custom Cache Store
+To use a custom cache store, initialize `CachePolicy` with an instance of your custom cache store:
+
+```ruby
+# Initialize the custom cache store
+custom_cache_store = CustomCacheStore.new
+
+# Initialize CachePolicy with the custom cache store
+cache_policy = ApiWrapper::Cache::CachePolicy.new(custom_cache_store, 300)
+```
\ No newline at end of file
diff --git a/lib/api_wrapper/cache/cache_policy.rb b/lib/api_wrapper/cache/cache_policy.rb
new file mode 100644
index 0000000..f0b8302
--- /dev/null
+++ b/lib/api_wrapper/cache/cache_policy.rb
@@ -0,0 +1,115 @@
+# frozen_string_literal: true
+
+module ApiWrapper
+  module Cache
+    # CachePolicy manages caching behavior, including cache storage and time-to-live (TTL) settings.
+    #
+    # It allows setting global TTLs, custom TTLs for specific endpoints, and controlling which
+    # endpoints should not use the cache.
+    #
+    # @attr_reader [CacheStore] cache_store The cache store used for storing cached data.
+    class CachePolicy
+      attr_reader :cache_store
+
+      # Initializes the CachePolicy with a cache store and global TTL.
+      #
+      # @param cache_store [CacheStore, RedisCacheStore] The cache store to use for caching.
+      # @param global_ttl [Integer] The default TTL (in seconds) for caching.
+      def initialize(cache_store, global_ttl = 300)
+        @cache_store = cache_store
+        @global_ttl = global_ttl
+        @custom_ttls = {}
+        @no_cache_endpoints = []
+      end
+
+      # Adds an endpoint that should bypass the cache.
+      #
+      # @param endpoint [String] The endpoint to exclude from caching.
+      def add_no_cache_endpoint(endpoint)
+        @no_cache_endpoints << endpoint
+      end
+
+      # Adds a custom TTL for a specific endpoint.
+      #
+      # @param endpoint [String] The endpoint to apply a custom TTL to.
+      # @param ttl [Integer] The custom TTL value in seconds.
+      def add_custom_ttl(endpoint, ttl = 300)
+        @custom_ttls[endpoint] = ttl
+      end
+
+      # Returns the TTL for a specific endpoint. Defaults to the global TTL if no custom TTL is set.
+      #
+      # @param endpoint [String] The endpoint to fetch the TTL for.
+      # @return [Integer] The TTL in seconds.
+      def ttl_for(endpoint)
+        @custom_ttls.fetch(endpoint, @global_ttl)
+      end
+
+      # Determines if caching should be used for the given endpoint.
+      #
+      # @param endpoint [String] The endpoint to check.
+      # @return [Boolean] True if caching is enabled for the endpoint, false otherwise.
+      def use_cache?(endpoint)
+        !@no_cache_endpoints.include?(endpoint)
+      end
+
+      # Fetches the data for the given endpoint, using the cache if applicable.
+      #
+      # @param endpoint [String] The endpoint to fetch data for.
+      # @param force_refresh [Boolean] Whether to force refresh the data, bypassing the cache.
+      # @yield The block that fetches fresh data if the cache is not used or is stale.
+      # @return [Object] The data fetched from the cache, or fresh data.
+      def fetch(endpoint, force_refresh: false, &block)
+        if force_refresh || !use_cache?(endpoint)
+          fetch_fresh_data(endpoint, &block)
+        else
+          fetch_cached_or_fresh_data(endpoint, &block)
+        end
+      end
+
+      private
+
+      # Fetches fresh data and writes it to the cache if applicable.
+      #
+      # @param endpoint [String] The endpoint to fetch fresh data for.
+      # @yield The block that fetches fresh data.
+      # @return [Object] The fresh data.
+      def fetch_fresh_data(endpoint)
+        fresh_data = yield
+        cache_fresh_data(endpoint, fresh_data)
+        fresh_data
+      end
+
+      # Fetches cached data, or fresh data if nothing usable is in the cache.
+      #
+      # @param endpoint [String] The endpoint to fetch data for.
+      # @yield The block that fetches fresh data if the cache is not used or is stale.
+      # @return [Object] The cached or fresh data.
+      def fetch_cached_or_fresh_data(endpoint, &block)
+        cached_data = @cache_store.read(endpoint)
+        if cached_data
+          Faraday::Response.new(body: cached_data)
+        else
+          fetch_fresh_data(endpoint, &block)
+        end
+      end
+
+      # Writes fresh data to the cache.
+      #
+      # @param endpoint [String] The endpoint for which to store the data.
+      # @param fresh_data [Object] The data to be stored in the cache.
+      def cache_fresh_data(endpoint, fresh_data)
+        ttl = determine_ttl(endpoint)
+        @cache_store.write(endpoint, fresh_data.body, ttl) if fresh_data.is_a?(Faraday::Response)
+      end
+
+      # Determines the TTL value for the given endpoint.
+      #
+      # @param endpoint [String] The endpoint to fetch the TTL for.
+      # @return [Integer] The TTL value in seconds.
+      def determine_ttl(endpoint)
+        @custom_ttls.fetch(endpoint, @global_ttl)
+      end
+    end
+  end
+end
diff --git a/lib/api_wrapper/cache/cache_store.rb b/lib/api_wrapper/cache/cache_store.rb
new file mode 100644
index 0000000..a681199
--- /dev/null
+++ b/lib/api_wrapper/cache/cache_store.rb
@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module ApiWrapper
+  module Cache
+    # CacheStore provides an in-memory caching mechanism.
+    class CacheStore
+      def initialize
+        @store = {}
+      end
+
+      # Retrieves the cached data for the given key, or fetches fresh data if it is not cached or has expired.
+      #
+      # @param key [String] The cache key.
+      # @param ttl [Integer] The time-to-live in seconds.
+      # @yield Fetches fresh data if the cache is expired or not present.
+      # @return [Object] The cached data, or the result of the block if not cached or expired.
+      def fetch(key, ttl)
+        if cached?(key, ttl)
+          @store[key][:data]
+        else
+          fresh_data = yield
+          store(key, fresh_data, ttl)
+          fresh_data
+        end
+      end
+
+      # Reads data from the cache.
+      #
+      # @param key [String] The cache key.
+      # @return [Object, nil] The cached data, or nil if not present or expired.
+      def read(key)
+        cached?(key) ? @store[key][:data] : nil
+      end
+
+      # Writes data to the cache with an expiration time.
+      #
+      # @param key [String] The cache key.
+      # @param data [Object] The data to cache.
+      # @param ttl [Integer] The time-to-live in seconds.
+      def write(key, data, ttl)
+        store(key, data, ttl)
+      end
+
+      # Deletes data from the cache.
+      #
+      # @param key [String] The cache key.
+      def delete(key)
+        @store.delete(key)
+      end
+
+      private
+
+      # Checks if the data for the given key is cached and not expired.
+      #
+      # @param key [String] The cache key.
+      # @param ttl [Integer, nil] The time-to-live in seconds.
+      # @return [Boolean] Whether the data is cached and valid.
+      def cached?(key, ttl = nil)
+        return false unless @store.key?(key)
+
+        !expired?(key, ttl)
+      end
+
+      # Checks if the cached data for the given key has expired.
+      #
+      # @param key [String] The cache key.
+      # @param ttl [Integer, nil] The time-to-live in seconds; falls back to the TTL stored with the entry.
+      # @return [Boolean] Whether the cached data has expired.
+      def expired?(key, ttl)
+        ttl ||= @store[key][:ttl]
+        ttl && (Time.now - @store[key][:timestamp]) >= ttl
+      end
+
+      # Stores the data in the cache.
+      #
+      # @param key [String] The cache key.
+      # @param data [Object] The data to cache.
+      # @param ttl [Integer] The time-to-live in seconds.
+      def store(key, data, ttl)
+        @store[key] = { data: data, timestamp: Time.now, ttl: ttl }
+      end
+    end
+  end
+end
diff --git a/lib/api_wrapper/cache/redis_cache_store.rb b/lib/api_wrapper/cache/redis_cache_store.rb
new file mode 100644
index 0000000..f4cd5aa
--- /dev/null
+++ b/lib/api_wrapper/cache/redis_cache_store.rb
@@ -0,0 +1,3 @@
+# frozen_string_literal: true
+
+# TODO: Implement in the near future :)
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 155f890..b46d546 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -1,5 +1,10 @@
 # frozen_string_literal: true
 
+require 'simplecov'
+SimpleCov.start do
+  add_filter '/spec/'
+end
+
 require 'api_wrapper'
 
 RSpec.configure do |config|
diff --git a/spec/unit/cache/cache_policy_spec.rb b/spec/unit/cache/cache_policy_spec.rb
new file mode 100644
index 0000000..65b5177
--- /dev/null
+++ b/spec/unit/cache/cache_policy_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'api_wrapper/cache/cache_policy'
+require 'api_wrapper/cache/cache_store'
+require 'faraday'
+
+RSpec.describe ApiWrapper::Cache::CachePolicy do
+  let(:cache_store) { ApiWrapper::Cache::CacheStore.new }
+  let(:cache_policy) { described_class.new(cache_store) }
+
+  before do
+    cache_policy.add_no_cache_endpoint('/no-cache')
+    cache_policy.add_custom_ttl('/custom-ttl', 600) # 10 minutes
+  end
+
+  describe '#use_cache?' do
+    it 'returns true for endpoints that are not in the no-cache list' do
+      expect(cache_policy.use_cache?('/some-endpoint')).to be(true)
+    end
+
+    it 'returns false for endpoints that are in the no-cache list' do
+      expect(cache_policy.use_cache?('/no-cache')).to be(false)
+    end
+  end
+
+  describe '#fetch' do
+    it 'uses cache if available and not forced to refresh' do
+      # Simulate a Faraday::Response object
+      cached_response = Faraday::Response.new(body: 'cached data')
+      cache_policy.fetch('/some-endpoint') { cached_response }
+      result = cache_policy.fetch('/some-endpoint')
+      expect(result.body).to eq('cached data')
+    end
+
+    it 'bypasses cache if force_refresh is true' do
+      cached_response = Faraday::Response.new(body: 'cached data')
+      cache_policy.fetch('/some-endpoint') { cached_response }
+      fresh_response = Faraday::Response.new(body: 'fresh data')
+      result = cache_policy.fetch('/some-endpoint', force_refresh: true) { fresh_response }
+      expect(result.body).to eq('fresh data')
+    end
+
+    it 'uses custom TTL for specific endpoints' do
+      fresh_response = Faraday::Response.new(body: 'data with custom ttl')
+      cache_policy.fetch('/custom-ttl') { fresh_response }
+      result = cache_policy.fetch('/custom-ttl')
+      expect(result.body).to eq('data with custom ttl')
+    end
+  end
+end
diff --git a/spec/unit/cache/cache_store_spec.rb b/spec/unit/cache/cache_store_spec.rb
new file mode 100644
index 0000000..d2db781
--- /dev/null
+++ b/spec/unit/cache/cache_store_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'api_wrapper/cache/cache_store'
+require 'timecop'
+
+RSpec.describe ApiWrapper::Cache::CacheStore do
+  let(:cache_store) { described_class.new }
+  let(:key) { 'test_key' }
+  let(:data) { 'test_data' }
+  let(:ttl) { 5 } # 5 seconds TTL
+
+  describe '#fetch' do
+    context 'when data is not cached' do
+      it 'fetches new data and stores it' do
+        result = cache_store.fetch(key, ttl) { data }
+        expect(result).to eq(data)
+        expect(cache_store.read(key)).to eq(data)
+      end
+    end
+
+    context 'when data is cached' do
+      before do
+        cache_store.fetch(key, ttl) { data }
+      end
+
+      it 'returns cached data within TTL' do
+        result = cache_store.fetch(key, ttl) { 'new_data' }
+        expect(result).to eq(data)
+      end
+
+      it 'returns fresh data after TTL expires' do
+        # Use Timecop to travel past the TTL for precise expiry testing
+        Timecop.travel(Time.now + ttl + 1) do
+          result = cache_store.fetch(key, ttl) { 'new_data' }
+          expect(result).to eq('new_data')
+        end
+      end
+    end
+  end
+
+  describe '#read' do
+    context 'when data is cached' do
+      before do
+        cache_store.fetch(key, ttl) { data }
+      end
+
+      it 'returns the cached data' do
+        expect(cache_store.read(key)).to eq(data)
+      end
+    end
+
+    context 'when data is not cached' do
+      it 'returns nil' do
+        expect(cache_store.read(key)).to be_nil
+      end
+    end
+  end
+
+  describe '#write' do
+    it 'stores data in the cache' do
+      cache_store.write(key, data, ttl)
+      expect(cache_store.read(key)).to eq(data)
+    end
+  end
+
+  describe '#delete' do
+    context 'when data is cached' do
+      before do
+        cache_store.write(key, data, ttl)
+      end
+
+      it 'removes the data from the cache' do
+        cache_store.delete(key)
+        expect(cache_store.read(key)).to be_nil
+      end
+    end
+  end
+end
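Reviewer note, not part of the patch: below is a minimal end-to-end sketch of how the pieces introduced in this diff are meant to fit together: the in-memory `CacheStore`, a `CachePolicy` with a no-cache endpoint and a custom TTL, and a Faraday connection supplying fresh responses. The base URL and endpoint paths are placeholders, and the `require` paths assume the layout shown in `lib/api_wrapper/cache/README.md`.

```ruby
require 'faraday'
require 'api_wrapper/cache/cache_policy'
require 'api_wrapper/cache/cache_store'

# In-memory store from this diff; any subclass of CacheStore that
# implements #read and #write (see the README) can be swapped in.
store  = ApiWrapper::Cache::CacheStore.new
policy = ApiWrapper::Cache::CachePolicy.new(store, 300) # global TTL: 5 minutes

policy.add_no_cache_endpoint('/status')      # always hit the network for this path
policy.add_custom_ttl('/reports/daily', 600) # cache this endpoint for 10 minutes

conn = Faraday.new(url: 'https://api.example.com') # placeholder base URL

# The first call runs the block and caches the response body; later calls
# within the TTL get a Faraday::Response rebuilt from the cached body.
report = policy.fetch('/reports/daily') { conn.get('/reports/daily') }
puts report.body

# Bypass the cache explicitly when fresh data is required.
fresh = policy.fetch('/reports/daily', force_refresh: true) { conn.get('/reports/daily') }
puts fresh.body
```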
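`redis_cache_store.rb` is still a TODO stub in this diff, so the following is not that implementation. It is only an illustration of the custom-store extension point the README describes, using the `redis` gem's `get`/`setex`/`del` commands; the class name `SimpleRedisStore` and the connection setup are assumptions made for the example.

```ruby
require 'redis'
require 'api_wrapper/cache/cache_store'

# Hypothetical Redis-backed store. Values are stored as plain strings,
# which matches CachePolicy caching response bodies. This is not the
# planned RedisCacheStore, just an illustration of the extension point.
class SimpleRedisStore < ApiWrapper::Cache::CacheStore
  def initialize(redis: Redis.new)
    super()
    @redis = redis
  end

  # Redis drops expired keys itself, so a plain GET is enough.
  def read(key)
    @redis.get(key)
  end

  # SETEX stores the value with a TTL; fall back to SET when no TTL is given.
  def write(key, value, ttl)
    ttl ? @redis.setex(key, ttl, value) : @redis.set(key, value)
  end

  def delete(key)
    @redis.del(key)
  end
end

# Used exactly like the in-memory store:
# policy = ApiWrapper::Cache::CachePolicy.new(SimpleRedisStore.new, 300)
```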