Commit

Fixed typos in comments
Signed-off-by: Marco Pracucci <marco@pracucci.com>
pracucci committed Dec 19, 2019
1 parent 54eefc4 commit 1249341
Showing 2 changed files with 13 additions and 12 deletions.
11 changes: 6 additions & 5 deletions pkg/cacheutil/memcached_client.go
@@ -44,6 +44,8 @@ type MemcachedClient interface {
 	GetMulti(ctx context.Context, keys []string) map[string][]byte

 	// SetAsync enqueues an asynchronous operation to store a key into memcached.
+	// Returns an error in case it fails to enqueue the operation. In case the
+	// underlying async operation will fail, the error will be tracked/logged.
 	SetAsync(ctx context.Context, key string, value []byte, ttl time.Duration) error

 	// Stop client and release underlying resources.
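The two added lines clarify SetAsync's error contract: the returned error only covers the enqueue step, while failures of the underlying asynchronous store are tracked and logged by the client. A minimal, self-contained sketch of that contract (not part of this commit; stubClient and its queue-full behavior are illustrative assumptions):

	package main

	import (
		"context"
		"errors"
		"fmt"
		"time"
	)

	// stubClient models only the contract documented above: the returned
	// error covers the enqueue step, while a failure of the async store
	// itself would be tracked/logged by the client, not returned.
	type stubClient struct{ full bool }

	func (c stubClient) SetAsync(ctx context.Context, key string, value []byte, ttl time.Duration) error {
		if c.full {
			return errors.New("the queue is full")
		}
		return nil // enqueued; a later store failure is only logged
	}

	func main() {
		c := stubClient{full: true}
		if err := c.SetAsync(context.Background(), "series:1", []byte("v"), 10*time.Minute); err != nil {
			// Enqueue failed, so the caller may drop the write or retry.
			fmt.Println("failed to enqueue memcached set:", err)
		}
	}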
@@ -261,7 +263,7 @@ func (c *memcachedClient) SetAsync(ctx context.Context, key string, value []byte
 			span.Finish()
 			if err != nil {
 				c.failures.WithLabelValues(opSet).Inc()
-				level.Warn(c.logger).Log("msg", "failed to store item memcached", "key", key, "err", err)
+				level.Warn(c.logger).Log("msg", "failed to store item to memcached", "key", key, "err", err)
 				return
 			}

@@ -272,7 +274,7 @@ func (c *memcachedClient) SetAsync(ctx context.Context, key string, value []byte
 func (c *memcachedClient) GetMulti(ctx context.Context, keys []string) map[string][]byte {
 	batches, err := c.getMultiBatched(ctx, keys)
 	if err != nil {
-		level.Warn(c.logger).Log("msg", "failed to fetch keys from memcached", "err", err)
+		level.Warn(c.logger).Log("msg", "failed to fetch items from memcached", "err", err)

 		// In case we have both results and an error, it means some batch requests
 		// failed and other succeeded. In this case we prefer to log it and move on,
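The comment above documents GetMulti's partial-failure behavior: when some batches fail and others succeed, the error is logged and the successful results are still returned. A simplified reconstruction of that merge step (not the file's actual code; batchResult and its fields are assumed placeholders for the internal batch type):

	package main

	import "fmt"

	// batchResult is an assumed stand-in for the internal batch type.
	type batchResult struct {
		items map[string][]byte
		err   error
	}

	// mergeHits keeps the behavior described above: successful batches are
	// merged into the hit map even when other batches errored.
	func mergeHits(batches []batchResult) map[string][]byte {
		hits := map[string][]byte{}
		for _, batch := range batches {
			if batch.err != nil {
				continue // already logged; prefer partial results over failing
			}
			for key, value := range batch.items {
				hits[key] = value
			}
		}
		return hits
	}

	func main() {
		fmt.Println(mergeHits([]batchResult{
			{items: map[string][]byte{"a": []byte("1")}},
			{err: fmt.Errorf("batch failed")},
		}))
	}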
@@ -311,12 +313,11 @@ func (c *memcachedClient) getMultiBatched(ctx context.Context, keys []string) ([
 		numResults++
 	}

-	// Split input keys into batches and schedule a job for it.
+	// Spawn a goroutine for each batch request. The max concurrency will be
+	// enforced by getMultiSingle().
 	results := make(chan *memcachedGetMultiResult, numResults)
 	defer close(results)

-	// Spawn a goroutine for each batch request. The max concurrency will be
-	// enforced by getMultiSingle().
 	for batchStart := 0; batchStart < len(keys); batchStart += batchSize {
 		batchEnd := batchStart + batchSize
 		if batchEnd > len(keys) {
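The relocated comment now sits directly above the fan-out it describes: one goroutine per batch of keys, with the number of in-flight batches capped elsewhere (by getMultiSingle()). A self-contained sketch of that pattern, assuming maxConcurrency > 0 (the test file below treats 0 as disabled) and using a buffered-channel semaphore as a stand-in for the real concurrency gate:

	package main

	import "fmt"

	// fetchBatches spawns one goroutine per batch of keys and caps the
	// number of batches in flight with a buffered-channel semaphore.
	func fetchBatches(keys []string, batchSize, maxConcurrency int) [][]string {
		numBatches := (len(keys) + batchSize - 1) / batchSize
		results := make(chan []string, numBatches)
		gate := make(chan struct{}, maxConcurrency)

		for batchStart := 0; batchStart < len(keys); batchStart += batchSize {
			batchEnd := batchStart + batchSize
			if batchEnd > len(keys) {
				batchEnd = len(keys)
			}
			go func(batch []string) {
				gate <- struct{}{}        // acquire a concurrency slot
				defer func() { <-gate }() // release the slot when done
				results <- batch          // stand-in for the real memcached fetch
			}(keys[batchStart:batchEnd])
		}

		out := make([][]string, 0, numBatches)
		for i := 0; i < numBatches; i++ {
			out = append(out, <-results)
		}
		return out
	}

	func main() {
		fmt.Println(fetchBatches([]string{"a", "b", "c", "d", "e"}, 2, 2))
	}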
14 changes: 7 additions & 7 deletions pkg/cacheutil/memcached_client_test.go
@@ -148,7 +148,7 @@ func TestMemcachedClient_GetMulti(t *testing.T) {
 			},
 			expectedGetMultiCount: 1,
 		},
-		"should fetch keys in a multiple batches if the input keys is > the max batch size": {
+		"should fetch keys in multiple batches if the input keys is > the max batch size": {
 			maxBatchSize:   2,
 			maxConcurrency: 5,
 			initialItems: []memcache.Item{
@@ -164,7 +164,7 @@ func TestMemcachedClient_GetMulti(t *testing.T) {
 			},
 			expectedGetMultiCount: 2,
 		},
-		"should fetch keys in a multiple batches on input keys exact multiple of batch size": {
+		"should fetch keys in multiple batches on input keys exact multiple of batch size": {
 			maxBatchSize:   2,
 			maxConcurrency: 5,
 			initialItems: []memcache.Item{
@@ -182,7 +182,7 @@ func TestMemcachedClient_GetMulti(t *testing.T) {
 			},
 			expectedGetMultiCount: 2,
 		},
-		"should fetch keys in a multiple batches on input keys exact multiple of batch size with max concurrency disabled (0)": {
+		"should fetch keys in multiple batches on input keys exact multiple of batch size with max concurrency disabled (0)": {
 			maxBatchSize:   2,
 			maxConcurrency: 0,
 			initialItems: []memcache.Item{
@@ -200,7 +200,7 @@ func TestMemcachedClient_GetMulti(t *testing.T) {
 			},
 			expectedGetMultiCount: 2,
 		},
-		"should fetch keys in a multiple batches on input keys exact multiple of batch size with max concurrency lower than the batches": {
+		"should fetch keys in multiple batches on input keys exact multiple of batch size with max concurrency lower than the batches": {
 			maxBatchSize:   1,
 			maxConcurrency: 1,
 			initialItems: []memcache.Item{
@@ -254,7 +254,7 @@ func TestMemcachedClient_GetMulti(t *testing.T) {
 			},
 			expectedGetMultiCount: 1,
 		},
-		"should no hits on all keys missing": {
+		"should return no hits on all keys missing": {
 			maxBatchSize:   2,
 			maxConcurrency: 5,
 			initialItems: []memcache.Item{
@@ -268,7 +268,7 @@ func TestMemcachedClient_GetMulti(t *testing.T) {
 			},
 			expectedGetMultiCount: 2,
 		},
-		"should no hits on partial errors while fetching batches and no items found": {
+		"should return no hits on partial errors while fetching batches and no items found": {
 			maxBatchSize:         2,
 			maxConcurrency:       5,
 			mockedGetMultiErrors: 1,
@@ -281,7 +281,7 @@ func TestMemcachedClient_GetMulti(t *testing.T) {
 			expectedHits:          map[string][]byte{},
 			expectedGetMultiCount: 2,
 		},
-		"should no hits on all errors while fetching batches": {
+		"should return no hits on all errors while fetching batches": {
 			maxBatchSize:         2,
 			maxConcurrency:       5,
 			mockedGetMultiErrors: 2,
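These renamed cases are all entries in a table-driven test keyed by the descriptive strings fixed above. A minimal sketch of that pattern (the test name, package, and struct fields here are illustrative assumptions based on what is visible in the diff, not the file's actual code):

	package cacheutil_test

	import "testing"

	func TestGetMultiBatching(t *testing.T) {
		tests := map[string]struct {
			maxBatchSize          int
			maxConcurrency        int
			expectedGetMultiCount int
		}{
			"should fetch keys in multiple batches if the input keys is > the max batch size": {
				maxBatchSize:          2,
				maxConcurrency:        5,
				expectedGetMultiCount: 2,
			},
		}

		for testName, testData := range tests {
			t.Run(testName, func(t *testing.T) {
				// Exercise the client with testData and assert that the
				// number of underlying GetMulti calls matches
				// testData.expectedGetMultiCount.
				_ = testData
			})
		}
	}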
