Skip to content

Commit

Permalink
PBM-1252 check for "an inserted document is too large" error (#128)
Browse files Browse the repository at this point in the history
  • Loading branch information
olexandr-havryliak authored Mar 7, 2024
1 parent 0f7ead7 commit 29bec5f
Showing 1 changed file with 64 additions and 0 deletions.
64 changes: 64 additions & 0 deletions pbm-functional/pytest/test_PBM-1252.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import pytest
import pymongo
import bson
import testinfra
import time
import os
import docker
import random
import string
import json

from datetime import datetime
from cluster import Cluster

@pytest.fixture(scope="package")
def docker_client():
    """Package-wide Docker client configured from the environment (DOCKER_HOST, etc.)."""
    env_client = docker.from_env()
    return env_client

@pytest.fixture(scope="package")
def config():
    """Topology for the test replica set: a single-member set named rs1."""
    members = [{"host": "rs101"}]
    return {"_id": "rs1", "members": members}

@pytest.fixture(scope="package")
def cluster(config):
    """Build the Cluster wrapper for the package, passing --directoryperdb to mongod."""
    rs_cluster = Cluster(config, mongod_extra_args="--directoryperdb")
    return rs_cluster

@pytest.fixture(scope="function")
def start_cluster(cluster,request):
    """Per-test setup/teardown: (re)create the cluster with PBM on filesystem storage.

    Yields True once the cluster is running and PBM is configured; always
    destroys the cluster (and its backups) afterwards.
    """
    try:
        # Destroy any leftover cluster from a previous test before creating a fresh one.
        cluster.destroy()
        cluster.create()
        cluster.setup_pbm()
        # Make the shared backup directory writable and wipe stale backup files.
        os.chmod("/backups",0o777)
        os.system("rm -rf /backups/*")
        # Raise the WiredTiger cache at runtime so the heavy-load test has headroom.
        pymongo.MongoClient(cluster.connection).admin.command( { "setParameter": 1, "wiredTigerEngineRuntimeConfig": "cache_size=4G"} )
        # Point PBM at filesystem storage with compression disabled.
        result = cluster.exec_pbm_cli("config --set storage.type=filesystem --set storage.filesystem.path=/backups --set backup.compression=none --out json")
        assert result.rc == 0
        Cluster.log("Setup PBM with fs storage:\n" + result.stdout)
        yield True
    finally:
        # Dump container logs only in verbose runs; always clean up the cluster.
        if request.config.getoption("--verbose"):
            cluster.get_logs()
        cluster.destroy(cleanup_backups=True)

@pytest.mark.timeout(3600,func_only=True)
def test_load(start_cluster,cluster):
    """Reproduce PBM-1252: a physical backup over ~1500 databases x 63 indexes each.

    Creates enough databases/collections/indexes that the backup's shard file
    list approaches the BSON document size limit ("an inserted document is too
    large"), then runs a physical backup.
    """
    cluster.check_pbm_status()

    # 63 single-field indexes: together with the implicit _id index this hits
    # the 64-indexes-per-collection ceiling, maximizing per-collection files.
    indexes = [pymongo.IndexModel(str(i), background=True) for i in range(63)]

    # One client for all iterations. The original created a new MongoClient on
    # every one of the 1500 iterations and never closed any of them, leaking
    # connections for the duration of the test.
    client = pymongo.MongoClient(cluster.connection)
    try:
        for i in range(1500):
            # Random 63-char database name (63 is the max db-name length).
            database = ''.join(random.choice(string.ascii_lowercase) for _ in range(63))
            db = client[database]
            db.create_collection("test_collection")
            db["test_collection"].create_indexes(indexes)
            Cluster.log(database + ": " + str(i))
    finally:
        client.close()

    backup = cluster.make_backup("physical")

Check failure on line 61 in pbm-functional/pytest/test_PBM-1252.py

View workflow job for this annotation

GitHub Actions / JUnit Test Report

test_PBM-1252.test_load

AssertionError: set shard's files list: an inserted document is too large
Raw output
start_cluster = True, cluster = <cluster.Cluster object at 0x7f6f60477850>

    @pytest.mark.timeout(3600,func_only=True)
    def test_load(start_cluster,cluster):
        cluster.check_pbm_status()
        indexes = []
    
        for i in range(63):
            indexes.append(pymongo.IndexModel(str(i),background=True))
    
        for i in range(1500):
            database = ''.join(random.choice(string.ascii_lowercase) for _ in range(63))
            client=pymongo.MongoClient(cluster.connection)
            db = client[database]
            db.create_collection("test_collection")
            db["test_collection"].create_indexes(indexes)
            Cluster.log( database + ": " + str(i))
    
>       backup = cluster.make_backup("physical")

test_PBM-1252.py:61: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <cluster.Cluster object at 0x7f6f60477850>, type = 'physical'

    def make_backup(self, type):
        n = testinfra.get_host("docker://" + self.pbm_cli)
        timeout = time.time() + 120
        while True:
            running = self.get_status()['running']
            Cluster.log("Current operation: " + str(running))
            if not running:
                if type:
                    start = n.check_output(
                        'pbm backup --out=json --type=' + type)
                else:
                    start = n.check_output('pbm backup --out=json')
                name = json.loads(start)['name']
                Cluster.log("Backup started")
                break
            if time.time() > timeout:
                assert False
            time.sleep(1)
        timeout = time.time() + 900
        while True:
            status = self.get_status()
            Cluster.log("Current operation: " + str(status['running']))
            if status['backups']['snapshot']:
                for snapshot in status['backups']['snapshot']:
                    if snapshot['name'] == name:
                        if snapshot['status'] == 'done':
                            Cluster.log("Backup found: " + str(snapshot))
                            return name
                            break
                        elif snapshot['status'] == 'error':
                            self.get_logs()
>                           assert False, snapshot['error']
E                           AssertionError: set shard's files list: an inserted document is too large

cluster.py:404: AssertionError
cluster.make_restore(backup,restart_cluster=True, check_pbm_status=True,timeout=1200)
Cluster.log("Finished successfully")

0 comments on commit 29bec5f

Please sign in to comment.