diff --git a/gcloud/bigtable/row_data.py b/gcloud/bigtable/row_data.py
index 5191a1759b16..b6a52405f8b9 100644
--- a/gcloud/bigtable/row_data.py
+++ b/gcloud/bigtable/row_data.py
@@ -323,14 +323,18 @@ def consume_all(self, max_loops=None):
                 break
 
 
-class ReadRowsResponseError(RuntimeError):
-    """Exception raised to to invalid chunk / response data from back-end."""
+class InvalidReadRowsResponse(RuntimeError):
+    """Exception raised due to invalid response data from back-end."""
 
 
-def _raise_if(predicate):
+class InvalidChunk(RuntimeError):
+    """Exception raised due to invalid chunk data from back-end."""
+
+
+def _raise_if(predicate, *args):
     """Helper for validation methods."""
     if predicate:
-        raise ReadRowsResponseError()
+        raise InvalidChunk(*args)
 
 
 class PartialCellV2(object):
@@ -408,7 +412,6 @@ def rows(self):
         :rtype: dict
         :returns: Dictionary of :class:`PartialRowData`.
         """
-        _raise_if(self.state not in (self.NEW_ROW,))
         # NOTE: To avoid duplicating large objects, this is just the
         # mutable private data.
         return self._rows
@@ -534,7 +537,7 @@ def consume_next(self):
 
         if self._last_scanned_row_key is None:  # first response
             if response.last_scanned_row_key:
-                raise ReadRowsResponseError()
+                raise InvalidReadRowsResponse()
 
         self._last_scanned_row_key = response.last_scanned_row_key
 
diff --git a/gcloud/bigtable/test_row_data.py b/gcloud/bigtable/test_row_data.py
index eb85a8bd9fe7..2c3c9ba260f5 100644
--- a/gcloud/bigtable/test_row_data.py
+++ b/gcloud/bigtable/test_row_data.py
@@ -649,11 +649,11 @@ def test__save_row_no_cell(self):
         self.assertTrue(prd._rows[ROW_KEY] is row)
 
     def test_invalid_last_scanned_row_key_on_start(self):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
+        from gcloud.bigtable.row_data import InvalidReadRowsResponse
        response = _ReadRowsResponseV2(chunks=(), last_scanned_row_key='ABC')
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
-        with self.assertRaises(ReadRowsResponseError):
+        with self.assertRaises(InvalidReadRowsResponse):
             prd.consume_next()
 
     def test_valid_last_scanned_row_key_on_start(self):
@@ -666,16 +666,16 @@ def test_valid_last_scanned_row_key_on_start(self):
         self.assertEqual(prd._last_scanned_row_key, 'AFTER')
 
     def test_invalid_empty_chunk(self):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
+        from gcloud.bigtable.row_data import InvalidChunk
         chunks = _generate_cell_chunks([''])
         response = _ReadRowsResponseV2(chunks)
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
-        with self.assertRaises(ReadRowsResponseError):
+        with self.assertRaises(InvalidChunk):
             prd.consume_next()
 
     def test_invalid_empty_second_chunk(self):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
+        from gcloud.bigtable.row_data import InvalidChunk
         chunks = _generate_cell_chunks(['', ''])
         first = chunks[0]
         first.row_key = b'RK'
@@ -684,32 +684,23 @@ def test_invalid_empty_second_chunk(self):
         response = _ReadRowsResponseV2(chunks)
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
-        with self.assertRaises(ReadRowsResponseError):
+        with self.assertRaises(InvalidChunk):
             prd.consume_next()
 
-    # JSON Error cases
+    # JSON Error cases: invalid chunks
 
     def _fail_during_consume(self, testcase_name):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
-        chunks, _ = self._load_json_test(testcase_name)
+        from gcloud.bigtable.row_data import InvalidChunk
+        chunks, results = self._load_json_test(testcase_name)
         response = _ReadRowsResponseV2(chunks)
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
-        with self.assertRaises(ReadRowsResponseError):
+        with self.assertRaises(InvalidChunk):
             prd.consume_next()
-
-    def _fail_during_rows(self, testcase_name):
-        from gcloud.bigtable.row_data import ReadRowsResponseError
-        chunks, _ = self._load_json_test(testcase_name)
-        response = _ReadRowsResponseV2(chunks)
-        iterator = _MockCancellableIterator(response)
-        prd = self._makeOne(iterator)
-        prd.consume_next()
-        with self.assertRaises(ReadRowsResponseError):
-            _ = prd.rows
-
-    def test_invalid_no_commit(self):
-        self._fail_during_rows('invalid - no commit')
+        expected_result = self._sort_flattened_cells(
+            [result for result in results if not result['error']])
+        flattened = self._sort_flattened_cells(_flatten_cells(prd))
+        self.assertEqual(flattened, expected_result)
 
     def test_invalid_no_cell_key_before_commit(self):
         self._fail_during_consume('invalid - no cell key before commit')
@@ -727,9 +718,6 @@ def test_invalid_no_commit_between_rows(self):
 
     def test_invalid_no_commit_after_first_row(self):
         self._fail_during_consume('invalid - no commit after first row')
 
-    def test_invalid_last_row_missing_commit(self):
-        self._fail_during_rows('invalid - last row missing commit')
-
     def test_invalid_duplicate_row_key(self):
         self._fail_during_consume('invalid - duplicate row key')
@@ -751,21 +739,44 @@ def test_invalid_reset_with_chunk(self):
     def test_invalid_commit_with_chunk(self):
         self._fail_during_consume('invalid - commit with chunk')
 
+    # JSON Error cases: incomplete final row
+
+    def _sort_flattened_cells(self, flattened):
+        import operator
+        key_func = operator.itemgetter('rk', 'fm', 'qual')
+        return sorted(flattened, key=key_func)
+
+    def _incomplete_final_row(self, testcase_name):
+        chunks, results = self._load_json_test(testcase_name)
+        response = _ReadRowsResponseV2(chunks)
+        iterator = _MockCancellableIterator(response)
+        prd = self._makeOne(iterator)
+        prd.consume_next()
+        self.assertEqual(prd.state, prd.ROW_IN_PROGRESS)
+        expected_result = self._sort_flattened_cells(
+            [result for result in results if not result['error']])
+        flattened = self._sort_flattened_cells(_flatten_cells(prd))
+        self.assertEqual(flattened, expected_result)
+
+    def test_invalid_no_commit(self):
+        self._incomplete_final_row('invalid - no commit')
+
+    def test_invalid_last_row_missing_commit(self):
+        self._incomplete_final_row('invalid - last row missing commit')
+
     # Non-error cases
 
     _marker = object()
 
     def _match_results(self, testcase_name, expected_result=_marker):
-        import operator
-        key_func = operator.itemgetter('rk', 'fm', 'qual')
         chunks, results = self._load_json_test(testcase_name)
         response = _ReadRowsResponseV2(chunks)
         iterator = _MockCancellableIterator(response)
         prd = self._makeOne(iterator)
         prd.consume_next()
-        flattened = sorted(_flatten_cells(prd), key=key_func)
+        flattened = self._sort_flattened_cells(_flatten_cells(prd))
         if expected_result is self._marker:
-            expected_result = sorted(results, key=key_func)
+            expected_result = self._sort_flattened_cells(results)
         self.assertEqual(flattened, expected_result)
 
     def test_bare_commit_implies_ts_zero(self):
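
Reviewer note: below is a minimal caller-side sketch (not part of the patch) of how the split exception types behave once this lands. `PartialRowsData` is assumed to be the class these tests exercise (the diff only shows its methods), and `read_rows_with_partial_result` / `response_iterator` are hypothetical names used purely for illustration.

```python
# Hedged sketch, not part of this PR. Assumes PartialRowsData is the class
# under test; read_rows_with_partial_result and response_iterator are
# hypothetical names.
from gcloud.bigtable.row_data import (
    InvalidChunk,
    InvalidReadRowsResponse,
    PartialRowsData,
)


def read_rows_with_partial_result(response_iterator):
    """Consume a read-rows stream and return the rows merged so far."""
    prd = PartialRowsData(response_iterator)
    try:
        prd.consume_all()
    except InvalidReadRowsResponse:
        # Malformed response, e.g. the first response carried a
        # last_scanned_row_key.
        raise
    except InvalidChunk:
        # Chunk-level validation failed inside _raise_if(); any *args
        # passed to _raise_if() surface on the exception for diagnostics.
        raise
    # With this patch, accessing .rows no longer raises for an incomplete
    # final row; callers simply see the rows committed so far.
    return prd.rows
```

This mirrors the test changes: an incomplete final row is now asserted via `prd.state == prd.ROW_IN_PROGRESS` plus the committed cells, instead of an error raised on `.rows` access.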