Merge branch 'master' into simulators-sdk-second-models
abdullah-cognite authored Feb 4, 2025
2 parents 7ee0ed6 + 705ea61 commit 79e311b
Showing 22 changed files with 147 additions and 50 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -1,7 +1,7 @@
---
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.8.6
rev: v0.9.3
hooks:
- id: ruff
args:
@@ -48,7 +48,7 @@ repos:
require_serial: true # avoid possible race conditions

- repo: https://github.com/jsh9/pydoclint # Run after 'custom-checks' as these may auto-fix
rev: 0.5.19
rev: 0.6.0
hooks:
- id: pydoclint
require_serial: true # Spammy in run-all scenarios (more than fast enough already)
18 changes: 17 additions & 1 deletion CHANGELOG.md
@@ -21,6 +21,22 @@ Changes are grouped as follows
### Added
- Support for the `/simulators/models` and `/simulators/models/revisions` API endpoints.
- Support for the `/simulators` and `/simulators/integration` API endpoints.
### Changed
- Set the limit for create, update, and delete endpoints for postgres gateway users to 1.

## [7.73.1] - 2025-01-23
### Fixed
- Data Workflows hotfix: mark `useTransformationCredentials` as optional.

## [7.73.0] - 2025-01-22
### Added
- Data Workflows: Support for `useTransformationCredentials` for the transformations task. This allows running
transformation tasks with pre-configured client credentials.

## [7.72.2] - 2025-01-20
### Fixed
- Updating a Kafka or MQTT source with a write object in `mode="replace"` no longer raises a `CogniteAPIError` with
422 status code.

## [7.72.1] - 2025-01-14
### Fixed
@@ -32,7 +48,7 @@ Changes are grouped as follows

## [7.71.4] - 2025-01-09
### Changed
- Update classes accepting insance_id now raise when id/external_id are also given.
- Update classes accepting instance_id now raise when id/external_id are also given.
### Added
- All update classes warn when external_id is ignored (when id is also given, it takes precedence)

5 changes: 5 additions & 0 deletions cognite/client/_api/postgres_gateway/users.py
@@ -27,6 +27,11 @@ class UsersAPI(APIClient):
def __init__(self, config: ClientConfig, api_version: str | None, cognite_client: CogniteClient) -> None:
super().__init__(config, api_version, cognite_client)
self._warning = FeaturePreviewWarning(api_maturity="beta", sdk_maturity="alpha", feature_name="Users")
self._CREATE_LIMIT = 1
self._UPDATE_LIMIT = 1
self._DELETE_LIMIT = 1
self._LIST_LIMIT = 100
self._RETRIEVE_LIMIT = 10

@overload
def __call__(
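The limit constants set in __init__ above cap how many items each request body may carry. A minimal sketch (not SDK code) of what a per-request limit of 1 means in practice: every create, update, or delete of Postgres Gateway users is sent as its own request, while list and retrieve can still batch 100 and 10 items respectively.

from collections.abc import Iterator, Sequence

def split_into_requests(items: Sequence[dict], limit: int) -> Iterator[list[dict]]:
    # Chunk a payload roughly the way the API client does: at most `limit` items per request body.
    for i in range(0, len(items), limit):
        yield list(items[i : i + limit])

users = [{"username": "alice"}, {"username": "bob"}, {"username": "carol"}]
print(list(split_into_requests(users, limit=1)))    # three single-item requests (create/update/delete)
print(list(split_into_requests(users, limit=100)))  # one batched request (list)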
9 changes: 2 additions & 7 deletions cognite/client/_api/user_profiles.py
@@ -1,6 +1,6 @@
from __future__ import annotations

from typing import cast, overload
from typing import overload

from cognite.client._api_client import APIClient
from cognite.client._constants import DEFAULT_LIMIT_READ
@@ -76,16 +76,11 @@ def retrieve(self, user_identifier: str | SequenceNotStr[str]) -> UserProfile |
>>> res = client.iam.user_profiles.retrieve(["bar", "baz"])
"""
identifiers = UserIdentifierSequence.load(user_identifier)
profiles = self._retrieve_multiple(
return self._retrieve_multiple(
list_cls=UserProfileList,
resource_cls=UserProfile,
identifiers=identifiers,
)
if identifiers.is_singleton():
return profiles
# TODO: The API does not guarantee any ordering (against style guidelines, no timeline for fix)
# so we sort manually for now:
return UserProfileList(cast(list[UserProfile], [profiles.get(user) for user in user_identifier]))

def search(self, name: str, limit: int = DEFAULT_LIMIT_READ) -> UserProfileList:
"""`Search for user profiles <https://developer.cognite.com/api#tag/User-profiles/operation/userProfilesSearch>`_
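With the manual reordering removed in this hunk, a multi-identifier retrieve returns profiles in whatever order the API responds with (the deleted TODO noted the API gives no ordering guarantee). A minimal sketch of restoring input order on the caller side, assuming UserProfile exposes the identifier as user_identifier; the retrieve call itself is the one shown in the docstring above.

# client is an existing CogniteClient
wanted = ["bar", "baz"]
profiles = client.iam.user_profiles.retrieve(wanted)
by_id = {p.user_identifier: p for p in profiles}  # assumption: UserProfile.user_identifier holds the key
ordered = [by_id[u] for u in wanted if u in by_id]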
4 changes: 3 additions & 1 deletion cognite/client/_api_client.py
@@ -1277,8 +1277,10 @@ def _clear_all_attributes(update_attributes: list[PropertySpec]) -> dict[str, di
for prop in update_attributes:
if prop.is_beta:
continue
elif prop.is_explicit_nullable_object:
clear_with: dict = {"setNull": True}
elif prop.is_object:
clear_with: dict = {"set": {}}
clear_with = {"set": {}}
elif prop.is_list:
clear_with = {"set": []}
elif prop.is_nullable:
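A standalone sketch (not SDK code) of the branch added in this hunk: properties flagged is_explicit_nullable_object are cleared with setNull instead of an empty set object. Only the branches whose bodies are visible above are reproduced.

def clear_value(is_explicit_nullable_object: bool, is_object: bool, is_list: bool) -> dict:
    # Mirrors the elif-chain above for the visible branches only.
    if is_explicit_nullable_object:
        return {"setNull": True}  # new behaviour, e.g. hosted extractor authentication objects
    if is_object:
        return {"set": {}}
    if is_list:
        return {"set": []}
    raise NotImplementedError("remaining branches are not shown in this hunk")

print(clear_value(True, True, False))   # {'setNull': True}  (previously {'set': {}})
print(clear_value(False, True, False))  # {'set': {}}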
2 changes: 1 addition & 1 deletion cognite/client/_version.py
@@ -1,5 +1,5 @@
from __future__ import annotations

__version__ = "7.72.2"
__version__ = "7.73.1"

__api_subversion__ = "20230101"
2 changes: 2 additions & 0 deletions cognite/client/data_classes/_base.py
@@ -447,6 +447,8 @@ class PropertySpec:
is_nullable: bool = True
# Used to skip replace when the value is None
is_beta: bool = False
# Objects that are nullable and support setNull. This is hosted extractor mqtt/kafka sources
is_explicit_nullable_object: bool = False

def __post_init__(self) -> None:
assert not (self.is_list and self.is_object), "PropertySpec cannot be both list and object"
3 changes: 1 addition & 2 deletions cognite/client/data_classes/data_modeling/instances.py
@@ -233,8 +233,7 @@ def load(
view_tuple = tuple(view_id_str.split("/", 1))
if len(view_tuple) != 2:
warnings.warn(
f"Unknown type of view id: {view_id_str}, expected format <external_id>/<version>. "
"Skipping...",
f"Unknown type of view id: {view_id_str}, expected format <external_id>/<version>. Skipping...",
stacklevel=2,
)
continue
cognite/client/data_classes/hosted_extractors/destinations.py
@@ -145,7 +145,10 @@ def target_data_set_id(self) -> DestinationUpdate._TargetDataSetIdUpdate:

@classmethod
def _get_update_properties(cls, item: CogniteResource | None = None) -> list[PropertySpec]:
return [PropertySpec("credentials", is_nullable=True), PropertySpec("target_data_set_id", is_nullable=True)]
return [
PropertySpec("credentials", is_nullable=True, is_object=True, is_explicit_nullable_object=True),
PropertySpec("target_data_set_id", is_nullable=True),
]


class DestinationWriteList(CogniteResourceList[DestinationWrite], ExternalIDTransformerMixin):
12 changes: 6 additions & 6 deletions cognite/client/data_classes/hosted_extractors/sources.py
@@ -391,10 +391,10 @@ def _get_update_properties(cls, item: CogniteResource | None = None) -> list[Pro
return [
PropertySpec("host", is_nullable=False),
PropertySpec("port", is_nullable=True),
PropertySpec("authentication", is_nullable=True, is_object=True),
PropertySpec("authentication", is_nullable=True, is_object=True, is_explicit_nullable_object=True),
PropertySpec("use_tls", is_nullable=False),
PropertySpec("ca_certificate", is_nullable=True, is_object=True),
PropertySpec("auth_certificate", is_nullable=True, is_object=True),
PropertySpec("ca_certificate", is_nullable=True, is_object=True, is_explicit_nullable_object=True),
PropertySpec("auth_certificate", is_nullable=True, is_object=True, is_explicit_nullable_object=True),
]


@@ -662,10 +662,10 @@ def auth_certificate(self) -> _AuthCertificateUpdate:
def _get_update_properties(cls, item: CogniteResource | None = None) -> list[PropertySpec]:
return [
PropertySpec("bootstrap_brokers", is_nullable=False),
PropertySpec("authentication", is_nullable=True, is_object=True),
PropertySpec("authentication", is_nullable=True, is_object=True, is_explicit_nullable_object=True),
PropertySpec("use_tls", is_nullable=False),
PropertySpec("ca_certificate", is_nullable=True, is_object=True),
PropertySpec("auth_certificate", is_nullable=True, is_object=True),
PropertySpec("ca_certificate", is_nullable=True, is_object=True, is_explicit_nullable_object=True),
PropertySpec("auth_certificate", is_nullable=True, is_object=True, is_explicit_nullable_object=True),
]


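Together with the _api_client.py and _base.py changes above, flagging these object properties as is_explicit_nullable_object changes what a replace-mode update sends for fields that are omitted. A rough illustration of the resulting update fragment for an omitted authentication object, assuming the body follows the clearing rules shown earlier; this is the fix behind the 7.72.2 changelog entry about the 422 error.

# Before this commit (rejected with a 422 for Kafka/MQTT sources):
old_fragment = {"authentication": {"set": {}}}
# After this commit (explicit null):
new_fragment = {"authentication": {"setNull": True}}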
2 changes: 1 addition & 1 deletion cognite/client/data_classes/sequences.py
@@ -663,7 +663,7 @@ def __init__(
col_length = len(columns)
if wrong_length := [r for r in rows if len(r.values) != col_length]:
raise ValueError(
f"Rows { [r.row_number for r in wrong_length] } have wrong number of values, expected {col_length}"
f"Rows {[r.row_number for r in wrong_length]} have wrong number of values, expected {col_length}"
)
self.rows = rows
self.columns: SequenceColumnList = columns
19 changes: 14 additions & 5 deletions cognite/client/data_classes/workflows.py
@@ -292,7 +292,7 @@ def dump(self, camel_case: bool = True) -> dict[str, Any]:
if self.run_time:
simulation["runTime" if camel_case else "run_time"] = self.run_time
if self.inputs:
simulation["inputs" if camel_case else "inputs"] = [item.dump(camel_case) for item in self.inputs]
simulation["inputs"] = [item.dump(camel_case) for item in self.inputs]

return {"simulation": simulation}

@@ -304,30 +304,39 @@ class TransformationTaskParameters(WorkflowTaskParameters):
Args:
external_id (str): The external ID of the transformation to be called.
concurrency_policy (Literal['fail', 'restartAfterCurrent', 'waitForCurrent']): Determines the behavior of the task if the Transformation is already running. ``fail``: The task fails if another instance of the Transformation is currently running. ``waitForCurrent``: The task will pause and wait for the already running Transformation to complete. Once completed, the task is completed. This mode is useful for preventing redundant Transformation runs. ``restartAfterCurrent``: The task waits for the ongoing Transformation to finish. After completion, the task restarts the Transformation. This mode ensures that the most recent data can be used by following tasks.
use_transformation_credentials (bool): If set to `true`, the transformation will be run using the client credentials configured on the transformation. If set to `false`, the transformation will be run using the client credentials used to trigger the workflow.
"""

task_type = "transformation"

def __init__(
self, external_id: str, concurrency_policy: Literal["fail", "restartAfterCurrent", "waitForCurrent"] = "fail"
self,
external_id: str,
concurrency_policy: Literal["fail", "restartAfterCurrent", "waitForCurrent"] = "fail",
use_transformation_credentials: bool = False,
) -> None:
self.external_id = external_id
self.concurrency_policy = concurrency_policy
self.use_transformation_credentials = use_transformation_credentials

@classmethod
def _load(cls, resource: dict, cognite_client: CogniteClient | None = None) -> TransformationTaskParameters:
return cls(
resource["transformation"]["externalId"],
resource["transformation"]["concurrencyPolicy"],
resource[cls.task_type]["externalId"],
resource[cls.task_type].get("concurrencyPolicy", "fail"),
resource[cls.task_type].get("useTransformationCredentials", False),
)

def dump(self, camel_case: bool = True) -> dict[str, Any]:
transformation = {
"externalId" if camel_case else "external_id": self.external_id,
"concurrencyPolicy" if camel_case else "concurrency_policy": self.concurrency_policy,
"useTransformationCredentials"
if camel_case
else "use_transformation_credentials": self.use_transformation_credentials,
}

return {"transformation": transformation}
return {self.task_type: transformation}


class CDFTaskParameters(WorkflowTaskParameters):
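A short usage sketch of the new use_transformation_credentials parameter, based on the constructor and dump method shown above; the surrounding WorkflowTask/workflow definition wiring is omitted.

from cognite.client.data_classes.workflows import TransformationTaskParameters

params = TransformationTaskParameters(
    external_id="my-transformation",
    concurrency_policy="waitForCurrent",
    use_transformation_credentials=True,  # run with the credentials configured on the transformation
)
print(params.dump())
# {'transformation': {'externalId': 'my-transformation',
#                     'concurrencyPolicy': 'waitForCurrent',
#                     'useTransformationCredentials': True}}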
2 changes: 1 addition & 1 deletion cognite/client/utils/_text.py
@@ -89,4 +89,4 @@ def shorten(obj: Any, width: int = 20, placeholder: str = "...") -> str:
s = obj if isinstance(obj, str) else repr(obj)
if len(s) <= width:
return s
return f"{s[:width-n]}{placeholder}"
return f"{s[: width - n]}{placeholder}"
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,7 +1,7 @@
[tool.poetry]
name = "cognite-sdk"

version = "7.72.2"
version = "7.73.1"

description = "Cognite Python SDK"
readme = "README.md"
2 changes: 1 addition & 1 deletion scripts/custom_checks/docstr_examples.py
@@ -63,7 +63,7 @@ def parse_example_section(lines, ex_idx) -> tuple[bool, set[int]]:
def fix_single_file(path: Path) -> str | None:
was_fixed = []
dotted = path_to_importable(path)
full_text = path.read_text()
full_text = path.read_text(encoding="utf-8")
module = importlib.import_module(dotted)
for cls_name, cls in get_valid_members(module, dotted):
for method_name, ex_idx, docstr, lines in get_info_on_valid_methods(cls, full_text):
12 changes: 6 additions & 6 deletions tests/tests_integration/test_api/test_datapoint_subscriptions.py
@@ -248,14 +248,14 @@ def test_iterate_data_subscription_initial_call(
)
batch = next(subscription_changes)

assert (
len(batch.subscription_changes.added) > 0
), "The subscription used for testing datapoint subscriptions must have at least one time series"
assert len(batch.subscription_changes.added) > 0, (
"The subscription used for testing datapoint subscriptions must have at least one time series"
)

batch = next(subscription_changes)
assert (
len(batch.subscription_changes.added) == 0
), "There should be no more timeseries in the subsequent batches"
assert len(batch.subscription_changes.added) == 0, (
"There should be no more timeseries in the subsequent batches"
)

def test_iterate_data_subscription_changed_time_series(
self, cognite_client: CogniteClient, time_series_external_ids: list[str]
19 changes: 14 additions & 5 deletions tests/tests_integration/test_api/test_datapoints.py
@@ -480,7 +480,7 @@ def test_insert_read_delete_dps(self, cognite_client, ts_create_in_dms):


@pytest.fixture
def queries_for_iteration():
def queries_for_iteration(all_test_time_series):
# Mix of ids, external_ids, and instance_ids
return [
DatapointsQuery(
@@ -495,7 +495,7 @@ def queries_for_iteration():
start=132710400000,
),
DatapointsQuery(
id=1162585250935723, # 'PYSDK integration test 089: weekly values, 1950-2000, string,
id=all_test_time_series.get(external_id="PYSDK integration test 089: weekly values, 1950-2000, string").id,
start=-334195200000,
end=270000000000 + 1,
),
@@ -504,11 +504,15 @@ def queries_for_iteration():
start=1166400000,
),
DatapointsQuery(
id=7134564432527017, # 'PYSDK integration test 108: every millisecond, 1969-12-31 23:59:58.500 - 1970-01-01 00:00:01.500, numeric,
id=all_test_time_series.get(
external_id="PYSDK integration test 108: every millisecond, 1969-12-31 23:59:58.500 - 1970-01-01 00:00:01.500, numeric"
).id,
start=-1118,
),
DatapointsQuery(
id=8856777097037888, # 'PYSDK integration test 114: 1mill dps, random distribution, 1950-2020, numeric,
id=all_test_time_series.get(
external_id="PYSDK integration test 114: 1mill dps, random distribution, 1950-2020, numeric"
).id,
start=-111360848336,
),
DatapointsQuery(
@@ -529,7 +533,9 @@ def queries_for_iteration():
start=1702252800000,
),
DatapointsQuery(
id=6236123831652881, # 'PYSDK integration test 121: mixed status codes, daily values, 2023-2024, numeric,
id=all_test_time_series.get(
external_id="PYSDK integration test 121: mixed status codes, daily values, 2023-2024, numeric"
).id,
start=1678924800000,
),
DatapointsQuery(
@@ -1874,6 +1880,9 @@ def test_status_codes_affect_aggregate_calculations(self, retrieve_endpoints, ts
[349079144838.96564, 512343998481.7162, 159180999248.7119, 529224146671.5178],
)

@pytest.mark.skip(
reason="TODO(doctrino): Skipped while awaiting https://github.com/cognitedata/cognite-sdk-python/pull/2102"
)
def test_timezone_agg_query_dst_transitions(self, all_retrieve_endpoints, dps_queries_dst_transitions):
expected_values1 = [0.23625579717753353, 0.02829928231631262, -0.0673823850533647, -0.20908049925449418]
expected_values2 = [-0.13218082741552517, -0.20824244773820486, 0.02566169899072951, 0.15040625644292185]
2 changes: 1 addition & 1 deletion tests/tests_integration/test_api/test_diagrams.py
@@ -12,7 +12,7 @@
)
from cognite.client.data_classes.data_modeling import NodeApply, NodeId, NodeOrEdgeData, Space, SpaceApply, ViewId

PNID_FILE_EXTERNAL_ID = "mypnid.pdf" ""
PNID_FILE_EXTERNAL_ID = "mypnid.pdf"
DIAGRAM_SPACE = "diagram_space"

CDM_SPACE = "cdf_cdm"
@@ -7,6 +7,8 @@
EventHubSource,
EventHubSourceUpdate,
EventHubSourceWrite,
MQTT5SourceWrite,
Source,
SourceList,
)
from cognite.client.exceptions import CogniteAPIError
@@ -60,6 +62,32 @@ def test_create_update_retrieve_delete(self, cognite_client: CogniteClient) -> N
if created:
cognite_client.hosted_extractors.sources.delete(created.external_id, ignore_unknown_ids=True)

def test_create_update_replace_retrieve(self, cognite_client: CogniteClient) -> None:
original = MQTT5SourceWrite(
external_id=f"myMqttSource-{random_string(10)}",
host="mqtt.hsl.fi",
port=1883,
)

created: Source | None = None
try:
created = cognite_client.hosted_extractors.sources.create(original)

update = MQTT5SourceWrite(original.external_id, host="mqtt.hsl.fi", port=1884)

updated = cognite_client.hosted_extractors.sources.update(update, mode="replace")

assert updated.port == 1884

retrieved = cognite_client.hosted_extractors.sources.retrieve(created.external_id)

assert retrieved is not None
assert retrieved.external_id == created.external_id
assert retrieved.port == 1884
finally:
if created:
cognite_client.hosted_extractors.sources.delete(created.external_id, ignore_unknown_ids=True)

@pytest.mark.usefixtures("one_event_hub_source")
def test_list(self, cognite_client: CogniteClient) -> None:
res = cognite_client.hosted_extractors.sources.list(limit=1)