Skip to content

Commit

Permalink
Merge branch 'master' into specify_version_salesforce_ingestor
Browse files Browse the repository at this point in the history
  • Loading branch information
skrydal authored Aug 13, 2024
2 parents 4469c57 + 3d4b3b9 commit 059e09d
Show file tree
Hide file tree
Showing 21 changed files with 1,984 additions and 49 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ import { SchemaField, StdDataType } from '../../../../../../../../types.generate
import { SectionHeader, StyledDivider } from './components';
import { mapStructuredPropertyValues } from '../../../../Properties/useStructuredProperties';
import StructuredPropertyValue from '../../../../Properties/StructuredPropertyValue';
import { EditColumn } from '../../../../Properties/Edit/EditColumn';
import { useGetEntityWithSchema } from '../../useGetEntitySchema';

const PropertyTitle = styled.div`
font-size: 14px;
Expand All @@ -13,6 +15,8 @@ const PropertyTitle = styled.div`

const PropertyWrapper = styled.div`
margin-bottom: 12px;
display: flex;
justify-content: space-between;
`;

const PropertiesWrapper = styled.div`
Expand All @@ -29,6 +33,7 @@ interface Props {

export default function FieldProperties({ expandedField }: Props) {
const { schemaFieldEntity } = expandedField;
const { refetch } = useGetEntityWithSchema(true);

if (!schemaFieldEntity?.structuredProperties?.properties?.length) return null;

Expand All @@ -43,23 +48,33 @@ export default function FieldProperties({ expandedField }: Props) {
const hasMultipleValues = valuesData.length > 1;

return (
<PropertyWrapper>
<PropertyTitle>{structuredProp.structuredProperty.definition.displayName}</PropertyTitle>
{hasMultipleValues ? (
<StyledList>
{valuesData.map((value) => (
<li>
<PropertyWrapper key={structuredProp.structuredProperty.urn}>
<div>
<PropertyTitle>
{structuredProp.structuredProperty.definition.displayName}
</PropertyTitle>
{hasMultipleValues ? (
<StyledList>
{valuesData.map((value) => (
<li>
<StructuredPropertyValue value={value} isRichText={isRichText} />
</li>
))}
</StyledList>
) : (
<>
{valuesData.map((value) => (
<StructuredPropertyValue value={value} isRichText={isRichText} />
</li>
))}
</StyledList>
) : (
<>
{valuesData.map((value) => (
<StructuredPropertyValue value={value} isRichText={isRichText} />
))}
</>
)}
))}
</>
)}
</div>
<EditColumn
structuredProperty={structuredProp.structuredProperty}
associatedUrn={schemaFieldEntity.urn}
values={valuesData.map((v) => v.value) || []}
refetch={refetch}
/>
</PropertyWrapper>
);
})}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,16 +1,19 @@
import { Button } from 'antd';
import React, { useState } from 'react';
import { PropertyRow } from '../types';
import EditStructuredPropertyModal from './EditStructuredPropertyModal';
import { StructuredPropertyEntity } from '../../../../../../types.generated';

interface Props {
propertyRow: PropertyRow;
structuredProperty?: StructuredPropertyEntity;
associatedUrn?: string;
values?: (string | number | null)[];
refetch?: () => void;
}

export function EditColumn({ propertyRow }: Props) {
export function EditColumn({ structuredProperty, associatedUrn, values, refetch }: Props) {
const [isEditModalVisible, setIsEditModalVisible] = useState(false);

if (!propertyRow.structuredProperty || propertyRow.structuredProperty?.definition.immutable) {
if (!structuredProperty || structuredProperty?.definition.immutable) {
return null;
}

Expand All @@ -21,9 +24,11 @@ export function EditColumn({ propertyRow }: Props) {
</Button>
<EditStructuredPropertyModal
isOpen={isEditModalVisible}
propertyRow={propertyRow}
structuredProperty={propertyRow.structuredProperty}
structuredProperty={structuredProperty}
associatedUrn={associatedUrn}
values={values}
closeModal={() => setIsEditModalVisible(false)}
refetch={refetch}
/>
</>
);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import { Button, Modal, message } from 'antd';
import React from 'react';
import React, { useEffect, useMemo } from 'react';
import styled from 'styled-components';
import { PropertyRow } from '../types';
import StructuredPropertyInput from '../../../components/styled/StructuredProperty/StructuredPropertyInput';
import { PropertyValueInput, StructuredPropertyEntity } from '../../../../../../types.generated';
import { useUpsertStructuredPropertiesMutation } from '../../../../../../graphql/structuredProperties.generated';
Expand All @@ -17,19 +16,33 @@ const Description = styled.div`

interface Props {
isOpen: boolean;
propertyRow: PropertyRow;
structuredProperty: StructuredPropertyEntity;
associatedUrn?: string;
values?: (string | number | null)[];
closeModal: () => void;
refetch?: () => void;
}

export default function EditStructuredPropertyModal({ isOpen, propertyRow, structuredProperty, closeModal }: Props) {
const { refetch } = useEntityContext();
const urn = useMutationUrn();
const initialValues = propertyRow.values?.map((v) => v.value) || [];
const { selectedValues, selectSingleValue, toggleSelectedValue, updateSelectedValues } =
export default function EditStructuredPropertyModal({
isOpen,
structuredProperty,
associatedUrn,
values,
closeModal,
refetch,
}: Props) {
const { refetch: entityRefetch } = useEntityContext();
const mutationUrn = useMutationUrn();
const urn = associatedUrn || mutationUrn;
const initialValues = useMemo(() => values || [], [values]);
const { selectedValues, selectSingleValue, toggleSelectedValue, updateSelectedValues, setSelectedValues } =
useEditStructuredProperty(initialValues);
const [upsertStructuredProperties] = useUpsertStructuredPropertiesMutation();

useEffect(() => {
setSelectedValues(initialValues);
}, [isOpen, initialValues, setSelectedValues]);

function upsertProperties() {
message.loading('Updating...');
upsertStructuredProperties({
Expand All @@ -51,7 +64,11 @@ export default function EditStructuredPropertyModal({ isOpen, propertyRow, struc
},
})
.then(() => {
refetch();
if (refetch) {
refetch();
} else {
entityRefetch();
}
message.destroy();
message.success('Successfully updated structured property!');
closeModal();
Expand All @@ -67,7 +84,7 @@ export default function EditStructuredPropertyModal({ isOpen, propertyRow, struc

return (
<Modal
title={propertyRow.displayName}
title={structuredProperty.definition.displayName}
onCancel={closeModal}
open={isOpen}
width={650}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,12 @@ export const PropertiesTab = () => {
propertyTableColumns.push({
title: '',
width: '10%',
render: (propertyRow: PropertyRow) => <EditColumn propertyRow={propertyRow} />,
render: (propertyRow: PropertyRow) => (
<EditColumn
structuredProperty={propertyRow.structuredProperty}
values={propertyRow.values?.map((v) => v.value) || []}
/>
),
} as any);
}

Expand Down
4 changes: 2 additions & 2 deletions docs/lineage/airflow.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ There's two actively supported implementations of the plugin, with different Air
| Approach | Airflow Version | Notes |
| --------- | --------------- | --------------------------------------------------------------------------- |
| Plugin v2 | 2.3.4+ | Recommended. Requires Python 3.8+ |
| Plugin v1 | 2.1+ | No automatic lineage extraction; may not extract lineage if the task fails. |
| Plugin v1 | 2.1 - 2.8 | No automatic lineage extraction; may not extract lineage if the task fails. |

If you're using Airflow older than 2.1, it's possible to use the v1 plugin with older versions of `acryl-datahub-airflow-plugin`. See the [compatibility section](#compatibility) for more details.

Expand Down Expand Up @@ -84,7 +84,7 @@ enabled = True # default

### Installation

The v1 plugin requires Airflow 2.1+ and Python 3.8+. If you're on older versions, it's still possible to use an older version of the plugin. See the [compatibility section](#compatibility) for more details.
The v1 plugin requires Airflow 2.1 - 2.8 and Python 3.8+. If you're on older versions, it's still possible to use an older version of the plugin. See the [compatibility section](#compatibility) for more details.

If you're using Airflow 2.3+, we recommend using the v2 plugin instead. If you need to use the v1 plugin with Airflow 2.3+, you must also set the environment variable `DATAHUB_AIRFLOW_PLUGIN_USE_V1_PLUGIN=true`.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,20 @@ def wrapper(*args, **kwargs):
return cast(_F, wrapper)


def _render_templates(task_instance: "TaskInstance") -> "TaskInstance":
    """Return a deep copy of ``task_instance`` with its Jinja templates rendered.

    Rendering is done on a copy so the live task instance is not mutated,
    while still giving the lineage extractors access to the resolved
    (templated) operator arguments. If the deepcopy or the rendering fails,
    the original, unrendered instance is returned so listener processing can
    continue on a best-effort basis.
    """
    # Render templates in a copy of the task instance.
    # This is necessary to get the correct operator args in the extractors.
    try:
        task_instance_copy = copy.deepcopy(task_instance)
        task_instance_copy.render_templates()
        return task_instance_copy
    except Exception as e:
        # Best-effort fallback: log at info level and return the unrendered
        # instance rather than failing the listener callback.
        logger.info(
            f"Error rendering templates in DataHub listener. Jinja-templated variables will not be extracted correctly: {e}"
        )
        return task_instance


class DataHubListener:
__name__ = "DataHubListener"

Expand Down Expand Up @@ -360,15 +374,7 @@ def on_task_instance_running(
f"DataHub listener got notification about task instance start for {task_instance.task_id}"
)

# Render templates in a copy of the task instance.
# This is necessary to get the correct operator args in the extractors.
try:
task_instance = copy.deepcopy(task_instance)
task_instance.render_templates()
except Exception as e:
logger.info(
f"Error rendering templates in DataHub listener. Jinja-templated variables will not be extracted correctly: {e}"
)
task_instance = _render_templates(task_instance)

# The type ignore is to placate mypy on Airflow 2.1.x.
dagrun: "DagRun" = task_instance.dag_run # type: ignore[attr-defined]
Expand Down Expand Up @@ -459,8 +465,17 @@ def on_task_instance_finish(
self, task_instance: "TaskInstance", status: InstanceRunResult
) -> None:
dagrun: "DagRun" = task_instance.dag_run # type: ignore[attr-defined]
task = self._task_holder.get_task(task_instance) or task_instance.task

task_instance = _render_templates(task_instance)

# We must prefer the task attribute, in case modifications to the task's inlets/outlets
# were made by the execute() method.
if getattr(task_instance, "task", None):
task = task_instance.task
else:
task = self._task_holder.get_task(task_instance)
assert task is not None

dag: "DAG" = task.dag # type: ignore[assignment]

datajob = AirflowGenerator.generate_datajob(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@

from airflow.plugins_manager import AirflowPlugin

from datahub_airflow_plugin import __package_name__
from datahub_airflow_plugin._airflow_compat import AIRFLOW_PATCHED
from datahub_airflow_plugin._airflow_shims import (
HAS_AIRFLOW_LISTENER_API,
Expand All @@ -23,7 +24,10 @@
from openlineage.airflow.utils import try_import_from_string # noqa: F401
except ImportError:
# If v2 plugin dependencies are not installed, we fall back to v1.
logger.debug("Falling back to v1 plugin due to missing dependencies.")
logger.warning(
"Falling back to the v1 DataHub plugin due to missing dependencies. "
f"Please install {__package_name__}[plugin-v2] to fix this."
)
_USE_AIRFLOW_LISTENER_INTERFACE = False


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
from airflow.lineage import PIPELINE_OUTLETS
from airflow.models.baseoperator import BaseOperator
from airflow.utils.module_loading import import_string
from cattr import structure
from datahub.api.entities.dataprocess.dataprocess_instance import InstanceRunResult
from datahub.telemetry import telemetry

Expand Down Expand Up @@ -52,6 +51,7 @@ def get_task_inlets_advanced(task: BaseOperator, context: Any) -> Iterable[Any]:
)

from airflow.lineage import AUTO
from cattr import structure

# pick up unique direct upstream task_ids if AUTO is specified
if AUTO.upper() in task_inlets or AUTO.lower() in task_inlets:
Expand Down
4 changes: 3 additions & 1 deletion metadata-ingestion-modules/airflow-plugin/tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@ deps =

# Respect the Airflow constraints files.
# We can't make ourselves work with the constraints of Airflow < 2.3.
py310-airflow24: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.4.3/constraints-3.10.txt
# The Airflow 2.4 constraints file requires a version of the sqlite provider whose
# hook type is missing the `conn_name_attr` property.
; py310-airflow24: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.4.3/constraints-3.10.txt
py310-airflow26: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.10.txt
py310-airflow27: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt
py310-airflow28: -c https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.10.txt
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
namespace com.linkedin.test

import com.linkedin.common.Urn
import com.linkedin.common.AuditStamp

/**
* Information about a Test Result
Expand All @@ -24,4 +25,15 @@ record TestResult {
*/
FAILURE
}
}

/**
* The md5 of the test definition that was used to compute this result.
* See TestInfo.testDefinition.md5 for more information.
*/
testDefinitionMd5: optional string

/**
* The audit stamp of when the result was computed, including the actor who computed it.
*/
lastComputed: optional AuditStamp
}
1 change: 1 addition & 0 deletions smoke-test/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -18,3 +18,4 @@ types-requests>=2.28.11.6,<=2.31.0.3
types-PyYAML
# https://github.com/docker/docker-py/issues/3256
requests<=2.31.0
deepdiff
33 changes: 33 additions & 0 deletions smoke-test/tests/openapi/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@

# Goal

This test is configuration-driven by JSON files, which contain request/response sequences intended to
detect unexpected regressions between releases.

Files can be executed in parallel, but the requests within each file are executed sequentially.

## Adding a test

Create a file for a given OpenAPI version that contains a list of request/response pairs in the following
format.

The request JSON object is translated into the Python request arguments, and the response object specifies the
expected status code and an optional body.

```json
[
{
"request": {
"urn": "",
"description": "",
"method": "",
"json": {}
},
"response": {
"status_codes": [200],
"exclude_regex_paths": [],
"json": {}
}
}
]
```
Empty file.
Loading

0 comments on commit 059e09d

Please sign in to comment.