"""Events that trigger W&B Automations."""

# ruff: noqa: UP007  # Avoid using `X | Y` for union fields, as this can cause issues with pydantic < 2.6

from __future__ import annotations

from typing import TYPE_CHECKING, Any, Literal, Optional, Union

from pydantic import Field
from typing_extensions import Annotated, Self, get_args

from wandb._pydantic import (
    GQLBase,
    SerializedToJson,
    ensure_json,
    field_validator,
    model_validator,
    pydantic_isinstance,
)

from ._filters import And, MongoLikeFilter, Or
from ._filters.expressions import FilterableField
from ._filters.run_metrics import MetricChangeFilter, MetricThresholdFilter, MetricVal
from ._generated import FilterEventFields
from ._validators import LenientStrEnum, simplify_op
from .actions import InputAction, InputActionTypes, SavedActionTypes
from .scopes import ArtifactCollectionScope, AutomationScope, ProjectScope

if TYPE_CHECKING:
    from .automations import NewAutomation


# NOTE: Re-defined publicly with a more readable name for easier access
class EventType(LenientStrEnum):
    """The type of event that triggers an automation."""

    # ---------------------------------------------------------------------------
    # Events triggered by GraphQL mutations
    UPDATE_ARTIFACT_ALIAS = "UPDATE_ARTIFACT_ALIAS"  # NOTE: Avoid in new automations

    CREATE_ARTIFACT = "CREATE_ARTIFACT"
    ADD_ARTIFACT_ALIAS = "ADD_ARTIFACT_ALIAS"
    LINK_ARTIFACT = "LINK_MODEL"
    # Note: "LINK_MODEL" is the (legacy) value expected by the backend, but we
    # name it "LINK_ARTIFACT" here in the public API for clarity and consistency.

    # ---------------------------------------------------------------------------
    # Events triggered by Run conditions
    RUN_METRIC_THRESHOLD = "RUN_METRIC"
    RUN_METRIC_CHANGE = "RUN_METRIC_CHANGE"


# ------------------------------------------------------------------------------
# Saved types: for parsing response data from saved automations


# Note: In GQL responses containing saved automation data, the filter is wrapped in an extra `filter` key.
class _WrappedSavedEventFilter(GQLBase):  # from: TriggeringFilterEvent
    filter: SerializedToJson[MongoLikeFilter] = And()


class _WrappedMetricFilter(GQLBase):  # from: RunMetricFilter
    threshold_filter: Optional[MetricThresholdFilter] = None
    change_filter: Optional[MetricChangeFilter] = None

    @model_validator(mode="before")
    @classmethod
    def _wrap_metric_filter(cls, v: Any) -> Any:
        if pydantic_isinstance(v, MetricThresholdFilter):
            return cls(threshold_filter=v)
        if pydantic_isinstance(v, MetricChangeFilter):
            return cls(change_filter=v)
        return v

    @model_validator(mode="after")
    def _ensure_exactly_one_set(self) -> Self:
        set_fields = [name for name, val in self if (val is not None)]

        if not set_fields:
            all_names = ", ".join(map(repr, type(self).model_fields))
            raise ValueError(f"Expected one of: {all_names}")

        if len(set_fields) > 1:
            set_names = ", ".join(map(repr, set_fields))
            raise ValueError(f"Expected exactly one metric filter, got: {set_names}")

        return self

    @property
    def event_type(self) -> EventType:
        if self.threshold_filter is not None:
            return EventType.RUN_METRIC_THRESHOLD
        if self.change_filter is not None:
            return EventType.RUN_METRIC_CHANGE
        raise RuntimeError("Expected one of: `threshold_filter` or `change_filter`")


class RunMetricFilter(GQLBase):  # from: TriggeringRunMetricEvent
    run: Annotated[SerializedToJson[MongoLikeFilter], Field(alias="run_filter")] = And()
    metric: Annotated[_WrappedMetricFilter, Field(alias="run_metric_filter")]

    # ------------------------------------------------------------------------------
    legacy_metric_filter: Annotated[
        Optional[SerializedToJson[MetricThresholdFilter]],
        Field(alias="metric_filter", deprecated=True),
    ] = None
    """Deprecated legacy field that was previously used to define run metric threshold events.

    For new automations, use the `metric` field (`run_metric_filter` JSON alias) instead.
    """

    @model_validator(mode="before")
    @classmethod
    def _wrap_metric_filter(cls, v: Any) -> Any:
        if pydantic_isinstance(v, (MetricThresholdFilter, MetricChangeFilter)):
            # If only an (unnested) metric filter is given, nest it under the
            # `metric` field, delegating to inner validator(s) for further
            # wrapping/nesting, if needed.
            # This is necessary to conform to the expected backend schema.
            return cls(metric=v)
        return v

    @field_validator("run", mode="after")
    def _wrap_run_filter(cls, v: MongoLikeFilter) -> Any:
        v_new = simplify_op(v)
        return v_new if pydantic_isinstance(v_new, And) else And(and_=[v_new])
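
# Illustrative sketch (comment only, not executed): because `_wrap_metric_filter`
# runs as a "before" validator, a bare metric filter is accepted and nested
# automatically.  Assuming `threshold` is a `MetricThresholdFilter` built elsewhere:
#
#     RunMetricFilter.model_validate(threshold)
#     # -> the filter ends up under `metric.threshold_filter`, with `run` defaulting to And()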


class SavedEvent(FilterEventFields):  # from: FilterEventTriggeringCondition
    """A triggering event from a saved automation."""

    event_type: Annotated[EventType, Field(frozen=True)]  # type: ignore[assignment]

    # We override the type of the `filter` field in order to enforce the expected
    # structure for the JSON data when validating and serializing.
    filter: SerializedToJson[Union[_WrappedSavedEventFilter, RunMetricFilter]]
    """The condition(s) under which this event triggers an automation."""


# ------------------------------------------------------------------------------
# Input types: for creating or updating automations


# Note: The GQL input for "eventFilter" does NOT wrap the filter in an extra `filter` key, unlike the
# eventFilter returned in responses for saved automations.
class _BaseEventInput(GQLBase):
    event_type: EventType

    scope: AutomationScope
    """The scope of the event."""

    filter: SerializedToJson[Any]

    def then(self, action: InputAction) -> NewAutomation:
        """Define a new Automation in which this event triggers the given action."""
        from .automations import NewAutomation

        if isinstance(action, (InputActionTypes, SavedActionTypes)):
            return NewAutomation(event=self, action=action)

        raise TypeError(f"Expected a valid action, got: {type(action).__qualname__!r}")

    def __rshift__(self, other: InputAction) -> NewAutomation:
        """Implements `event >> action` to define an Automation with this event and action."""
        return self.then(other)
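
# Illustrative usage sketch: for any input event `event` and valid action `action`,
# the following two expressions define the same `NewAutomation`:
#
#     automation = event.then(action)
#     automation = event >> action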


# ------------------------------------------------------------------------------
# Events that trigger on specific mutations in the backend
class _BaseMutationEventInput(_BaseEventInput):
    filter: SerializedToJson[MongoLikeFilter] = And()
    """Additional condition(s), if any, that must be met for this event to trigger an automation."""

    @field_validator("filter", mode="after")
    def _wrap_filter(cls, v: Any) -> Any:
        """Ensure the given filter is wrapped like: `{"$or": [{"$and": [<original_filter>]}]}`.

        This is awkward but necessary, because the frontend expects this format.
        """
        v_new = simplify_op(v)
        v_new = v_new if pydantic_isinstance(v_new, And) else And(and_=[v_new])
        return Or(or_=[v_new])
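
# Illustrative sketch (comment only): after `_wrap_filter` runs, a user-supplied
# filter such as `{"alias": "prod"}` is stored roughly as
# `{"$or": [{"$and": [{"alias": "prod"}]}]}` when serialized for the backend.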


class OnLinkArtifact(_BaseMutationEventInput):
    """A new artifact is linked to a collection."""

    event_type: Literal[EventType.LINK_ARTIFACT] = EventType.LINK_ARTIFACT


class OnAddArtifactAlias(_BaseMutationEventInput):
    """A new alias is assigned to an artifact."""

    event_type: Literal[EventType.ADD_ARTIFACT_ALIAS] = EventType.ADD_ARTIFACT_ALIAS


class OnCreateArtifact(_BaseMutationEventInput):
    """A new artifact is created."""

    event_type: Literal[EventType.CREATE_ARTIFACT] = EventType.CREATE_ARTIFACT

    scope: ArtifactCollectionScope
    """The scope of the event: only artifact collections are valid scopes for this event."""


# ------------------------------------------------------------------------------
# Events that trigger on run conditions
class _BaseRunEventInput(_BaseEventInput):
    scope: ProjectScope
    """The scope of the event: only projects are valid scopes for this event."""


class OnRunMetric(_BaseRunEventInput):
    """A run metric satisfies a user-defined condition."""

    event_type: Literal[EventType.RUN_METRIC_THRESHOLD, EventType.RUN_METRIC_CHANGE]

    filter: SerializedToJson[RunMetricFilter]
    """Run and/or metric condition(s) that must be satisfied for this event to trigger an automation."""

    @model_validator(mode="before")
    @classmethod
    def _infer_event_type(cls, data: Any) -> Any:
        """Infer the event type at validation time from the inner filter.

        This allows this class to accommodate both "threshold" and "change" metric
        filter types, which can only be determined after parsing and validating
        the inner JSON data.
        """
        if isinstance(data, dict) and (raw_filter := data.get("filter")):
            # At this point, `raw_filter` may or may not be JSON-serialized
            parsed_filter = RunMetricFilter.model_validate_json(ensure_json(raw_filter))
            return {**data, "event_type": parsed_filter.metric.event_type}

        return data
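
# Illustrative usage sketch, assuming `RunEvent.metric(...)` (defined below) supports
# comparison operators that build a `MetricThresholdFilter`, and that `project` is a
# valid `ProjectScope`:
#
#     event = OnRunMetric(scope=project, filter=RunEvent.metric("loss") < 0.1)
#     # `event_type` is then inferred from the parsed filter by `_infer_event_type`.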


# for type annotations
InputEvent = Annotated[
    Union[
        OnLinkArtifact,
        OnAddArtifactAlias,
        OnCreateArtifact,
        OnRunMetric,
    ],
    Field(discriminator="event_type"),
]
# for runtime type checks
InputEventTypes: tuple[type, ...] = get_args(InputEvent.__origin__)  # type: ignore[attr-defined]


# ----------------------------------------------------------------------------


class RunEvent:
    name = FilterableField(server_name="display_name")
    # `Run.name` is actually filtered on `Run.display_name` in the backend.
    # We can't reasonably expect users to know this a priori, so
    # automatically fix it here.

    @staticmethod
    def metric(name: str) -> MetricVal:
        """Define a metric filter condition."""
        return MetricVal(name=name)


class ArtifactEvent:
    alias = FilterableField()
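
# Illustrative sketch (assumed expression helpers): the `FilterableField` attributes
# above are intended for building event filters declaratively, e.g. something like:
#
#     OnAddArtifactAlias(scope=collection, filter=ArtifactEvent.alias.matches_regex("^prod-"))
#
# where `matches_regex` is an assumed `FilterableField` helper and `collection`
# stands in for an artifact collection scope.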


MetricThresholdFilter.model_rebuild()
RunMetricFilter.model_rebuild()
_WrappedSavedEventFilter.model_rebuild()

OnLinkArtifact.model_rebuild()
OnAddArtifactAlias.model_rebuild()
OnCreateArtifact.model_rebuild()
OnRunMetric.model_rebuild()

__all__ = [
    "EventType",
    *(cls.__name__ for cls in InputEventTypes),
    "RunEvent",
    "ArtifactEvent",
    "MetricThresholdFilter",
    "MetricChangeFilter",
]