Alternative Modified Backport of #4619 (#4660)

Merged: 3 commits, Feb 2, 2022
Changes from all commits
18 changes: 12 additions & 6 deletions core/dbt/adapters/cache.py
@@ -4,7 +4,7 @@
 
 from dbt.adapters.reference_keys import _make_key, _ReferenceKey
 import dbt.exceptions
-from dbt.events.functions import fire_event
+from dbt.events.functions import fire_event, fire_event_if
 from dbt.events.types import (
     AddLink,
     AddRelation,
@@ -20,6 +20,7 @@
     UncachedRelation,
     UpdateReference
 )
+import dbt.flags as flags
 from dbt.utils import lowercase


@@ -323,11 +324,11 @@ def add(self, relation):
         """
         cached = _CachedRelation(relation)
         fire_event(AddRelation(relation=_make_key(cached)))
-        fire_event(DumpBeforeAddGraph(dump=self.dump_graph()))
+        fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpBeforeAddGraph(dump=self.dump_graph()))
 
         with self.lock:
             self._setdefault(cached)
-        fire_event(DumpAfterAddGraph(dump=self.dump_graph()))
+        fire_event_if(flags.LOG_CACHE_EVENTS, lambda: DumpAfterAddGraph(dump=self.dump_graph()))
 
     def _remove_refs(self, keys):
         """Removes all references to all entries in keys. This does not
@@ -440,16 +441,21 @@ def rename(self, old, new):
         old_key = _make_key(old)
         new_key = _make_key(new)
         fire_event(RenameSchema(old_key=old_key, new_key=new_key))
 
-        fire_event(DumpBeforeRenameSchema(dump=self.dump_graph()))
+        fire_event_if(
+            flags.LOG_CACHE_EVENTS,
+            lambda: DumpBeforeRenameSchema(dump=self.dump_graph())
+        )
 
         with self.lock:
             if self._check_rename_constraints(old_key, new_key):
                 self._rename_relation(old_key, _CachedRelation(new))
             else:
                 self._setdefault(_CachedRelation(new))
 
-        fire_event(DumpAfterRenameSchema(dump=self.dump_graph()))
+        fire_event_if(
+            flags.LOG_CACHE_EVENTS,
+            lambda: DumpAfterRenameSchema(dump=self.dump_graph())
+        )
 
     def get_relations(
         self, database: Optional[str], schema: Optional[str]
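
Why the cache.py change matters: dump_graph() serializes the entire relation cache, so building DumpBeforeAddGraph / DumpAfterAddGraph / DumpBeforeRenameSchema / DumpAfterRenameSchema eagerly pays that cost on every add and rename, even when the dump is never logged. Gating the construction behind flags.LOG_CACHE_EVENTS and deferring it with a lambda means the dump is only computed when cache-event logging is actually enabled. A minimal standalone sketch of the pattern (the stub names below are illustrative, not dbt-core's API):

    from typing import Callable, Dict, List

    LOG_CACHE_EVENTS = False  # stand-in for dbt.flags.LOG_CACHE_EVENTS


    def dump_graph_stub() -> Dict[str, List[str]]:
        # stand-in for RelationsCache.dump_graph(); imagine this walks a large cache
        print("expensive dump computed")
        return {"database.schema.relation": []}


    def fire_event(event: Dict) -> None:
        print("event logged:", event)


    def fire_event_if(conditional: bool, lazy_e: Callable[[], Dict]) -> None:
        if conditional:
            fire_event(lazy_e())


    # old shape: the dump is computed before fire_event is even entered
    fire_event({"dump": dump_graph_stub()})

    # new shape: with the flag off, the lambda is never invoked and no dump is built
    fire_event_if(LOG_CACHE_EVENTS, lambda: {"dump": dump_graph_stub()})

With the flag off, the second call prints nothing; flip LOG_CACHE_EVENTS to True and both calls produce a dump.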
7 changes: 7 additions & 0 deletions core/dbt/events/functions.py
@@ -306,6 +306,13 @@ def send_exc_to_logger(
         )
 
 
+# an alternative to fire_event which only creates and logs the event value
+# if the condition is met. Does nothing otherwise.
+def fire_event_if(conditional: bool, lazy_e: Callable[[], Event]) -> None:
+    if conditional:
+        fire_event(lazy_e())
+
+
 # top-level method for accessing the new eventing system
 # this is where all the side effects happen branched by event type
 # (i.e. - mutating the event history, printing to stdout, logging
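
fire_event_if takes a zero-argument callable rather than a constructed Event, so the event, and any expensive values baked into its fields, is only materialized when the condition is true; otherwise the call is effectively free. It is just a centralized spelling of "if conditional: fire_event(...)" at the call site. The same concern shows up with the standard library's logger: %-style arguments are formatted lazily, but they are still evaluated eagerly, so an explicit guard is needed when computing the argument is the expensive part (sketch below; expensive_dump is illustrative):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)


    def expensive_dump() -> dict:
        # placeholder for something costly, like serializing a large cache
        return {"database.schema.relation": []}


    # lazy %-interpolation: the string is only formatted if DEBUG is enabled...
    logger.debug("cache graph: %s", {"small": "payload"})

    # ...but the argument expression itself still runs eagerly, so a guard is
    # needed when building it is the expensive part. fire_event_if plays the
    # role of this guard, with the lambda deferring the computation.
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("cache graph: %s", expensive_dump())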
8 changes: 4 additions & 4 deletions core/dbt/events/types.py
@@ -699,7 +699,7 @@ def message(self) -> str:
 
 @dataclass
 class DumpBeforeAddGraph(DebugLevel, Cli, File, Cache):
-    # large value. delay not necessary since every debug level message is logged anyway.
+    # large value. delay creation with fire_event_if.
     dump: Dict[str, List[str]]
     code: str = "E031"
 
@@ -709,7 +709,7 @@ def message(self) -> str:
 
 @dataclass
 class DumpAfterAddGraph(DebugLevel, Cli, File, Cache):
-    # large value. delay not necessary since every debug level message is logged anyway.
+    # large value. delay creation with fire_event_if.
     dump: Dict[str, List[str]]
     code: str = "E032"
 
@@ -719,7 +719,7 @@ def message(self) -> str:
 
 @dataclass
 class DumpBeforeRenameSchema(DebugLevel, Cli, File, Cache):
-    # large value. delay not necessary since every debug level message is logged anyway.
+    # large value. delay creation with fire_event_if.
     dump: Dict[str, List[str]]
     code: str = "E033"
 
@@ -729,7 +729,7 @@ def message(self) -> str:
 
 @dataclass
 class DumpAfterRenameSchema(DebugLevel, Cli, File, Cache):
-    # large value. delay not necessary since every debug level message is logged anyway.
+    # large value. delay creation with fire_event_if.
     dump: Dict[str, List[str]]
     code: str = "E034"
 
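
The comment rewording above tracks the actual cost model: these four event classes carry the full cache dump as a field, so by the time one of them exists the expensive work has already happened; what fire_event_if delays is constructing the event at all, not logging it. A simplified sketch of the shape of these classes (the real ones also mix in DebugLevel, Cli, File and Cache, and their message() text may differ):

    import json
    from dataclasses import dataclass
    from typing import Dict, List


    @dataclass
    class DumpBeforeAddGraphSketch:
        # simplified stand-in for dbt.events.types.DumpBeforeAddGraph
        dump: Dict[str, List[str]]
        code: str = "E031"

        def message(self) -> str:
            # illustrative message text
            return f"dump before adding: {json.dumps(self.dump)}"


    event = DumpBeforeAddGraphSketch(dump={"database.schema.relation": []})
    print(event.message())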
31 changes: 31 additions & 0 deletions test/unit/test_events.py
@@ -428,3 +428,34 @@ def test_all_serializable(self):
             except TypeError as e:
                 raise Exception(f"{event} is not serializable to json. Originating exception: {e}")
 
+
+class TestConditionallyFiringEvents(TestCase):
+
+    # tests that fire_event_if only creates the event if the condition is met
+    def test_fire_event_if(self):
+
+        # this class exists to count how many times the stub is called
+        @dataclass
+        class DumpStub():
+            counter = 0
+
+            def dump_graph(self):
+                self.counter = self.counter + 1
+                return dict()
+
+        # initialize to start the counter at 0
+        dump_stub = DumpStub()
+
+        # because the condition is false, the event should never be created,
+        # and the stub function should never be called
+        event_funcs.fire_event_if(False, lambda: DumpAfterAddGraph(dump=dump_stub.dump_graph()))
+
+        # assert the stub function was never called
+        self.assertEqual(0, dump_stub.counter)
+
+        # because the condition is true, the event should be created and
+        # the stub function will be called
+        event_funcs.fire_event_if(True, lambda: DumpAfterAddGraph(dump=dump_stub.dump_graph()))
+
+        # assert that the stub function was called
+        self.assertEqual(1, dump_stub.counter)
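
The same laziness property can also be asserted with unittest.mock instead of a hand-rolled counter; this is only a sketch and assumes it sits in the same test module, where event_funcs and DumpAfterAddGraph are already imported:

    from unittest import TestCase
    from unittest.mock import MagicMock


    class TestConditionallyFiringEventsWithMock(TestCase):

        def test_fire_event_if_defers_dump(self):
            dump_builder = MagicMock(return_value=dict())

            # condition false: the lambda is never invoked, so the builder is never called
            event_funcs.fire_event_if(False, lambda: DumpAfterAddGraph(dump=dump_builder()))
            dump_builder.assert_not_called()

            # condition true: the event is built and the builder runs exactly once
            event_funcs.fire_event_if(True, lambda: DumpAfterAddGraph(dump=dump_builder()))
            dump_builder.assert_called_once()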