Internal refactors and naming scheme changes
* Plus fix CacheImpl.update_me not copying the stored own user entry before returning it
FasterSpeeding committed Jun 1, 2021
1 parent 5490bea commit 0481fb8
Showing 4 changed files with 38 additions and 31 deletions.
hikari/events/base_events.py (2 changes: 1 addition & 1 deletion)
@@ -61,7 +61,7 @@ class Event(abc.ABC):

def __init_subclass__(cls) -> None:
super().__init_subclass__()
# hasattr doesn't work with protected variables in this case so we use a try except.
# hasattr doesn't work with private variables in this case so we use a try except.
# We need to set Event's __dispatches when the first subclass is made as Event itself cannot
# be included in a tuple literal on itself due to not existing yet.
try:

hikari/impl/cache.py (7 changes: 5 additions & 2 deletions)
@@ -726,15 +726,18 @@ def get_me(self) -> typing.Optional[users.OwnUser]:

def set_me(self, user: users.OwnUser, /) -> None:
if self._is_cache_enabled_for(config.CacheComponents.ME):
_LOGGER.debug("setting my user to %s", user)
self._me = copy.copy(user)

def update_me(
self, user: users.OwnUser, /
) -> typing.Tuple[typing.Optional[users.OwnUser], typing.Optional[users.OwnUser]]:
_LOGGER.debug("setting my user to %s", user)
if not self._is_cache_enabled_for(config.CacheComponents.ME):
return None, None

cached_user = self.get_me()
self.set_me(user)
return cached_user, self._me
return cached_user, self.get_me()

def _build_member(
self,
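
The update_me fix above matters because get_me returns a defensive copy while self._me is the stored entry itself, so returning the stored reference lets callers mutate the cache in place. Below is a minimal, self-contained sketch of that failure mode; TinyCache and the stripped-down OwnUser are illustrative stand-ins, not hikari's real classes.

import copy
import dataclasses
import typing


@dataclasses.dataclass
class OwnUser:
    # Stand-in for hikari's users.OwnUser.
    username: str


class TinyCache:
    # Illustrative cache holding a single "own user" entry, mirroring the
    # copy-on-read behaviour of CacheImpl.get_me.
    def __init__(self) -> None:
        self._me: typing.Optional[OwnUser] = None

    def set_me(self, user: OwnUser) -> None:
        self._me = copy.copy(user)

    def get_me(self) -> typing.Optional[OwnUser]:
        return copy.copy(self._me)

    def update_me_buggy(
        self, user: OwnUser
    ) -> typing.Tuple[typing.Optional[OwnUser], typing.Optional[OwnUser]]:
        cached = self.get_me()
        self.set_me(user)
        return cached, self._me  # leaks the stored reference

    def update_me_fixed(
        self, user: OwnUser
    ) -> typing.Tuple[typing.Optional[OwnUser], typing.Optional[OwnUser]]:
        cached = self.get_me()
        self.set_me(user)
        return cached, self.get_me()  # returns a copy, as the commit does


cache = TinyCache()
_, returned = cache.update_me_buggy(OwnUser("original"))
returned.username = "mutated"
assert cache._me is not None and cache._me.username == "mutated"  # cache corrupted through the return value

_, returned = cache.update_me_fixed(OwnUser("original"))
returned.username = "mutated"
assert cache._me.username == "original"  # stored entry stays intact
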
hikari/impl/event_manager.py (3 changes: 1 addition & 2 deletions)
@@ -216,8 +216,7 @@ async def on_guild_create(self, shard: gateway_shard.GatewayShard, payload: data
# When intents are enabled discord will only send other member objects on the guild create
# payload if presence intents are also declared, so if this isn't the case then we also want
# to chunk small guilds.
guild_is_large = payload.get("large")
if recv_chunks and members_declared and (guild_is_large or not presences_declared):
if recv_chunks and members_declared and (payload.get("large") or not presences_declared):
# We create a task here instead of awaiting the result to avoid any rate-limits from delaying dispatch.
nonce = f"{shard.id}.{_fixed_size_nonce()}"

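
To make the intent interplay described in the comment above concrete, here is a small, hypothetical restatement of the chunk-request condition; the helper name and bare boolean parameters are illustrative only, the real check lives inline in on_guild_create.

def should_request_chunks(
    recv_chunks: bool,
    members_declared: bool,
    presences_declared: bool,
    guild_is_large: bool,
) -> bool:
    # Chunks are only worth requesting if we can receive them and member data
    # was requested.  Large guilds never arrive with their full member list on
    # GUILD_CREATE; small guilds only include it when the presence intent was
    # also declared, so without presences we chunk them too.
    return recv_chunks and members_declared and (guild_is_large or not presences_declared)


# Small guild, presence intent declared: members already arrived, no chunking needed.
assert not should_request_chunks(True, True, presences_declared=True, guild_is_large=False)
# Small guild, no presence intent: member data is missing, so chunk it.
assert should_request_chunks(True, True, presences_declared=False, guild_is_large=False)
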
hikari/impl/event_manager_base.py (57 changes: 31 additions & 26 deletions)
@@ -106,8 +106,13 @@ def decorator(method: UnboundMethodT[EventManagerBaseT], /) -> UnboundMethodT[Ev
@attr.frozen()
class _Consumer:
callback: ConsumerT
cache: undefined.UndefinedOr[config.CacheComponents]
"""The callback function for this consumer."""

cache_components: undefined.UndefinedOr[config.CacheComponents]
"""Bitfield of the cache components this consumer makes modifying calls to, if set."""

event_types: undefined.UndefinedOr[typing.Sequence[typing.Type[base_events.Event]]]
"""A sequence of the types of events this consumer dispatches to, if set."""


class EventManagerBase(event_manager.EventManager):
@@ -117,57 +122,57 @@ class EventManagerBase(event_manager.EventManager):
is the raw event name being dispatched in lower-case.
"""

__slots__: typing.Sequence[str] = ("_app", "_dispatches_for_cache", "_listeners", "_consumers", "_waiters")
__slots__: typing.Sequence[str] = ("_app", "_consumers", "_enabled_consumers_cache", "_listeners", "_waiters")

def __init__(self, app: traits.BotAware) -> None:
self._app = app
self._dispatches_for_cache: typing.Dict[_Consumer, bool] = {}
self._consumers: typing.Dict[str, _Consumer] = {}
self._enabled_consumers_cache: typing.Dict[_Consumer, bool] = {}
self._listeners: ListenerMapT[base_events.Event] = {}
self._waiters: WaiterMapT[base_events.Event] = {}

for name, member in inspect.getmembers(self):
if name.startswith("on_"):
member = typing.cast("MethodT", member)
member = typing.cast(MethodT, member)
cache_resource = getattr(member, _CACHE_RESOURCE_ATTRIBUTE, undefined.UNDEFINED)
event_types = getattr(member, _EVENT_TYPES_ATTRIBUTE, undefined.UNDEFINED)
cache_resource = typing.cast("undefined.UndefinedOr[config.CacheComponents]", cache_resource)
event_types = typing.cast(
"undefined.UndefinedOr[typing.Sequence[typing.Type[base_events.Event]]]", event_types
)
cache_resource: undefined.UndefinedOr[config.CacheComponents] = cache_resource
event_types: undefined.UndefinedOr[typing.Sequence[typing.Type[base_events.Event]]] = event_types
self._consumers[name[3:]] = _Consumer(member, cache_resource, event_types)

def _clear_enabled_cache(self) -> None:
self._enabled_consumers_cache = {}

def _enabled_for_event(self, event_type: typing.Type[base_events.Event], /) -> bool:
for cls in event_type.dispatches():
if cls in self._listeners or cls in self._waiters:
return True

return False

# This returns int rather than bool to avoid unnecessary bool casts
def _enabled_for_consumer(self, consumer: _Consumer) -> int:
# If undefined then we can only safely assume that this does link to registered listeners.
# This returns int rather than bool to avoid an unnecessary bool cast
def _enabled_for_consumer(self, consumer: _Consumer, /) -> int:
# If undefined then we can only assume that this may link to registered listeners.
if consumer.event_types is undefined.UNDEFINED:
return True

if (cached_value := self._dispatches_for_cache.get(consumer, ...)) is True:
if (cached_value := self._enabled_consumers_cache.get(consumer)) is True:
return True

if cached_value is ...:
if cached_value is None:
for event_type in consumer.event_types:
if event_type in self._listeners or event_type in self._waiters:
self._dispatches_for_cache[consumer] = True
self._enabled_consumers_cache[consumer] = True
return True

else:
self._dispatches_for_cache[consumer] = False
self._enabled_consumers_cache[consumer] = False

# If consumer.cache is UNDEFINED then we have to fall back to assuming that the consumer might set state.
# If consumer.cache is NONE then it doesn't make set state.
# If cache_components is UNDEFINED then we have to fall back to assuming that the consumer might set state.
# If cache_components is NONE then it doesn't make set state calls.
return (
consumer.cache is undefined.UNDEFINED
or consumer.cache != config.CacheComponents.NONE
and consumer.cache & self._app.cache.settings.components
consumer.cache_components is undefined.UNDEFINED
or consumer.cache_components != config.CacheComponents.NONE
and consumer.cache_components & self._app.cache.settings.components
)

def consume_raw_event(
@@ -210,7 +215,7 @@ def subscribe(
self._listeners[event_type].append(callback) # type: ignore[arg-type]
except KeyError:
self._listeners[event_type] = [callback] # type: ignore[list-item]
self._dispatches_for_cache.clear()
self._clear_enabled_cache()

return callback

@@ -269,7 +274,7 @@ def unsubscribe(
listeners.remove(callback) # type: ignore[arg-type]
if not listeners:
del self._listeners[event_type]
self._dispatches_for_cache.clear()
self._clear_enabled_cache()

def listen(
self,
@@ -336,7 +341,7 @@ def dispatch(self, event: event_manager.EventT_inv) -> asyncio.Future[typing.Any
clear_cache = True

if clear_cache:
self._dispatches_for_cache.clear()
self._clear_enabled_cache()

return asyncio.gather(*tasks) if tasks else aio.completed_future()

@@ -367,7 +372,7 @@ async def wait_for(
try:
waiter_set = self._waiters[event_type]
except KeyError:
self._dispatches_for_cache.clear()
self._clear_enabled_cache()
waiter_set = set()
self._waiters[event_type] = waiter_set

@@ -380,7 +385,7 @@
waiter_set.remove(pair) # type: ignore[arg-type]
if not waiter_set:
del self._waiters[event_type]
self._dispatches_for_cache.clear()
self._clear_enabled_cache()

raise

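
As a rough, self-contained sketch of the gating logic introduced above (class and field names here are simplified stand-ins, not hikari's real EventManagerBase or config types): a consumer runs if one of its declared event types has a listener or waiter, with the per-consumer answer memoised in _enabled_consumers_cache, and otherwise only if its cache_components bitfield intersects the cache components that are actually enabled.

import enum
import typing


class CacheComponents(enum.IntFlag):
    # Simplified stand-in for hikari's config.CacheComponents bitfield.
    NONE = 0
    ME = enum.auto()
    MEMBERS = enum.auto()
    GUILDS = enum.auto()


class Consumer(typing.NamedTuple):
    # None plays the role of undefined.UNDEFINED in this sketch.
    cache_components: typing.Optional[CacheComponents]
    event_types: typing.Optional[typing.Tuple[type, ...]]


class ConsumerGate:
    def __init__(self, enabled_components: CacheComponents) -> None:
        self._enabled_components = enabled_components
        self._listeners: typing.Dict[type, list] = {}
        self._waiters: typing.Dict[type, set] = {}
        self._enabled_consumers_cache: typing.Dict[Consumer, bool] = {}

    def _clear_enabled_cache(self) -> None:
        # Called whenever listeners or waiters are added or removed.
        self._enabled_consumers_cache = {}

    def enabled_for_consumer(self, consumer: Consumer) -> bool:
        # Unknown event types: assume the consumer may dispatch to a listener.
        if consumer.event_types is None:
            return True

        cached = self._enabled_consumers_cache.get(consumer)
        if cached is True:
            return True

        if cached is None:  # not memoised yet
            dispatches = any(
                t in self._listeners or t in self._waiters for t in consumer.event_types
            )
            self._enabled_consumers_cache[consumer] = dispatches
            if dispatches:
                return True

        # No listener needs this consumer; it only has to run if it writes to a
        # cache component that is enabled.  Unknown components: assume it might.
        if consumer.cache_components is None:
            return True
        return bool(consumer.cache_components & self._enabled_components)

In this sketch, any change to _listeners or _waiters must be followed by _clear_enabled_cache, mirroring how subscribe, unsubscribe, dispatch, and wait_for invalidate the memo in the diff above.
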
