Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 10 additions & 1 deletion oocana/oocana/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,6 +347,15 @@ def __store_ref(self, handle: str):

def __is_basic_type(self, value: Any) -> bool:
    """Return True when *value* is a plain scalar: int, float, str or bool."""
    return any(isinstance(value, basic) for basic in (int, float, str, bool))

def __is_dataframe_like(self, value: Any) -> bool:
    """Check if value is DataFrame-like by duck-typing.

    Supports pandas DataFrame and subclasses (GeoDataFrame, etc.) without
    importing pandas. Because the serialization path later calls
    ``value.to_pickle(path, compression=...)``, merely having a callable
    ``to_pickle`` is not enough: the method must also accept the
    ``compression`` keyword, otherwise the cache write fails at runtime.
    """
    to_pickle = getattr(value, 'to_pickle', None)
    copy = getattr(value, 'copy', None)
    if not (callable(to_pickle) and callable(copy)):
        return False
    import inspect
    try:
        sig = inspect.signature(to_pickle)
    except (TypeError, ValueError):
        # C-implemented / builtin callables may not expose a signature;
        # keep the previous permissive behavior in that case.
        return True
    params = sig.parameters
    # Accept either an explicit `compression` parameter or a **kwargs sink.
    if 'compression' in params:
        return True
    return any(
        p.kind is inspect.Parameter.VAR_KEYWORD for p in params.values()
    )
Comment on lines +351 to +358
Copy link

Copilot AI Jan 31, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

__is_dataframe_like() returns True based only on presence/callability of to_pickle/copy, but the serialization path later calls to_pickle(..., compression=compression). Objects with a to_pickle() method that doesn’t accept the compression kwarg (or whose copy() returns an object without to_pickle) will pass this check and then fail at runtime. Consider either tightening the duck-type check to the actual required API (including kwarg support), or making the serialization call resilient (e.g., fallback call without compression / catch TypeError and skip caching).

Copilot uses AI. Check for mistakes.

def __wrap_output_value(self, handle: str, value: Any):
"""
Expand Down Expand Up @@ -375,7 +384,7 @@ def __wrap_output_value(self, handle: str, value: Any):

serialize_path = None
# only cache root flow
if len(self.__block_info.stacks) < 2 and output_def.need_serialize_var_for_cache() and value.__class__.__name__ == 'DataFrame' and callable(getattr(value, 'to_pickle', None)):
if len(self.__block_info.stacks) < 2 and output_def.need_serialize_var_for_cache() and self.__is_dataframe_like(value):
from .serialization import compression_suffix, compression_options
suffix = compression_suffix(context=self)
compression = compression_options(context=self)
Expand Down
Loading