Merged
Changes from 1 commit
Commits (20)
2d623b8 (twin-drill, May 31, 2024): enhanced stack trace:
4acf6ab (twin-drill, May 31, 2024): Refactored `@dataclass`es to `temporalio.common`.
9579582 (twin-drill, Jun 3, 2024): Added integration test for external file, via added file (externalsta…
3cfcce0 (twin-drill, Jun 3, 2024): Reformatted with `poe format`.
c72f9d0 (twin-drill, Jun 3, 2024): Fix typing issues (_FileLocation to _StackTrace) and adjust integ tes…
d9a930a (twin-drill, Jun 3, 2024): Fix typing errors in new integ test (Forgot an External-)
16cad2c (twin-drill, Jun 4, 2024): refactor dataclasses into `_workflow_instance.py`; use `typing` packa…
21acbda (twin-drill, Jun 4, 2024): Merge branch 'main' into main
80bd0e5 (twin-drill, Jun 4, 2024): Merge branch 'main' of https://github.com/twin-drill/temporal-sdk-python
3e5037b (twin-drill, Jun 7, 2024): multi-file stack traces not working atm
ae4f4a9 (twin-drill, Jun 10, 2024): delete extra file
428fb7f (twin-drill, Jun 12, 2024): Merge branch 'main' into main
0c5f175 (twin-drill, Jun 12, 2024): remove in-file classes
0380f30 (twin-drill, Jun 12, 2024): update submodule
37b8b33 (twin-drill, Jun 14, 2024): refactor with proto classes
c8902c7 (twin-drill, Jun 14, 2024): changed `test_workflow_enhanced_stack_trace` and `test_workflow_exter…
a2c0803 (twin-drill, Jun 17, 2024): Merge branch 'main' into main
393ca37 (twin-drill, Jun 17, 2024): Make `FileSlice` a copy of the entire source file, with `line_offset`…
91584e3 (twin-drill, Jun 17, 2024): add `Cargo.lock`
2440ea8 (twin-drill, Jun 18, 2024): Skip `test_replayer_workflow_complete` for Python versions < 3.12 -- …
Make FileSlice a copy of the entire source file, with line_offset set to 0. Update tests to account for this; made them more specific while I was at it.
twin-drill committed Jun 17, 2024
commit 393ca3755e4825403c7fcd7c84a1dded0436ea38
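For context on the change described above: since each StackTraceFileSlice now holds the entire source file and line_offset is fixed at 0, a consumer of the query result can map a frame's location straight back to its source line. The helper below is a hypothetical sketch, not part of this commit or the SDK; it assumes only the proto fields visible in the diff (StackTraceFileSlice.content, StackTraceFileLocation.line and .file_path, EnhancedStackTrace.sources) and that line is 1-based, as frame.f_lineno is.

import temporalio.api.sdk.v1


def frame_source_line(
    trace: temporalio.api.sdk.v1.EnhancedStackTrace,
    location: temporalio.api.sdk.v1.StackTraceFileLocation,
) -> str:
    # Hypothetical helper, not part of this commit. Sources are now keyed by
    # file path alone, and each slice holds the whole file with line_offset 0,
    # so the frame's 1-based line number indexes the content directly.
    file_slice = trace.sources[location.file_path]
    return file_slice.content.splitlines()[location.line - 1]

For any location in trace.stacks this returns the text of the line the frame was suspended on, which is roughly what the tightened tests below assert against (they match source keys by filename suffix rather than assuming exact paths).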
75 changes: 38 additions & 37 deletions temporalio/worker/_workflow_instance.py
@@ -1812,46 +1812,47 @@ def _enhanced_stack_trace(self) -> temporalio.api.sdk.v1.EnhancedStackTrace:
name="sdk-python", version=__version__
)

sources = dict()
stacks = []

for task in list(self._tasks):
locations = []
for frame in task.get_stack():
filename = frame.f_code.co_filename
line_number = frame.f_lineno
func_name = frame.f_code.co_name

try:
source = inspect.getsourcelines(frame)
code = "".join(source[0])
line_number = int(source[1])
except OSError as ose:
code = f"Cannot access code.\n---\n{ose.strerror}"
# TODO possibly include sentinel/property for success of src scrape? work out with ui
except Exception:
code = f"Generic Error.\n\n{traceback.format_exc()}"

file_slice = temporalio.api.sdk.v1.StackTraceFileSlice(
line_offset=line_number, content=code
)
file_location = temporalio.api.sdk.v1.StackTraceFileLocation(
file_path=filename,
line=line_number,
column=-1,
function_name=func_name,
internal_code=False,
)
# this is to use `open`
with temporalio.workflow.unsafe.sandbox_unrestricted():
sources = dict()
stacks = []

for task in list(self._tasks):
locations = []
for frame in task.get_stack():
filename = frame.f_code.co_filename
line_number = frame.f_lineno
func_name = frame.f_code.co_name

try:
with open(filename, "r") as f:
code = f.read()
except OSError as ose:
code = f"Cannot access code.\n---\n{ose.strerror}"
# TODO possibly include sentinel/property for success of src scrape? work out with ui
except Exception:
code = f"Generic Error.\n\n{traceback.format_exc()}"

file_slice = temporalio.api.sdk.v1.StackTraceFileSlice(
line_offset=0, content=code
)
file_location = temporalio.api.sdk.v1.StackTraceFileLocation(
file_path=filename,
line=line_number,
column=-1,
function_name=func_name,
internal_code=False,
)

sources[f"{filename} {line_number}"] = file_slice
locations.append(file_location)
sources[filename] = file_slice
locations.append(file_location)

stacks.append(temporalio.api.sdk.v1.StackTrace(locations=locations))
stacks.append(temporalio.api.sdk.v1.StackTrace(locations=locations))

est = temporalio.api.sdk.v1.EnhancedStackTrace(
sdk=sdk, sources=sources, stacks=stacks
)
return est
est = temporalio.api.sdk.v1.EnhancedStackTrace(
sdk=sdk, sources=sources, stacks=stacks
)
return est

#### asyncio.AbstractEventLoop function impls ####
# These are in the order defined in CPython's impl of the base class. Many
63 changes: 17 additions & 46 deletions tests/worker/test_workflow.py
@@ -2131,9 +2131,16 @@ async def status() -> str:
]

# first line of never_completing_coroutine
assert 'self._status = "waiting"' in str(
[fileslice.content for fileslice in trace.sources.values()]
)
cur_source = None
for source in trace.sources.keys():
if source.endswith("test_workflow.py"):
cur_source = source

# make sure the source exists
assert cur_source is not None

# make sure the line is present in the source
assert 'self._status = "waiting"' in trace.sources[cur_source].content
assert trace.sdk.version == __version__


@@ -2158,61 +2165,25 @@ async def status() -> str:

# test that a coroutine only has the source as its stack

assert type(trace) == EnhancedStackTrace

assert "never_completing_coroutine" in [
loc.function_name for stack in trace.stacks for loc in stack.locations
]

fn = None
for source in trace.sources.keys():
if source.endswith("external_coroutine.py 10"):
if source.endswith("external_coroutine.py"):
fn = source

assert fn != None
assert 'status[0] = "waiting" # external coroutine test' in str(
[fileslice.content for fileslice in trace.sources.values()]
assert fn is not None
assert (
'status[0] = "waiting" # external coroutine test'
in trace.sources[fn].content
)
assert trace.sdk.version == __version__


async def test_workflow_external_multifile_enhanced_stack_trace(client: Client):
async with new_worker(
client, MultiFileStackTraceWorkflow, activities=[]
) as multifile_worker:
mf_handle = await client.start_workflow(
MultiFileStackTraceWorkflow.run_multifile_workflow,
id=f"workflow-{uuid.uuid4()}",
task_queue=multifile_worker.task_queue,
)

async def mf_status() -> str:
return await mf_handle.query(MultiFileStackTraceWorkflow.status)

await assert_eq_eventually("waiting", mf_status)

mf_trace = await mf_handle.query("__enhanced_stack_trace")

assert "wait_on_timer" in [
loc.function_name for stack in mf_trace.stacks for loc in stack.locations
]

filenames = [None, None]
for source in mf_trace.sources.keys():
if source.endswith("external_coroutine.py 15"):
filenames[1] = source
if source.endswith("external_stack_trace.py 49"):
filenames[0] = source

assert filenames[0] is not None and filenames[1] is not None
assert 'status[0] = "waiting" # multifile test' in str(
[fileslice.content for fileslice in mf_trace.sources.values()]
)

assert "await wait_on_timer(self._status)" in str(
[fileslice.content for fileslice in mf_trace.sources.values()]
)
assert mf_trace.sdk.version == __version__


@dataclass
class MyDataClass:
field1: str