[Bug]: LlamaDeploy Parsing LlamaIndex Context Handler #17011

Open
@hristogg

Description

Bug Description

Deploying the workflow below results in the following error:

ERROR:llama_deploy.services.workflow - Encountered error in task 0546b1fe-2041-4040-8bba-36bebc083379! keys must be str, int, float, bool or None, not ModelMetaclass

due to the context's collected-events buffer, which is keyed by the event classes themselves:

{<class 'main.OutputDoc'>: [], <class 'main.OutputQna'>: []}
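
The message is a plain json.dumps failure: the buffer's keys are pydantic event classes, whose type is ModelMetaclass, and json only accepts str, int, float, bool, or None as dictionary keys. A minimal reproduction outside llama_deploy:

import json
from pydantic import BaseModel

class SomeEvent(BaseModel):
    data: str

# Class objects are rejected as dict keys by the json encoder; pydantic
# models report their metaclass, hence "ModelMetaclass" in the message.
json.dumps({SomeEvent: []})
# TypeError: keys must be str, int, float, bool or None, not ModelMetaclass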

import asyncio
from llama_index.core.workflow import (
    Context,
    Event,
    StartEvent,
    StopEvent,
    Workflow,
    step,
)

class StepA(Event):
    data: str

class StepB(Event):
    data: str

class StepA1(Event):
    data: str

class StepB1(Event):
    data: str

class RAGWorkflow(Workflow):

    @step
    async def StartingStep(self, ctx: Context, ev: StartEvent) -> StepA | StepB:
        # Fan out: emit both branch events so stepa and stepb run concurrently.
        print('Going into starting step')
        ctx.send_event(StepA(data='Start -> StepA'))
        ctx.send_event(StepB(data='Start -> StepB'))
        return None
    
    @step
    async def stepa(self, ctx: Context, ev: StepA) -> StepA1:
        data = ev.data
        print(f'Reading data in stepA: {data}')
        data = data + ' + StepA append'
        print(f'Data after something from stepA {data}')
        ctx.send_event(StepA1(data=data))
        return None
    
    @step
    async def stepb(self, ctx: Context, ev: StepB) -> StepB1:
        data = ev.data
        print(f'Reading data in stepB: {data}')
        data = data + '+ stepB Append'
        print(f'Data after something from stepB: {data}')
        ctx.send_event(StepB1(data=data))
        return None
    
    @step
    async def combine(self, ctx: Context, ev: StepA1 | StepB1) -> StopEvent:
        print('Going into step combine')
        # Buffer incoming events until one of each type has arrived; this
        # class-keyed buffer is what llama_deploy fails to JSON-serialize.
        all_events = ctx.collect_events(ev, [StepA1, StepB1])
        if all_events is None:
            return None
        event_a, event_b = all_events
        #print(f'From Combine event_a is {event_a}')
        #print(f'From Combine event_b is {event_b}')
        combined = event_a.data + event_b.data
        print(f'Combined is {combined}')
        return StopEvent(result=combined)

# Local run without llama_deploy, kept for reference:
#async def main():
#    w = RAGWorkflow(timeout=60, verbose=True)
#    result = await w.run()
#    print(result)
#    return(result)
async def main():
    from llama_deploy import ControlPlaneConfig, WorkflowServiceConfig, deploy_workflow

    print('start deployment')
    await deploy_workflow(
        workflow=RAGWorkflow(timeout=100, verbose=True),
        workflow_config=WorkflowServiceConfig(
            host="127.0.0.1",  # local only; use "0.0.0.0" to listen on all interfaces
            port=8002,
            service_name="my_workflow",
        ),
        control_plane_config=ControlPlaneConfig(),
    )
    print("Created workflow successfully")


if __name__ == "__main__":
    asyncio.run(main())
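
A possible user-side workaround until the serialization is fixed, sketched under the assumption that only the class-keyed collect_events buffer is unserializable: accumulate branch results in the context under a string key instead.

    @step
    async def combine(self, ctx: Context, ev: StepA1 | StepB1) -> StopEvent | None:
        # Hypothetical alternative to collect_events: buffer results under a
        # string key so the serialized context holds only JSON-safe keys.
        results = await ctx.get("branch_results", default=[])
        results.append(ev.data)
        await ctx.set("branch_results", results)
        if len(results) < 2:
            return None
        return StopEvent(result=''.join(results))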

Version

0.11.23

Steps to Reproduce

First, deploy the core services:

from llama_deploy import ControlPlaneConfig, SimpleMessageQueueConfig, deploy_core


async def main() -> None:
    """Launches the core services required for the Llama workflow application."""
    await deploy_core(
        control_plane_config=ControlPlaneConfig(),
        message_queue_config=SimpleMessageQueueConfig(host="127.0.0.1", port=8009),
    )
if __name__ == "__main__":
    import asyncio

    asyncio.run(main())

and then deploy the workflow shown above.
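
To actually hit the error, submit a run through the control plane; a minimal sketch, assuming the LlamaDeployClient API shipped with the same llama_deploy release:

from llama_deploy import ControlPlaneConfig, LlamaDeployClient

# Connect to the control plane started by deploy_core and invoke the service;
# the task fails once the combine step's event buffer is serialized.
client = LlamaDeployClient(ControlPlaneConfig())
session = client.create_session()
print(session.run("my_workflow"))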

Relevant Logs/Tracebacks

ERROR:llama_deploy.services.workflow - Encountered error in task 34cc3f7e-6242-4cd5-a3eb-0f650c7813f9! keys must be str, int, float, bool or None, not ModelMetaclass
