from hera.workflows import (
    Steps,
    Workflow,
    script,
)


# the script runs inside the official Dask image so the Dask runtime is available
@script(image="ghcr.io/dask/dask:latest")
def dask_computation(namespace: str = "default", n_workers: int = 1) -> None:
    import subprocess

    # this is required because the dask-kubernetes and dask distributed packages are not included in the image by default
    # ideally, you'd have a package in your organization that takes care of the following cluster details :)
    subprocess.run(
        ["pip", "install", "dask-kubernetes", "dask[distributed]"],
        stdout=subprocess.PIPE,
        universal_newlines=True,
    )

    import dask.array as da
    from dask.distributed import Client
    from dask_kubernetes.operator import KubeCluster

    # spin up an ephemeral Dask cluster on Kubernetes via the dask-kubernetes operator
    cluster = KubeCluster(
        image="ghcr.io/dask/dask:latest",
        resources={"requests": {"memory": "2G", "cpu": "1"}, "limits": {"memory": "4G", "cpu": "1"}},
        namespace=namespace,
        n_workers=n_workers,
    )

    # once the `Client` is initialized, all Dask calls are implicitly performed against it
    client = Client(cluster)
    array = da.ones((1000, 1000, 1000))
    print("Array mean = {array_mean}, expected = 1.0".format(array_mean=array.mean().compute()))
    client.close()


# a single-step workflow that runs the Dask computation as an Argo step
with Workflow(generate_name="dask-", entrypoint="s") as w:
    with Steps(name="s"):
        dask_computation()
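
# A minimal usage sketch: once the workflow object `w` exists, it can be rendered to
# YAML for inspection or submitted to an Argo Workflows server. The server address
# below is an assumption; point it at your own Argo server. Rendering to YAML
# requires Hera's optional PyYAML extra.
print(w.to_yaml())

# Submitting directly, assuming an Argo server reachable at https://localhost:2746:
# from hera.workflows import WorkflowsService
# w.workflows_service = WorkflowsService(host="https://localhost:2746")
# w.namespace = "default"
# w.create()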