Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor[next]: workflowify step3 #1516

Merged
merged 32 commits into from
Apr 29, 2024
Merged
Show file tree
Hide file tree
Changes from 15 commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
b0f7e3a
workflowify past linting and args injection
DropD Mar 26, 2024
445b35d
workflowify func -> FOAST
DropD Apr 2, 2024
3561a0c
fix missing attribute rename.
DropD Apr 2, 2024
2842cad
workflowify `FieldOperator.as_program`
DropD Apr 2, 2024
3357d11
make sure scan operator attributes are properly hashed
DropD Apr 3, 2024
3aab3c8
Merge branch 'main' into c20-workflowify-step3
DropD Apr 4, 2024
44b3060
Merge branch 'main' into c20-workflowify-step3
DropD Apr 4, 2024
ebf164c
add closure vars to hash for program definition
DropD Apr 4, 2024
d3a7851
[wip] integrating fieldop workflows
DropD Apr 4, 2024
a99fc48
update foast pretty printer doctest
DropD Apr 5, 2024
c71e0bf
fix code quality issues
DropD Apr 5, 2024
df648e8
reuse content hashing code in ffront.stages
DropD Apr 5, 2024
1f5c336
remove erroneously committed .python-version
DropD Apr 5, 2024
2cab5b7
Merge remote-tracking branch 'upstream/main' into c20-workflowify-step3
DropD Apr 8, 2024
0a54c3d
re-apply Program.itir fix after merge
DropD Apr 9, 2024
e1ed8d2
move backend transforms to `next.backend`
DropD Apr 11, 2024
3e30ca2
add tested toolchain workthrough notebook
DropD Apr 12, 2024
9e50bd8
fix toolchain walkthrough notebook
DropD Apr 12, 2024
678bd67
put default toolchain steps into definitions
DropD Apr 18, 2024
fad81c0
replace content_hash with dedicated cache key gen for ffront stages
DropD Apr 18, 2024
af1a016
add typeignores for hash algorithms
DropD Apr 18, 2024
556e8c5
downgrade singledispatch type hints for py < 310
DropD Apr 19, 2024
fba07f1
docstrings for AST based decorator wrappers
DropD Apr 19, 2024
50bdea2
todos for linting step calls in decorator wrappers
DropD Apr 19, 2024
63121fd
comment first occurrence of backwards compat backend pattern
DropD Apr 19, 2024
04e407b
stages hasher: avoid recursing into non-stage dataclasses
DropD Apr 19, 2024
ba838f1
Merge remote-tracking branch 'upstream/main' into c20-workflowify-step3
DropD Apr 19, 2024
9bb341a
rename `ffront.stages.cache_key` -> `ffront.stages.fingerprint_stage`
DropD Apr 22, 2024
888017d
improve ffront.stage fingerprinting
DropD Apr 22, 2024
345ce8e
update HashlibAlgorithm in eve
DropD Apr 22, 2024
7be23de
remove redundant singledispatch methods
DropD Apr 23, 2024
73c3d18
escape the recursion depth hammer when hashing stages
DropD Apr 23, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
94 changes: 87 additions & 7 deletions src/gt4py/next/backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,73 @@
from typing import Any, Generic

from gt4py._core import definitions as core_defs
from gt4py.eve import utils as eve_utils
from gt4py.next import allocators as next_allocators
from gt4py.next.ffront import func_to_past, past_process_args, past_to_itir, stages as ffront_stages
from gt4py.next.otf import recipes
from gt4py.next.ffront import (
foast_to_itir,
foast_to_past,
func_to_foast,
func_to_past,
past_process_args,
past_to_itir,
stages as ffront_stages,
)
from gt4py.next.ffront.past_passes import linters as past_linters
from gt4py.next.otf import recipes, workflow
from gt4py.next.program_processors import processor_interface as ppi


DEFAULT_TRANSFORMS = recipes.ProgramTransformWorkflow(
@dataclasses.dataclass(frozen=True)
class ProgArgsInjector(workflow.Workflow):
    """
    Workflow step that binds call arguments to a PAST program definition.

    Produces a ``PastClosure`` carrying the unchanged PAST node, closure
    variables and grid type from the input stage plus the stored
    ``args``/``kwargs``.
    """

    args: tuple[Any, ...] = dataclasses.field(default_factory=tuple)
    kwargs: dict[str, Any] = dataclasses.field(default_factory=dict)

    def __call__(self, inp: ffront_stages.PastProgramDefinition) -> ffront_stages.PastClosure:
        # Everything except args/kwargs is forwarded verbatim from the input stage.
        closure_fields = dict(
            past_node=inp.past_node,
            closure_vars=inp.closure_vars,
            grid_type=inp.grid_type,
            args=self.args,
            kwargs=self.kwargs,
        )
        return ffront_stages.PastClosure(**closure_fields)


@dataclasses.dataclass(frozen=True)
class FopArgsInjector(workflow.Workflow):
    """
    Workflow step that binds call arguments to a FOAST operator definition.

    Produces a ``FoastClosure`` wrapping the whole input stage, the stored
    ``args``/``kwargs``, and a closure-variable entry mapping the operator's
    own name to ``from_fieldop``.
    """

    args: tuple[Any, ...] = dataclasses.field(default_factory=tuple)
    kwargs: dict[str, Any] = dataclasses.field(default_factory=dict)
    from_fieldop: Any = None

    def __call__(self, inp: ffront_stages.FoastOperatorDefinition) -> ffront_stages.FoastClosure:
        # The operator must be resolvable under its own name inside the closure.
        injected_closure_vars = {inp.foast_node.id: self.from_fieldop}
        return ffront_stages.FoastClosure(
            foast_op_def=inp,
            closure_vars=injected_closure_vars,
            args=self.args,
            kwargs=self.kwargs,
        )


# Default field-operator toolchain: Python function -> FOAST, argument
# injection, FOAST -> PAST closure, argument processing, PAST -> ITIR, plus a
# direct FOAST -> ITIR step.  The func-to-FOAST and FOAST-to-ITIR/PAST steps
# are cached (keyed by `eve_utils.content_hash`) to avoid re-lowering
# unchanged definitions.
DEFAULT_FIELDOP_TRANSFORMS = recipes.FieldopTransformWorkflow(
    func_to_foast=func_to_foast.OptionalFuncToFoastFactory(cached=True),
    foast_inject_args=FopArgsInjector(),  # placeholder; replaced per-call with real args
    foast_to_past_closure=foast_to_past.FoastToPastClosure(
        foast_to_past=workflow.CachedStep(
            foast_to_past.foast_to_past,
            hash_function=eve_utils.content_hash,
        )
    ),
    past_transform_args=past_process_args.past_process_args,
    past_to_itir=past_to_itir.PastToItirFactory(),
    foast_to_itir=workflow.CachedStep(
        step=foast_to_itir.foast_to_itir, hash_function=eve_utils.content_hash
    ),
)


# Default program toolchain: Python function -> PAST (cached), linting,
# argument injection, argument processing, PAST -> ITIR.
DEFAULT_PROG_TRANSFORMS = recipes.ProgramTransformWorkflow(
    func_to_past=func_to_past.OptionalFuncToPastFactory(cached=True),
    past_lint=past_linters.LinterFactory(),
    past_inject_args=ProgArgsInjector(),  # placeholder; replaced per-call with real args
    past_transform_args=past_process_args.past_process_args,
    past_to_itir=past_to_itir.PastToItirFactory(),
)
Expand All @@ -35,13 +94,34 @@
class Backend(Generic[core_defs.DeviceTypeT]):
    """
    Executable backend: pairs a program executor with a buffer allocator and
    the transform toolchains that lower field operators and programs to a
    program call the executor can run.

    NOTE(review): the scraped diff interleaved removed lines with the new
    code (the old ``transformer: ... = DEFAULT_TRANSFORMS`` attribute, which
    references a name that no longer exists, and the old ``__call__``
    signature/body); this reconstruction keeps only the new-side code.
    """

    executor: ppi.ProgramExecutor
    allocator: next_allocators.FieldBufferAllocatorProtocol[core_defs.DeviceTypeT]
    transforms_fop: recipes.FieldopTransformWorkflow = DEFAULT_FIELDOP_TRANSFORMS
    transforms_prog: recipes.ProgramTransformWorkflow = DEFAULT_PROG_TRANSFORMS

    def __call__(
        self,
        program: ffront_stages.ProgramDefinition | ffront_stages.FieldOperatorDefinition,
        *args: tuple[Any],
        **kwargs: dict[str, Any],
    ) -> None:
        """
        Lower ``program`` through the appropriate toolchain and execute it.

        Field-operator definitions take the field-operator toolchain (with
        ``from_fieldop`` and ``offset_provider`` pulled out of ``kwargs`` as
        toolchain inputs rather than user call arguments); anything else is
        treated as a program definition.
        """
        if isinstance(
            program, (ffront_stages.FieldOperatorDefinition, ffront_stages.FoastOperatorDefinition)
        ):
            # These two kwargs drive the toolchain itself and must not be
            # forwarded as user call arguments.
            offset_provider = kwargs.pop("offset_provider")
            from_fieldop = kwargs.pop("from_fieldop")
            transforms_fop = self.transforms_fop.replace(
                foast_inject_args=FopArgsInjector(
                    args=args, kwargs=kwargs, from_fieldop=from_fieldop
                )
            )
            program_call = transforms_fop(program)
            # Re-attach the offset provider for execution after lowering.
            program_call = dataclasses.replace(
                program_call, kwargs=program_call.kwargs | {"offset_provider": offset_provider}
            )
        else:
            transforms_prog = self.transforms_prog.replace(
                past_inject_args=ProgArgsInjector(args=args, kwargs=kwargs)
            )
            program_call = transforms_prog(program)
        self.executor(program_call.program, *program_call.args, **program_call.kwargs)

@property
Expand Down
Loading
Loading