
Commit 2f65c69: xsmm config draft
Devjiu committed Dec 22, 2023 (parent: 5dee994)
Showing 5 changed files with 51 additions and 2 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/execute-test-script.yml
@@ -82,7 +82,7 @@ jobs:
shell: bash -el {0}
run: |
case "${{ inputs.compiler }}" in
- torch_mlir)
+ torch_mlir | torch_mlir_xsmm)
echo conda_env=mlir-dev >> $GITHUB_OUTPUT;;
ipex)
echo conda_env=ipex >> $GITHUB_OUTPUT;;
1 change: 1 addition & 0 deletions .github/workflows/test-single-config.yml
@@ -24,6 +24,7 @@ on:
- torchscript_onednn
- ipex
- torch_mlir
+ - torch_mlir_xsmm
tag:
description: tag to label this run in DB
required: true
3 changes: 2 additions & 1 deletion .github/workflows/test.yml
@@ -52,7 +52,8 @@ jobs:
{device: 'cpu', compiler: 'torchscript_onednn'},
{device: 'cpu', compiler: 'ipex'},
# {device: 'xpu', compiler: 'ipex'},
- {device: 'cpu', compiler: 'torch_mlir'}
+ {device: 'cpu', compiler: 'torch_mlir'},
+ {device: 'cpu', compiler: 'torch_mlir_xsmm'}
]
test_script: ${{ fromJson(inputs.test_scripts) }}
fail-fast: false
1 change: 1 addition & 0 deletions dl_bench/cli/launcher.py
@@ -82,6 +82,7 @@ def parse_args():
"torchscript_onednn",
"ipex",
"torch_mlir",
"torch_mlir_xsmm",
],
help="Compilation mode to use. No compilation by default.",
)
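For orientation, a minimal argparse sketch of how the new mode is exposed on the CLI; the flag name "--compiler" is an assumption (inferred from the workflow input of the same name), and the choices that precede "torchscript_onednn" in the real file are elided:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--compiler",  # assumed flag name, not confirmed by the diff
    choices=[
        # ... earlier choices from the real file are elided here ...
        "torchscript_onednn",
        "ipex",
        "torch_mlir",
        "torch_mlir_xsmm",  # new: torch-mlir lowering executed via the XSMM backend
    ],
    default=None,
    help="Compilation mode to use. No compilation by default.",
)

args = parser.parse_args(["--compiler", "torch_mlir_xsmm"])
print(args.compiler)  # -> torch_mlir_xsmm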
46 changes: 46 additions & 0 deletions dl_bench/utils.py
@@ -229,6 +229,52 @@ def eval(self):

            compiled_model = result()
            print("Compiled with torch_mlir")
+        elif compile_mode == "torch_mlir_xsmm":
+            from torch_mlir._dynamo_fx_importer import import_fx_graph_as_func
+            from torch_mlir_e2e_test.configs.torchdynamo import jit
+            from torch_mlir_e2e_test.framework import TestOptions
+
+            # from torch_mlir_e2e_test.linalg_on_tensors_backends.refbackend import RefBackendLinalgOnTensorsBackend
+            from torch_mlir_e2e_test.linalg_on_tensors_backends.xsmmprotobackend import (
+                XsmmProtoLinalgOnTensorsBackend,
+            )
+            import torch.utils._pytree as pytree
+
+            # debug_timer seems to cause problems:
+            # TypeError: TestOptions.__init__() got an unexpected keyword argument 'debug_timer'
+            # opts = TestOptions(debug_timer=False, use_kernels=True)
+            opts = TestOptions()
+            module = jit(
+                model,
+                [sample_input],
+                "test_name",
+                opts,
+                output_type="linalg-on-tensors",
+            )
+            backend = XsmmProtoLinalgOnTensorsBackend(opts)
+            # backend = RefBackendLinalgOnTensorsBackend()
+            module = backend.compile(module)
+            backend_module = backend.load(module)
+
+            params = {
+                **dict(model.named_parameters(remove_duplicate=False)),
+                **dict(model.named_buffers(remove_duplicate=False)),
+            }
+            params_flat, params_spec = pytree.tree_flatten(params)
+            params_flat = list(params_flat)
+
+            class result:
+                def __call__(self, *args):
+                    numpy_inputs = recursively_convert_to_numpy(params_flat + [*args])
+                    return refine_result_type(
+                        getattr(backend_module, model.__class__.__name__)(*numpy_inputs)
+                    )
+
+                def eval(self):
+                    pass
+
+            compiled_model = result()
+            print("Compiled with XSMM torch_mlir")
        else:
            raise ValueError(f"Unsupported mode {compile_mode}")
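
The new branch mirrors the existing torch_mlir path: the model is jitted to linalg-on-tensors, compiled and loaded through XsmmProtoLinalgOnTensorsBackend, and wrapped in a callable that prepends the flattened parameters and buffers to the caller's inputs. Below is a self-contained sketch of that calling convention; the toy model and the fake entry point are assumptions for illustration and merely stand in for the real backend module.

import torch
import torch.utils._pytree as pytree

# Toy module standing in for the benchmark model (assumption for illustration).
model = torch.nn.Linear(4, 2)

# Same flattening as in the diff: parameters and buffers, duplicates kept.
params = {
    **dict(model.named_parameters(remove_duplicate=False)),
    **dict(model.named_buffers(remove_duplicate=False)),
}
params_flat, params_spec = pytree.tree_flatten(params)
params_flat = list(params_flat)

def fake_backend_entry(*numpy_inputs):
    # Stand-in for getattr(backend_module, model.__class__.__name__);
    # the real entry point receives plain numpy arrays positionally.
    return [a.shape for a in numpy_inputs]

x = torch.randn(3, 4)
numpy_inputs = [t.detach().numpy() for t in params_flat + [x]]
print(fake_backend_entry(*numpy_inputs))  # e.g. [(2, 4), (2,), (3, 4)]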
