clang-p2996/mlir/test/python/dialects/sparse_tensor/dialect.py
Stella Laurenzo f13893f66a [mlir][Python] Upstream the PybindAdaptors.h helpers and use it to implement sparse_tensor.encoding.
* The PybindAdaptors.h file has been evolving across different sub-projects (npcomp, circt) and has been used successfully for out-of-tree Python API interop/extensions and for defining custom types.
* Since sparse_tensor.encoding is the first in-tree custom attribute we are supporting, it seemed like the right time to upstream this header and use it to define the attribute in a way that works for both in-tree and out-of-tree use (previously, I had not wanted to upstream dead code that was not used in-tree).
* Adapted the circt version of `mlir_type_subclass` and added a corresponding `mlir_attribute_subclass`. Once this has seen a bit of mileage, I would like to transition the builtin types/attributes to this mechanism and delete the old in-tree-only `PyConcreteType` and `PyConcreteAttribute` template helpers (which cannot work reliably out of tree because they depend on internals).
* Added support for defaulting the MlirContext if none is passed, so that the same idioms as the in-tree versions are supported (a small sketch of these idioms follows this list).
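
A minimal sketch of the idioms this enables, mirroring the test file below. The attribute name and accessors are the ones exercised by the in-tree test; the implicit-context behavior assumed here is the defaulting described above.

from mlir.ir import Context, Attribute
from _mlir.dialects import sparse_tensor as st

with Context():
  # Downcast a generic parsed Attribute to the custom subclass.
  generic = Attribute.parse(
      '#sparse_tensor.encoding<{ dimLevelType = [ "compressed" ], '
      'pointerBitWidth = 16, indexBitWidth = 32 }>')
  enc = st.EncodingAttr(generic)
  # Build the same attribute via the factory; no context is passed, so the
  # current (with-scoped) Context is picked up by default.
  built = st.EncodingAttr.get(enc.dim_level_types, None, 16, 32)
  assert built == enc and isinstance(built, st.EncodingAttr)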

There is quite a bit going on here and I can split it up if needed, but I would prefer to keep the first use and the header together, so I am sending it out as one patch.

Differential Revision: https://reviews.llvm.org/D102144
2021-05-10 17:15:43 +00:00


# RUN: %PYTHON %s | FileCheck %s

from mlir.ir import *
# TODO: Import this into the user-package vs the cext.
from _mlir.dialects import sparse_tensor as st


def run(f):
  print("\nTEST:", f.__name__)
  f()
  return f


# CHECK-LABEL: TEST: testEncodingAttr1D
@run
def testEncodingAttr1D():
  with Context() as ctx:
    parsed = Attribute.parse(
        '#sparse_tensor.encoding<{ dimLevelType = [ "compressed" ], '
        'pointerBitWidth = 16, indexBitWidth = 32 }>')
    print(parsed)

    casted = st.EncodingAttr(parsed)
    # CHECK: equal: True
    print(f"equal: {casted == parsed}")

    # CHECK: dim_level_types: [<DimLevelType.compressed: 1>]
    print(f"dim_level_types: {casted.dim_level_types}")
    # CHECK: dim_ordering: None
    # Note that for 1D, the ordering is None, which exercises several special
    # cases.
    print(f"dim_ordering: {casted.dim_ordering}")
    # CHECK: pointer_bit_width: 16
    print(f"pointer_bit_width: {casted.pointer_bit_width}")
    # CHECK: index_bit_width: 32
    print(f"index_bit_width: {casted.index_bit_width}")

    created = st.EncodingAttr.get(casted.dim_level_types, None, 16, 32)
    print(created)
    # CHECK: created_equal: True
    print(f"created_equal: {created == casted}")

    # Verify that the factory creates an instance of the proper type.
    # CHECK: is_proper_instance: True
    print(f"is_proper_instance: {isinstance(created, st.EncodingAttr)}")
    # CHECK: created_pointer_bit_width: 16
    print(f"created_pointer_bit_width: {created.pointer_bit_width}")


# CHECK-LABEL: TEST: testEncodingAttr2D
@run
def testEncodingAttr2D():
  with Context() as ctx:
    parsed = Attribute.parse(
        '#sparse_tensor.encoding<{ dimLevelType = [ "dense", "compressed" ], '
        'dimOrdering = affine_map<(d0, d1) -> (d0, d1)>, '
        'pointerBitWidth = 16, indexBitWidth = 32 }>')
    print(parsed)

    casted = st.EncodingAttr(parsed)
    # CHECK: equal: True
    print(f"equal: {casted == parsed}")
    # CHECK: dim_level_types: [<DimLevelType.dense: 0>, <DimLevelType.compressed: 1>]
    print(f"dim_level_types: {casted.dim_level_types}")
    # CHECK: dim_ordering: (d0, d1) -> (d0, d1)
    print(f"dim_ordering: {casted.dim_ordering}")
    # CHECK: pointer_bit_width: 16
    print(f"pointer_bit_width: {casted.pointer_bit_width}")
    # CHECK: index_bit_width: 32
    print(f"index_bit_width: {casted.index_bit_width}")

    created = st.EncodingAttr.get(casted.dim_level_types, casted.dim_ordering,
                                  16, 32)
    print(created)
    # CHECK: created_equal: True
    print(f"created_equal: {created == casted}")