21 for (uint64_t l = 0; l < lvlRank; l++)
28 uint64_t dimRank,
const uint64_t *dimSizes, uint64_t lvlRank,
29 const uint64_t *lvlSizes,
const LevelType *lvlTypes,
30 const uint64_t *dim2lvl,
const uint64_t *lvl2dim)
31 : dimSizes(dimSizes, dimSizes + dimRank),
32 lvlSizes(lvlSizes, lvlSizes + lvlRank),
33 lvlTypes(lvlTypes, lvlTypes + lvlRank),
34 dim2lvlVec(dim2lvl, dim2lvl + lvlRank),
35 lvl2dimVec(lvl2dim, lvl2dim + dimRank),
36 map(dimRank, lvlRank, dim2lvlVec.data(), lvl2dimVec.data()),
38 assert(dimSizes && lvlSizes && lvlTypes && dim2lvl && lvl2dim);
40 assert(dimRank > 0 &&
"Trivial shape is unsupported");
41 for (uint64_t d = 0; d < dimRank; d++)
42 assert(dimSizes[d] > 0 &&
"Dimension size zero has trivial storage");
44 assert(lvlRank > 0 &&
"Trivial shape is unsupported");
45 for (uint64_t l = 0; l < lvlRank; l++) {
46 assert(lvlSizes[l] > 0 &&
"Level size zero has trivial storage");
53 #define FATAL_PIV(NAME) \
54 fprintf(stderr, "<P,I,V> type mismatch for: " #NAME); \
57 #define IMPL_GETPOSITIONS(PNAME, P) \
58 void SparseTensorStorageBase::getPositions(std::vector<P> **, uint64_t) { \
59 FATAL_PIV("getPositions" #PNAME); \
62 #undef IMPL_GETPOSITIONS
64 #define IMPL_GETCOORDINATES(CNAME, C) \
65 void SparseTensorStorageBase::getCoordinates(std::vector<C> **, uint64_t) { \
66 FATAL_PIV("getCoordinates" #CNAME); \
69 #undef IMPL_GETCOORDINATES
71 #define IMPL_GETCOORDINATESBUFFER(CNAME, C) \
72 void SparseTensorStorageBase::getCoordinatesBuffer(std::vector<C> **, \
74 FATAL_PIV("getCoordinatesBuffer" #CNAME); \
77 #undef IMPL_GETCOORDINATESBUFFER
79 #define IMPL_GETVALUES(VNAME, V) \
80 void SparseTensorStorageBase::getValues(std::vector<V> **) { \
81 FATAL_PIV("getValues" #VNAME); \
86 #define IMPL_LEXINSERT(VNAME, V) \
87 void SparseTensorStorageBase::lexInsert(const uint64_t *, V) { \
88 FATAL_PIV("lexInsert" #VNAME); \
93 #define IMPL_EXPINSERT(VNAME, V) \
94 void SparseTensorStorageBase::expInsert(uint64_t *, V *, bool *, uint64_t *, \
95 uint64_t, uint64_t) { \
96 FATAL_PIV("expInsert" #VNAME); \
#define MLIR_SPARSETENSOR_FOREVERY_FIXED_O(DO)
#define MLIR_SPARSETENSOR_FOREVERY_V(DO)
#define IMPL_GETCOORDINATES(CNAME, C)
#define IMPL_GETPOSITIONS(PNAME, P)
#define IMPL_EXPINSERT(VNAME, V)
#define IMPL_LEXINSERT(VNAME, V)
#define IMPL_GETVALUES(VNAME, V)
static bool isAllDense(uint64_t lvlRank, const LevelType *lvlTypes)
#define IMPL_GETCOORDINATESBUFFER(CNAME, C)
bool isSingletonLvl(uint64_t l) const
Safely checks if the level uses singleton storage.
bool isCompressedLvl(uint64_t l) const
Safely checks if the level uses compressed storage.
SparseTensorStorageBase(const SparseTensorStorageBase &)=default
bool isDenseLvl(uint64_t l) const
Safely checks if the level uses dense storage.
bool isNOutOfMLvl(uint64_t l) const
Safely checks if the level uses n-out-of-m (structured sparsity) storage.
bool isLooseCompressedLvl(uint64_t l) const
Safely checks if the level uses loose compressed storage.
bool isDenseLT(LevelType lt)
This enum defines all the sparse representations supported by the SparseTensor dialect.