IGC intrinsic refactor - new input file format

This change adds support for decoding YAML files and replaces the Python files that define IGC intrinsics with YAML files, which simplifies the generator; a minimal loading sketch follows the commit metadata below.
Andrzejewski, Krystian
2024-07-01 10:59:23 +00:00
committed by igcbot
parent 4ccc798cb2
commit e9dc98c991
11 changed files with 700 additions and 128 deletions
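For orientation, the block below is a minimal sketch, not part of this commit, of the round trip that the reworked Intrinsic_definition_objects.py enables: load a YAML definitions file through the SafeLoader constructors the module registers, walk the typed objects, and dump them back with the same dumper settings the scripts in this commit use. The input path Intrinsic_definitions.yaml is a hypothetical example.

import yaml
# Importing the module registers the SafeLoader constructors for the custom tags
# (TypeID, AddressSpace, AttributeID and the *Definition YAMLObject classes).
import Intrinsic_definition_objects

with open("Intrinsic_definitions.yaml") as f:   # hypothetical input file
    grammar = yaml.safe_load(f)                 # yields an InternalGrammar instance

# grammar.types holds the shared TypeDefinition list,
# grammar.intrinsics the IntrinsicDefinition list.
for intrinsic in grammar.intrinsics:
    print(intrinsic.name, [str(arg.type_definition) for arg in intrinsic.arguments])

# Serialize back with the same dumper settings the generator scripts use.
text = yaml.dump(grammar, default_flow_style=False, allow_unicode=True,
                 encoding=None, sort_keys=False, indent=4)

The same objects can also be serialized to JSON via InternalGrammar.to_dict(), which is the path taken by the converter's --output_format json option.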

View File

@@ -13,12 +13,11 @@ set(IGC_BUILD__PROJ_LABEL__GenISAIntrinsics "${IGC_BUILD__PROJ__GenISAIntrinsics
include_directories("${CMAKE_CURRENT_SOURCE_DIR}")
set(IGC_BUILD__GenISAIntrinsics_GENERATOR_INPUT_MODULES
set(IGC_BUILD__GenISAIntrinsics_GENERATOR_INPUT_FILES
"${CMAKE_CURRENT_SOURCE_DIR}/Intrinsic_definitions.py"
)
set(IGC_BUILD__GenISAIntrinsics_GENERATOR_SCRIPTS
"${CMAKE_CURRENT_SOURCE_DIR}/generator/generate_intrinsic_files.py"
"${CMAKE_CURRENT_SOURCE_DIR}/generator/Intrinsic_definition_objects.py"
"${CMAKE_CURRENT_SOURCE_DIR}/generator/Intrinsic_definition_translation.py"
"${CMAKE_CURRENT_SOURCE_DIR}/generator/Intrinsic_generator.py"
@@ -50,11 +49,11 @@ OUTPUT
${IGC_BUILD__GenISAIntrinsics_GENERATOR_OUTPUT_SOURCES}
COMMAND
${CMAKE_COMMAND} -E env "PYTHONPATH=${BS_DIR_EXTERNAL_COMPONENTS}/build-tools/python-site-packages"
${PYTHON_EXECUTABLE} generate_intrinsic_files.py
"${IGC_BUILD__GenISAIntrinsics_GENERATOR_INPUT_MODULES}" --output ${IGC_OPTION__OUTPUT_DIR}/${IGC_CMAKE_CFG_INTDIR}
${PYTHON_EXECUTABLE} Intrinsic_generator.py
"${IGC_BUILD__GenISAIntrinsics_GENERATOR_INPUT_FILES}" --output ${IGC_OPTION__OUTPUT_DIR}/${IGC_CMAKE_CFG_INTDIR}
$<$<NOT:$<CONFIG:Release>>:"--use_comments">
DEPENDS
${IGC_BUILD__GenISAIntrinsics_GENERATOR_INPUT_MODULES}
${IGC_BUILD__GenISAIntrinsics_GENERATOR_INPUT_FILES}
${IGC_BUILD__GenISAIntrinsics_GENERATOR_SCRIPTS}
${IGC_BUILD__GenISAIntrinsics_GENERATOR_TEMPLATES}
WORKING_DIRECTORY
@@ -67,7 +66,7 @@ DEPENDS
${IGC_BUILD__GenISAIntrinsics_GENERATOR_OUTPUT_HEADERS}
${IGC_BUILD__GenISAIntrinsics_GENERATOR_OUTPUT_SOURCES}
SOURCES
${IGC_BUILD__GenISAIntrinsics_GENERATOR_INPUT_MODULES}
${IGC_BUILD__GenISAIntrinsics_GENERATOR_INPUT_FILES}
${IGC_BUILD__GenISAIntrinsics_GENERATOR_SCRIPTS}
${IGC_BUILD__GenISAIntrinsics_GENERATOR_TEMPLATES}
)

View File

@@ -174,7 +174,7 @@ private:
{
case TypeID::ArgumentReference:
{
uint8_t argIndex = typeDef.m_ArgumentReference.m_Index;
uint8_t argIndex = typeDef.m_Reference.m_Index;
IGC_ASSERT_MESSAGE(argIndex < overloadedTypes.size(), "Argument reference index must point out one of the overloaded types");
pDest = overloadedTypes[argIndex];
break;

View File

@@ -256,11 +256,11 @@ struct TypeDescriptionTraits<TypeID::ArgumentReference>
};
struct ArgumentReferenceType
struct ReferenceType
{
using Traits = TypeDescriptionTraits<TypeID::ArgumentReference>;
constexpr ArgumentReferenceType(uint8_t index = 0) :
constexpr ReferenceType(uint8_t index = 0) :
m_Index(index)
{
@@ -322,9 +322,9 @@ struct TypeDescription
{
}
constexpr TypeDescription(const ArgumentReferenceType& def) :
constexpr TypeDescription(const ReferenceType& def) :
m_ID(std::decay_t<decltype(def)>::Traits::scTypeID),
m_ArgumentReference{ def }
m_Reference{ def }
{
}
@@ -355,7 +355,7 @@ struct TypeDescription
func(m_Struct);
break;
case TypeID::ArgumentReference:
func(m_ArgumentReference);
func(m_Reference);
break;
default:
break;
@@ -403,7 +403,7 @@ struct TypeDescription
AnyType m_Any;
PointerType m_Pointer;
StructType m_Struct;
ArgumentReferenceType m_ArgumentReference;
ReferenceType m_Reference;
};
const TypeID m_ID;
@@ -420,9 +420,9 @@ struct EmptyTypeHolderT
};
template<uint8_t index>
struct ArgumentReferenceTypeHolderT
struct ReferenceTypeHolderT
{
static constexpr TypeDescription scType = ArgumentReferenceType(index);
static constexpr TypeDescription scType = ReferenceType(index);
};
template<typename TypeHolderT = EmptyTypeHolderT>

View File

@@ -8,16 +8,58 @@
from typing import List, Set
from enum import Enum
import yaml
def generate_anchor(self, node):
if node.tag == TypeDefinition.yaml_tag:
return node.anchor_name
else:
res = super(yaml.Dumper, self).generate_anchor(node)
return res
yaml.Dumper.generate_anchor = generate_anchor
def ignore_aliases(self, data):
if isinstance(data, TypeDefinition):
return False
else:
return True
yaml.Dumper.ignore_aliases = ignore_aliases
def custom_represent_data(self, data):
res = super(yaml.Dumper, self).represent_data(data)
if isinstance(data, TypeDefinition):
res.anchor_name = str(data)
return res
yaml.Dumper.represent_data = custom_represent_data
def increase_indent(self, flow=False, indentless=False):
"""Ensure that lists items are always indented."""
return super(yaml.Dumper, self).increase_indent(
flow=flow,
indentless=False)
yaml.Dumper.increase_indent = increase_indent
class QuotedString(str): # just subclass the built-in str
pass
def quoted_scalar(dumper, data): # a representer to force quotations on scalars
return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='"')
# add the QuotedString custom type with a forced quotation representer to your dumper
yaml.add_representer(QuotedString, quoted_scalar)
class TypeID(Enum):
Void = 0,
Integer = 1,
Float = 2,
Vector = 3,
Struct = 4,
Pointer = 5,
Any = 6,
ArgumentReference = 7
Void = 0
Integer = 1
Float = 2
Vector = 3
Struct = 4
Pointer = 5
Any = 6
Reference = 7
def __str__(self):
return self.name
@@ -30,16 +72,33 @@ class TypeID(Enum):
else:
raise ValueError("{value} is not present in {cls.__name__}")
def TypeID_representer(dumper, data):
return dumper.represent_scalar(u'!TypeID', u'%s' % str(data), style='"')
yaml.add_representer(TypeID, TypeID_representer)
def TypeID_constructor(loader, node):
value = loader.construct_scalar(node)
return TypeID.from_str(value)
yaml.SafeLoader.add_constructor(u'!TypeID', TypeID_constructor)
class AddressSpace(Enum):
Undefined = 0,
Private = 1,
Global = 2,
Constant = 3,
Local = 4,
Undefined = 0
Private = 1
Global = 2
Constant = 3
Local = 4
Generic = 5
def __int__(self):
return self.value[0] - 1
return self.value - 1
def __str__(self):
return self.name
def __repr__(self):
return '%s("%s")' % (self.__class__.__name__, self)
@classmethod
def from_str(cls, value : str):
@@ -49,20 +108,34 @@ class AddressSpace(Enum):
else:
raise ValueError("{value} is not present in {cls.__name__}")
def AddressSpace_representer(dumper, data):
return dumper.represent_scalar(u'!AddressSpace', u'%s' % str(data), style='"')
yaml.add_representer(AddressSpace, AddressSpace_representer)
def AddressSpace_constructor(loader, node):
value = loader.construct_scalar(node)
return AddressSpace.from_str(value)
yaml.SafeLoader.add_constructor(u'!AddressSpace', AddressSpace_constructor)
class AttributeID(Enum):
NoUnwind = 0,
ReadNone = 1,
ReadOnly = 2,
ArgMemOnly = 3,
WriteOnly = 4,
NoReturn = 5,
NoDuplicate = 6,
Convergent = 7,
NoUnwind = 0
ReadNone = 1
ReadOnly = 2
ArgMemOnly = 3
WriteOnly = 4
NoReturn = 5
NoDuplicate = 6
Convergent = 7
InaccessibleMemOnly = 8
def __str__(self):
return self.name
def __repr__(self):
return '%s("%s")' % (self.__class__.__name__, self)
@classmethod
def from_str(cls, value : str):
for key, val in cls.__members__.items():
@@ -71,7 +144,23 @@ class AttributeID(Enum):
else:
raise ValueError("{value} is not present in {cls.__name__}")
class TypeDefinition:
def AttributeID_representer(dumper, data):
return dumper.represent_scalar(u'!AttributeID', u'%s' % str(data), style='"')
yaml.add_representer(AttributeID, AttributeID_representer)
def AttributeID_constructor(loader, node):
value = loader.construct_scalar(node)
return AttributeID.from_str(value)
yaml.SafeLoader.add_constructor(u'!AttributeID', AttributeID_constructor)
class SafeYAMLObject(yaml.YAMLObject):
yaml_loader = yaml.SafeLoader
class TypeDefinition(SafeYAMLObject):
yaml_tag = u'TypeDefinition'
def __init__(self, typeID : TypeID, bit_width : int = 0, num_elements : int = 0,
address_space : AddressSpace = AddressSpace.Undefined, internal_type = None,
index : int = 0, internal_types = None):
@@ -90,9 +179,148 @@ class TypeDefinition:
self.pointed_type = internal_type
elif self.ID == TypeID.Struct:
self.member_types = internal_types
elif self.ID == TypeID.ArgumentReference:
elif self.ID == TypeID.Reference:
self.index = index
def __str__(self):
if self.ID == TypeID.Integer:
if self.bit_width == 0:
return "any_int"
else:
return "i{}".format(self.bit_width)
elif self.ID == TypeID.Float:
if self.bit_width == 0:
return "any_float"
else:
return "f{}".format(self.bit_width)
elif self.ID == TypeID.Any:
if self.default_type:
return "any_{}_".format(self.default_type)
else:
return "any"
elif self.ID == TypeID.Vector:
if self.element_type and self.num_elements:
return "v{}_{}_".format(self.num_elements, self.element_type)
elif self.element_type and self.num_elements == 0:
return "v_{}_".format(self.element_type)
elif not self.element_type and self.num_elements:
return "v{}_any_".format(self.num_elements)
else:
return "any_vector"
elif self.ID == TypeID.Pointer:
addr_space = int(self.address_space)
if addr_space >= 0 and self.pointed_type:
return "p{}_{}_".format(addr_space, self.pointed_type)
elif self.pointed_type and addr_space < 0:
return "p_{}_".format(self.pointed_type)
elif not self.pointed_type and addr_space <= 0:
return "p{}_any_".format(addr_space)
else:
return "any_pointer"
elif self.ID == TypeID.Struct:
if len(self.member_types) > 0:
return "s_{}_".format('-'.join([str(m) for m in self.member_types]))
else:
return "any_struct"
elif self.ID == TypeID.Reference:
return "ref_{}_".format(self.index)
return "void"
def __repr__(self):
if self.ID == TypeID.Integer:
return "%s(ID=%r, bit_width=%r)" % (
self.__class__.__name__, self.ID, self.bit_width)
elif self.ID == TypeID.Float:
return "%s(ID=%r, bit_width=%r)" % (
self.__class__.__name__, self.ID, self.bit_width)
elif self.ID == TypeID.Any:
return "%s(ID=%r, default_type=%r)" % (
self.__class__.__name__, self.ID, self.default_type)
elif self.ID == TypeID.Vector:
return "%s(ID=%r, num_elements=%r, element_type=%r)" % (
self.__class__.__name__, self.ID, self.num_elements, self.element_type)
elif self.ID == TypeID.Pointer:
return "%s(ID=%r, address_space=%r, pointed_type=%r)" % (
self.__class__.__name__, self.ID, self.address_space, self.pointed_type)
elif self.ID == TypeID.Struct:
return "%s(ID=%r, member_types=%r)" % (
self.__class__.__name__, self.ID, self.member_types)
elif self.ID == TypeID.Reference:
return "%s(ID=%r, index=%r)" % (
self.__class__.__name__, self.ID, self.index)
return "%s(ID=%r)" % (
self.__class__.__name__, self.ID)
def __eq__(self, other):
if isinstance(other, TypeDefinition) and self.ID == other.ID:
if self.ID == TypeID.Integer:
return self.bit_width == other.bit_width
elif self.ID == TypeID.Float:
return self.bit_width == other.bit_width
elif self.ID == TypeID.Any:
return self.default_type == other.default_type
elif self.ID == TypeID.Vector:
return self.num_elements == other.num_elements and self.element_type == other.element_type
elif self.ID == TypeID.Pointer:
return self.address_space == other.address_space and self.pointed_type == other.pointed_type
elif self.ID == TypeID.Struct:
return tuple(self.member_types) == tuple(other.member_types)
elif self.ID == TypeID.Reference:
return self.index == other.index
return True
else:
return False
def __lt__(self, other):
if isinstance(other, TypeDefinition) and self.ID == other.ID:
if self.ID == TypeID.Integer:
return self.bit_width < other.bit_width
elif self.ID == TypeID.Float:
return self.bit_width < other.bit_width
elif self.ID == TypeID.Any:
if self.default_type and other.default_type:
return self.default_type < other.default_type
return not self.default_type
elif self.ID == TypeID.Vector:
return self.element_type < other.element_type if self.element_type != other.element_type else self.num_elements < other.num_elements
elif self.ID == TypeID.Pointer:
return int(self.address_space) < int(other.address_space) if int(self.address_space) != int(other.address_space) else self.pointed_type < other.pointed_type
elif self.ID == TypeID.Struct:
if len(self.member_types) == len(other.member_types):
diffrent_types = [(self.member_types[i], other.member_types[i]) for i in range(len(self.member_types))]
return diffrent_types[0][0] < diffrent_types[0][1]
else:
return len(self.member_types) < len(other.member_types)
elif self.ID == TypeID.Reference:
return self.index < other.index
return True
else:
return self.ID.value < other.ID.value
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
if self.ID == TypeID.Integer:
return hash((self.ID, self.bit_width))
elif self.ID == TypeID.Float:
return hash((self.ID, self.bit_width))
elif self.ID == TypeID.Any:
return hash((self.ID, self.default_type))
elif self.ID == TypeID.Vector:
return hash((self.ID, self.num_elements, self.element_type))
elif self.ID == TypeID.Pointer:
return hash((self.ID, self.address_space, self.pointed_type))
elif self.ID == TypeID.Struct:
return hash((self.ID, tuple(self.member_types)))
elif self.ID == TypeID.Reference:
return hash((self.ID, self.index))
elif self.ID == TypeID.Void:
return hash((self.ID))
else:
assert(0)
return 0
def to_dict(self):
if self.ID == TypeID.Integer:
res = {
@@ -121,7 +349,7 @@ class TypeDefinition:
"ID": str(self.ID),
"member_types": [ el.to_dict()for el in self.member_types ]
}
elif self.ID == TypeID.ArgumentReference:
elif self.ID == TypeID.Reference:
res = {
"ID": str(self.ID),
"index": self.index
@@ -154,16 +382,59 @@ class TypeDefinition:
elif ID == TypeID.Struct:
member_types = [TypeDefinition.from_dict(el) for el in json_dct['member_types']]
return TypeDefinition(ID, internal_types=member_types)
elif ID == TypeID.ArgumentReference:
elif ID == TypeID.Reference:
return TypeDefinition(ID, index=json_dct['index'])
elif ID == TypeID.Any:
internal_type = TypeDefinition.from_dict(json_dct['default_type']) if json_dct['default_type'] else None
return TypeDefinition(ID, internal_type=internal_type)
class ArgumentTypeDefinition:
class ArgumentDefinition(SafeYAMLObject):
yaml_tag = u'ArgumentDefinition'
@classmethod
def from_yaml(cls, loader, node):
arg_dict = loader.construct_mapping(node, deep=True)
return cls(**arg_dict)
def __init__(self, name : str, type_definition : TypeDefinition, comment : str):
self.name = name
self.type_definition = type_definition
self.comment = QuotedString(comment)
def __repr__(self):
return "%s(name=%r, type_definition=%r, comment=%r)" % (
self.__class__.__name__, self.name, self.type_definition, self.comment)
def to_dict(self):
res = {
"name": self.name,
"type_definition": self.type_definition.to_dict(),
"comment": self.comment
}
return res
@staticmethod
def from_dict(json_dct : dict):
type_definition = TypeDefinition.from_dict(json_dct['type_definition'])
return ArgumentDefinition(json_dct['name'], type_definition, json_dct['comment'])
class ReturnDefinition(SafeYAMLObject):
yaml_tag = u'ReturnDefinition'
@classmethod
def from_yaml(cls, loader, node):
arg_dict = loader.construct_mapping(node, deep=True)
return cls(**arg_dict)
def __init__(self, type_definition : TypeDefinition, comment : str):
self.type_definition = type_definition
self.comment = comment
self.comment = QuotedString(comment)
def __repr__(self):
return "%s(type_definition=%r, comment=%r)" % (
self.__class__.__name__, self.type_definition, self.comment)
def to_dict(self):
res = {
@@ -175,33 +446,194 @@ class ArgumentTypeDefinition:
@staticmethod
def from_dict(json_dct : dict):
type_definition = TypeDefinition.from_dict(json_dct['type_definition'])
return ArgumentTypeDefinition(type_definition, json_dct['comment'])
return ReturnDefinition(type_definition, json_dct['comment'])
class IntrinsicDefinition:
def __init__(self, name : str, comment : str, return_type : ArgumentTypeDefinition,
argument_types : List[ArgumentTypeDefinition], attributes : Set[AttributeID]):
self.name = name
self.comment = comment
self.return_type = return_type
self.argument_types = argument_types
class IntrinsicDefinition(SafeYAMLObject):
yaml_tag = u'IntrinsicDefinition'
@classmethod
def from_yaml(cls, loader, node):
arg_dict = loader.construct_mapping(node, deep=True)
res = cls(**arg_dict)
return res
def __init__(self, name : str, comment : str, return_definition : ReturnDefinition,
arguments : List[ArgumentDefinition], attributes : Set[AttributeID]):
self.name = QuotedString(name)
self.comment = QuotedString(comment)
self.return_definition = return_definition
self.arguments = arguments
self.attributes = sorted(list(attributes), key=lambda x: x.__str__())
def __repr__(self):
return "%s(name=%r, comment=%r, return_definition=%r, arguments=%r, attributes=%r)" % (
self.__class__.__name__, self.name, self.comment, self.return_definition, self.arguments, self.attributes)
def to_dict(self):
res = {
"name": self.name,
"comment": self.comment,
"return_type": self.return_type.to_dict(),
"argument_types":[ el.to_dict() for el in self.argument_types],
"return_definition": self.return_definition.to_dict(),
"arguments":[ el.to_dict() for el in self.arguments],
"attributes": [str(el) for el in self.attributes]
}
return res
@staticmethod
def from_dict(json_dct : dict):
return_type = ArgumentTypeDefinition.from_dict(json_dct['return_type'])
argument_types = []
for arg in json_dct['argument_types']:
argument_types.append(ArgumentTypeDefinition.from_dict(arg))
return_definition = ArgumentDefinition.from_dict(json_dct['return_definition'])
arguments = []
for arg in json_dct['arguments']:
arguments.append(ArgumentDefinition.from_dict(arg))
attributes = set(AttributeID.from_str(el) for el in json_dct['attributes'])
return IntrinsicDefinition(json_dct['name'], json_dct['comment'], return_type,
argument_types, attributes)
return IntrinsicDefinition(json_dct['name'], json_dct['comment'], return_definition,
arguments, attributes)
class PrimitiveArgumentDefinition(SafeYAMLObject):
yaml_tag = u'PrimitiveArgumentDefinition'
@classmethod
def from_yaml(cls, loader, node):
arg_dict = loader.construct_mapping(node, deep=True)
return cls(**arg_dict)
def __init__(self, name : str, comment : str):
self.name = name
self.comment = comment
def __repr__(self):
return "%s(name=%r, comment=%r)" % (
self.__class__.__name__, self.name, self.comment)
def to_dict(self):
res = {
"name": self.name.to_dict(),
"comment": self.comment.to_dict()
}
return res
@staticmethod
def from_dict(json_dct : dict):
return PrimitiveArgumentDefinition(json_dct['name'], json_dct['commnet'])
class IntrinsicPrimitive(SafeYAMLObject):
yaml_tag = u'IntrinsicPrimitive'
@classmethod
def from_yaml(cls, loader, node):
arg_dict = loader.construct_mapping(node, deep=True)
return cls(**arg_dict)
def __init__(self, name : str, comment : str, arguments : List[PrimitiveArgumentDefinition]):
self.name = QuotedString(name)
self.comment = QuotedString(comment)
self.arguments = arguments
def __repr__(self):
return "%s(name=%r, comment=%r, arguments=%r)" % (
self.__class__.__name__, self.name, self.comment, self.arguments)
def to_dict(self):
res = {
"name": self.name,
"comment": self.comment,
"arguments":[ el.to_dict() for el in self.arguments]
}
return res
@staticmethod
def from_dict(json_dct : dict):
arguments = []
for arg in json_dct['arguments']:
arguments.append(PrimitiveArgumentDefinition.from_dict(arg))
return IntrinsicDefinition(json_dct['name'], json_dct['comment'], arguments)
class InternalGrammar(SafeYAMLObject):
yaml_tag = u'InternalGrammar'
def __init__(self, types : List[TypeDefinition], intrinsics : List[IntrinsicDefinition]):
self.types = types
self.intrinsics = intrinsics
def __repr__(self):
return "%s(types=%r, intrinsics=%r)" % (
self.__class__.__name__, self.types, self.intrinsics)
def to_dict(self):
res = {
"types": [ el.to_dict()for el in self.types ],
"intrinsics": [ el.to_dict()for el in self.intrinsics ]
}
return res
@staticmethod
def from_dict(json_dct : dict):
types = []
for arg in json_dct['types']:
types.append(TypeDefinition.from_dict(arg))
intrinsics = []
for arg in json_dct['intrinsics']:
intrinsics.append(IntrinsicDefinition.from_dict(arg))
return InternalGrammar(types, intrinsics)
if __name__ == '__main__':
import sys
import argparse
from Intrinsic_utils import *
import json
def main(args):
parser = argparse.ArgumentParser(description='Recreate a file with IGC intrinsic definitions.')
parser.add_argument("input", help="the source path to the file with intrinsic defintions")
parser.add_argument('--input_format',
default='yaml',
choices=['yaml', 'json'],
help='the data representation format of the input')
parser.add_argument('--output_format',
default='yaml',
choices=['yaml', 'json'],
help='the data representation format of the output')
parser.add_argument("--output", help="the destination path for the file with intrinsic definitions",
type=str)
parser.add_argument("-v", "--verbose", help="print intrinsic definitions in the current IGC format to the console",
action="store_true")
parser.add_argument("-l", "--license_header", help="attaches a license header to the output file",
action="store_true")
args = parser.parse_args(args[1:])
with open(args.input) as f:
try:
if args.input_format == 'json':
internal_grammar = InternalGrammar.from_dict(json.load(f))
else:
internal_grammar = yaml.safe_load(f)
except Exception as err:
print("Error on loading data from: {}\n{}".format(args.input, err))
if args.output_format == 'json':
text = json.dumps(internal_grammar.to_dict(), indent=2)
else:
text = yaml.dump(internal_grammar, default_flow_style = False, allow_unicode = True, encoding = None,
sort_keys = False, indent=4)
if args.verbose:
print(text)
if args.output:
if args.license_header:
template_lookup = TemplateLookup(directories=[r'.'])
template = Template(filename=r'templates/intrinsic_definition.mako',
lookup=template_lookup)
output_file_path = args.output
write_to_file_using_template(output_file_path, template, content=text)
else:
with open(args.output, 'w') as f:
f.write(text)
main(sys.argv)

View File

@@ -13,13 +13,13 @@ import json
import importlib.util
import argparse
from Intrinsic_definition_objects import *
from Intrinsic_utils import file_path, Path
from Intrinsic_utils import *
def translate_type_definition(type_description):
if type_description == None:
return
if isinstance(type_description, int):
return TypeDefinition(TypeID.ArgumentReference, index=type_description)
return TypeDefinition(TypeID.Reference, index=type_description)
if isinstance(type_description, list):
internal_types = []
for type_str in type_description:
@@ -129,7 +129,99 @@ def translate_attribute_list(attribute):
}
return attribute_map[attribute]
def generate_type_definitions_from_modules(inputs):
def topological_sort(input_list : List[TypeDefinition]) -> List[TypeDefinition]:
provided = set()
def is_provided(type_def : TypeDefinition):
if type_def.ID == TypeID.Any:
return not type_def.default_type or type_def.default_type in provided
elif type_def.ID == TypeID.Vector:
return not type_def.element_type or type_def.element_type in provided
elif type_def.ID == TypeID.Pointer:
return not type_def.pointed_type or type_def.pointed_type in provided
elif type_def.ID == TypeID.Struct:
return len(type_def.member_types) == 0 or all([member_type in provided for member_type in type_def.member_types])
return True
copied_input = set(input_list)
res = []
while copied_input:
found_val = None
for val in input_list:
if val in copied_input and is_provided(val):
found_val = val
break
else:
assert(0)
break
provided.add(found_val)
copied_input.remove(found_val)
res.append(found_val)
return res
def get_unique_types_list(intrinsic_definitions : List[IntrinsicDefinition]):
types = {}
for intrinsic in intrinsic_definitions:
if intrinsic.return_definition.type_definition in types:
intrinsic.return_definition.type_definition = types[intrinsic.return_definition.type_definition]
else:
types[intrinsic.return_definition.type_definition] = intrinsic.return_definition.type_definition
for arg in intrinsic.arguments:
if arg.type_definition in types:
arg.type_definition = types[arg.type_definition]
else:
types[arg.type_definition] = arg.type_definition
res = list(types.values())
def process_type(type_def : TypeDefinition):
if type_def.ID == TypeID.Any:
if not type_def.default_type:
return
if type_def.default_type in types:
if type_def.default_type in types:
type_def.default_type = types[type_def.default_type]
else:
types[type_def.default_type] = type_def.default_type
process_type(type_def.default_type)
elif type_def.ID == TypeID.Vector:
if not type_def.element_type:
return
if type_def.element_type in types:
if type_def.element_type in types:
type_def.element_type = types[type_def.element_type]
else:
types[type_def.element_type] = type_def.element_type
process_type(type_def.element_type)
elif type_def.ID == TypeID.Pointer:
if not type_def.pointed_type:
return
if type_def.pointed_type in types:
if type_def.pointed_type in types:
type_def.pointed_type = types[type_def.pointed_type]
else:
types[type_def.pointed_type] = type_def.pointed_type
process_type(type_def.pointed_type)
elif type_def.ID == TypeID.Struct:
if len(type_def.member_types) == 0:
return
for i in range(len(type_def.member_types)):
if type_def.member_types[i] in types:
if type_def.member_types[i] in types:
type_def.member_types[i] = types[type_def.member_types[i]]
else:
types[type_def.member_types[i]] = type_def.member_types[i]
process_type(type_def.member_types[i])
for type_def in res:
process_type(type_def)
res = list(types.values())
res.sort()
res = topological_sort(res)
return res
def generate_intrinsic_definitions_from_modules(*inputs):
intrinsics = dict()
for el in inputs:
spec = importlib.util.spec_from_file_location(Path(el).stem, el)
@@ -142,19 +234,19 @@ def generate_type_definitions_from_modules(inputs):
name = key
comment = value[0]
func_type_def = value[1]
return_type_str = func_type_def[0]
if isinstance(return_type_str, list):
type_strs = [x[0] for x in return_type_str]
comments = '\n'.join(['Member {}: {}'.format(idx, x[1]) for idx, x in enumerate(return_type_str) if x[1] != ''])
return_type = ArgumentTypeDefinition(translate_type_definition(type_strs), comments)
return_definition_str = func_type_def[0]
if isinstance(return_definition_str, list):
type_strs = [x[0] for x in return_definition_str]
comments = '\n'.join(['Member {}: {}'.format(idx, x[1]) for idx, x in enumerate(return_definition_str) if x[1] != ''])
return_definition = ReturnDefinition(translate_type_definition(type_strs), comments)
else:
return_type = ArgumentTypeDefinition(translate_type_definition(return_type_str[0]), return_type_str[1])
return_definition = ReturnDefinition(translate_type_definition(return_definition_str[0]), return_definition_str[1])
argument_type_strs = func_type_def[1]
argument_types = []
for type_str in argument_type_strs:
argument_types.append(ArgumentTypeDefinition(translate_type_definition(type_str[0]), type_str[1]))
arguments = []
for index, type_str in enumerate(argument_type_strs):
arguments.append(ArgumentDefinition("Arg{}".format(index),translate_type_definition(type_str[0]), type_str[1]))
attributes = translate_attribute_list(func_type_def[2])
intrinsic_definitions.append(IntrinsicDefinition(name, comment, return_type, argument_types, attributes))
intrinsic_definitions.append(IntrinsicDefinition(name, comment, return_definition, arguments, attributes))
return intrinsic_definitions
if __name__ == '__main__':
@@ -162,22 +254,66 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Translate from past IGC format to new IGC format.')
parser.add_argument("inputs", nargs='+', help="the source path to the file with intrinsic defintions (past IGC format)",
type=file_path)
parser.add_argument('--format',
default='yaml',
choices=['yaml', 'json'],
help='the data representation format of the output')
parser.add_argument("--output", help="the destination path for the file with intrinsic definitions (current IGC format)",
type=str)
parser.add_argument("-v", "--verbose", help="print intrinsic definitions in the current IGC format to the console",
action="store_true")
parser.add_argument("-l", "--license_header", help="attaches a license header to the output file",
action="store_true")
parser.add_argument("-u", "--update", help="consider the current content of the output file",
action="store_true")
args = parser.parse_args(args)
args = parser.parse_args(args[1:])
intrinsic_definitions = generate_type_definitions_from_modules(args.inputs)
serializable_obj = [el.to_dict() for el in intrinsic_definitions]
ret_json_txt = json.dumps(serializable_obj, indent=2)
intrinsic_definitions = generate_intrinsic_definitions_from_modules(*args.inputs)
if args.update and os.path.isfile(args.output):
with open(args.output) as f:
json_ext = '.json'
file_ext = Path(args.output).suffix
try:
if file_ext == json_ext:
intrinsic_definitions.extend(InternalGrammar.from_dict(json.load(f)).intrinsics)
else:
intrinsic_definitions.extend(yaml.safe_load(f).intrinsics)
except Exception as err:
print("Error on loading data from: {}\n{}".format(args.output, err))
intrinsic_ids = set()
unique_intrinsic_definitions = []
for intrinsic_def in intrinsic_definitions:
if intrinsic_def.name in intrinsic_ids:
print("WARNING: The following intrinsic definition is repeated: {}.".format(intrinsic_def.name))
continue
unique_intrinsic_definitions.append(intrinsic_def)
intrinsic_ids.add(intrinsic_def.name)
intrinsic_definitions = unique_intrinsic_definitions
types = get_unique_types_list(intrinsic_definitions)
internal_grammar = InternalGrammar(types, intrinsic_definitions)
if args.format == 'json':
text = json.dumps(internal_grammar.to_dict(), indent=2)
else:
text = yaml.dump(internal_grammar, default_flow_style = False, allow_unicode = True, encoding = None,
sort_keys = False, indent=4)
if args.verbose:
print(ret_json_txt)
print(text)
if args.output:
with open(args.output, 'w') as f:
f.write(ret_json_txt)
if args.license_header:
template_lookup = TemplateLookup(directories=[r'.'])
template = Template(filename=r'templates/intrinsic_definition.mako',
lookup=template_lookup)
output_file_path = args.output
write_to_file_using_template(output_file_path, template, content=text)
else:
with open(args.output, 'w') as f:
f.write(text)
main(sys.argv)

View File

@@ -14,6 +14,8 @@ from Intrinsic_definition_objects import *
from Intrinsic_utils import *
from itertools import takewhile
from Intrinsic_definition_translation import generate_intrinsic_definitions_from_modules
class IntrinsicLookupTableEntry:
def __init__(self, id : str, lookup_name : str, common_prefix_len : int):
self.id = id
@@ -49,10 +51,10 @@ class IntrinsicFormatter:
return output
@staticmethod
def get_argument_name(argument_type, index):
def get_argument_name(argument, index):
output = "Arg{}".format(index)
if hasattr(argument_type, 'name'):
output = argument_type.name
if hasattr(argument, 'name'):
output = argument.name
if (index == 0):
output = "{} = 0".format(output)
output = "{},".format(output)
@@ -108,8 +110,8 @@ class IntrinsicFormatter:
elif type_def.ID == TypeID.Struct:
output = "StructTypeHolderT<MemberTypeListHolderT<{}>>".format(
", ".join([ "{}".format(IntrinsicFormatter.get_type_definition(member_type)) for member_type in type_def.member_types ]))
elif type_def.ID == TypeID.ArgumentReference:
output = "ArgumentReferenceTypeHolderT<{}>".format(type_def.index)
elif type_def.ID == TypeID.Reference:
output = "ReferenceTypeHolderT<{}>".format(type_def.index)
elif type_def.ID == TypeID.Void:
output = "EmptyTypeHolderT"
elif type_def.ID == TypeID.Any:
@@ -158,6 +160,16 @@ def generate_intrinsic_defintion_files(intrinsic_definitions : List[IntrinsicDef
IntrinsicFormatter.use_comments = use_comments
intrinsic_ids = set()
unique_intrinsic_definitions = []
for intrinsic_def in intrinsic_definitions:
if intrinsic_def.name in intrinsic_ids:
print("WARNING: The following intrinsic definition is repeated: {}.".format(intrinsic_def.name))
continue
unique_intrinsic_definitions.append(intrinsic_def)
intrinsic_ids.add(intrinsic_def.name)
intrinsic_definitions = unique_intrinsic_definitions
template_lookup = TemplateLookup(directories=[r'.'])
template = Template(filename=r'templates/GenIntrinsicEnum.h.mako',
lookup=template_lookup)
@@ -198,13 +210,26 @@ if __name__ == '__main__':
type=dir_path)
parser.add_argument("--use_comments", action='store_true')
args = parser.parse_args(args)
raw_data = []
args = parser.parse_args(args[1:])
intrinsic_definitions = []
for el in args.inputs:
with open(el) as f:
raw_data.extend(json.load(f))
intrinsic_definitions = [ IntrinsicDefinition.from_dict(el) for el in raw_data ]
json_ext = '.json'
py_ext = '.py'
file_ext = Path(el).suffix
if file_ext == json_ext:
with open(el) as f:
try:
intrinsic_definitions.extend(InternalGrammar.from_dict(json.load(f)).intrinsics)
except Exception as err:
print("Error on loading data from: {}\n{}".format(el, err))
elif file_ext == py_ext:
intrinsic_definitions.extend(generate_intrinsic_definitions_from_modules(el))
else:
with open(el) as f:
try:
intrinsic_definitions.extend(yaml.safe_load(f).intrinsics)
except Exception as err:
print("Error on loading data from: {}\n{}".format(el, err))
if len(intrinsic_definitions) > 0:
generate_intrinsic_defintion_files(intrinsic_definitions, args.output, args.use_comments)

View File

@@ -1,29 +0,0 @@
# ========================== begin_copyright_notice ============================
#
# Copyright (C) 2023 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
# =========================== end_copyright_notice =============================
import sys
import argparse
from Intrinsic_utils import file_path, dir_path
from Intrinsic_definition_translation import generate_type_definitions_from_modules
from Intrinsic_generator import generate_intrinsic_defintion_files
if __name__ == '__main__':
def main(args):
parser = argparse.ArgumentParser(description='Generates IGC intrinsic files.')
parser.add_argument("inputs", nargs='+', help="the source path to the file with intrinsic defintions (current IGC format)",
type=file_path)
parser.add_argument("--use_comments", action='store_true')
parser.add_argument("--output", help="the directory for the files with intrinsic definitions",
type=dir_path)
args = parser.parse_args(args[1:])
intrinsic_definitions = generate_type_definitions_from_modules(args.inputs)
generate_intrinsic_defintion_files(intrinsic_definitions, args.output, args.use_comments)
main(sys.argv)

View File

@@ -20,11 +20,11 @@ const char* IntrinsicDefinition<llvm::GenISAIntrinsic::ID::${el.name}>::scFuncti
const char* IntrinsicDefinition<llvm::GenISAIntrinsic::ID::${el.name}>::scMainComment =
${IntrinsicFormatter.get_comment(el.comment)};
const char* IntrinsicDefinition<llvm::GenISAIntrinsic::ID::${el.name}>::scResultComment =
${IntrinsicFormatter.get_comment(el.return_type.comment)};
% if hasattr(el, 'argument_types') and el.argument_types and len(el.argument_types) > 0:
${IntrinsicFormatter.get_comment(el.return_definition.comment)};
% if hasattr(el, 'arguments') and el.arguments and len(el.arguments) > 0:
const std::array<const char*, static_cast<uint32_t>(IntrinsicDefinition<llvm::GenISAIntrinsic::ID::${el.name}>::Argument::Count)>
IntrinsicDefinition<llvm::GenISAIntrinsic::ID::${el.name}>::scArgumentComments {
% for arg in el.argument_types:
% for arg in el.arguments:
${IntrinsicFormatter.get_argument_comment(arg.comment, loop.last)}
% endfor
};

View File

@@ -41,19 +41,19 @@ public:
static const char* scFunctionRootName;
static constexpr TypeDescription scResTypes{
${IntrinsicFormatter.get_type_definition(el.return_type.type_definition)}::scType
${IntrinsicFormatter.get_type_definition(el.return_definition.type_definition)}::scType
};
% if hasattr(el, 'argument_types') and el.argument_types and len(el.argument_types) > 0:
% if hasattr(el, 'arguments') and el.arguments and len(el.arguments) > 0:
static constexpr std::array<TypeDescription, static_cast<uint32_t>(Argument::Count)> scArgumentTypes{
% for arg in el.argument_types:
% for arg in el.arguments:
${IntrinsicFormatter.get_argument_type_entry(arg.type_definition, loop.last)}
% endfor
};
% endif
static const char* scMainComment;
static const char* scResultComment;
% if hasattr(el, 'argument_types') and el.argument_types and len(el.argument_types) > 0:
% if hasattr(el, 'arguments') and el.arguments and len(el.arguments) > 0:
static const std::array<const char*, static_cast<uint32_t>(Argument::Count)> scArgumentComments;
% endif

View File

@@ -27,8 +27,8 @@ public:
enum class Argument : uint32_t
{
% if len(el.argument_types) > 0:
% for arg in el.argument_types:
% if len(el.arguments) > 0:
% for arg in el.arguments:
${IntrinsicFormatter.get_argument_name(arg, loop.index)}
% endfor
Count

View File

@@ -0,0 +1,9 @@
# ========================== begin_copyright_notice ============================
#
# Copyright (C) 2024 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
# =========================== end_copyright_notice =============================
${content}