| | | |
|---|---|---|
| author | gingerBill <gingerBill@users.noreply.github.com> | 2023-06-23 14:33:01 +0100 |
| committer | GitHub <noreply@github.com> | 2023-06-23 14:33:01 +0100 |
| commit | 26a5614572afd39fc35fc32b47d5f7e5e9771e56 (patch) | |
| tree | ed94ed8589d3d0e32a5a8e494319bfcf292d27cd /src/types.cpp | |
| parent | f36e19e86fe88198fc1d17426afea577920efbf8 (diff) | |
| parent | 19ea0906332e6185cd0eefe873179b9058ccd725 (diff) | |
Merge branch 'master' into skytrias-vendor-additions
Diffstat (limited to 'src/types.cpp')
| | | |
|---|---|---|
| -rw-r--r-- | src/types.cpp | 1052 |
1 file changed, 506 insertions, 546 deletions
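Much of the patch below is mechanical: `gb_internal`/`gb_global` linkage annotations on the type predicates and allocators, the `Basic_UntypedUndef` → `Basic_UntypedUninit` rename, and a new `is_no_copy` struct flag. Among the behavioural changes visible here, `TypePath` gains a mutex, and `type_size_of`/`type_align_of` move to a lock-plus-atomic caching scheme: a size is computed once under `g_type_mutex` and later queries are served from the atomic `cached_size`/`cached_align` fields. The sketch below is a minimal standalone illustration of that caching pattern using standard C++ (`std::atomic`, `std::mutex`) rather than the compiler's `gb` helpers; the `Type` struct and `compute_size` stub are placeholders, not the real compiler definitions. The diff itself follows.

```cpp
// Minimal sketch of the lazy size-caching pattern introduced in this patch.
// NOTE: Type, compute_size, and main are illustrative placeholders, not the
// compiler's actual gb-based definitions.
#include <atomic>
#include <cstdint>
#include <mutex>

struct Type {
    std::atomic<int64_t> cached_size{-1}; // -1 means "not computed yet"
};

static std::mutex g_type_mutex; // analogue of the compiler's g_type_mutex

// Stand-in for type_size_of_internal(t, &path), which walks the type graph.
static int64_t compute_size(Type *t) {
    (void)t;
    return 8;
}

int64_t type_size_of(Type *t) {
    if (t == nullptr) {
        return 0;
    }
    int64_t size = t->cached_size.load();
    if (size >= 0) {
        return size; // lock-free fast path once the size has been cached
    }
    // First query for this type: compute under the global lock and cache it,
    // mirroring the MUTEX_GUARD(&g_type_mutex) block in the patched code.
    std::lock_guard<std::mutex> guard(g_type_mutex);
    size = compute_size(t);
    t->cached_size.store(size);
    return size;
}

int main() {
    Type t;
    return type_size_of(&t) == 8 ? 0 : 1;
}
```

Only the first computation for a given type takes the global lock; repeat queries stay lock-free, which mirrors how the patched `type_size_of` guards `type_size_of_internal` with `MUTEX_GUARD(&g_type_mutex)` and then stores the result.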
diff --git a/src/types.cpp b/src/types.cpp index 28628fd97..385ca926d 100644 --- a/src/types.cpp +++ b/src/types.cpp @@ -83,7 +83,7 @@ enum BasicKind { Basic_UntypedString, Basic_UntypedRune, Basic_UntypedNil, - Basic_UntypedUndef, + Basic_UntypedUninit, Basic_COUNT, @@ -149,6 +149,7 @@ struct TypeStruct { bool are_offsets_being_processed : 1; bool is_packed : 1; bool is_raw_union : 1; + bool is_no_copy : 1; bool is_poly_specialized : 1; }; @@ -287,7 +288,7 @@ enum TypeKind { Type_Count, }; -String const type_strings[] = { +gb_global String const type_strings[] = { {cast(u8 *)"Invalid", gb_size_of("Invalid")}, #define TYPE_KIND(k, ...) {cast(u8 *)#k, gb_size_of(#k)-1}, TYPE_KINDS @@ -368,10 +369,10 @@ enum : int { }; -bool is_type_comparable(Type *t); -bool is_type_simple_compare(Type *t); +gb_internal bool is_type_comparable(Type *t); +gb_internal bool is_type_simple_compare(Type *t); -u32 type_info_flags_of_type(Type *type) { +gb_internal u32 type_info_flags_of_type(Type *type) { if (type == nullptr) { return 0; } @@ -396,14 +397,14 @@ struct Selection { u8 swizzle_indices; // 2 bits per component, representing which swizzle index bool pseudo_field; }; -Selection empty_selection = {0}; +gb_global Selection const empty_selection = {0}; -Selection make_selection(Entity *entity, Array<i32> index, bool indirect) { +gb_internal Selection make_selection(Entity *entity, Array<i32> index, bool indirect) { Selection s = {entity, index, indirect}; return s; } -void selection_add_index(Selection *s, isize index) { +gb_internal void selection_add_index(Selection *s, isize index) { // IMPORTANT NOTE(bill): this requires a stretchy buffer/dynamic array so it requires some form // of heap allocation // TODO(bill): Find a way to use a backing buffer for initial use as the general case is probably .count<3 @@ -413,7 +414,7 @@ void selection_add_index(Selection *s, isize index) { array_add(&s->index, cast(i32)index); } -Selection selection_combine(Selection const &lhs, Selection const &rhs) { +gb_internal Selection selection_combine(Selection const &lhs, Selection const &rhs) { Selection new_sel = lhs; new_sel.indirect = lhs.indirect || rhs.indirect; new_sel.index = array_make<i32>(heap_allocator(), lhs.index.count+rhs.index.count); @@ -422,7 +423,7 @@ Selection selection_combine(Selection const &lhs, Selection const &rhs) { return new_sel; } -Selection sub_selection(Selection const &sel, isize offset) { +gb_internal Selection sub_selection(Selection const &sel, isize offset) { Selection res = {}; res.index.data = sel.index.data + offset; res.index.count = gb_max(sel.index.count - offset, 0); @@ -430,16 +431,6 @@ Selection sub_selection(Selection const &sel, isize offset) { return res; } -Selection sub_selection_with_length(Selection const &sel, isize offset, isize len) { - Selection res = {}; - res.index.data = sel.index.data + offset; - res.index.count = gb_max(len, gb_max(sel.index.count - offset, 0)); - res.index.capacity = res.index.count; - return res; -} - - - gb_global Type basic_types[] = { {Type_Basic, {Basic_Invalid, 0, 0, STR_LIT("invalid type")}}, @@ -524,7 +515,7 @@ gb_global Type basic_types[] = { {Type_Basic, {Basic_UntypedString, BasicFlag_String | BasicFlag_Untyped, 0, STR_LIT("untyped string")}}, {Type_Basic, {Basic_UntypedRune, BasicFlag_Integer | BasicFlag_Untyped, 0, STR_LIT("untyped rune")}}, {Type_Basic, {Basic_UntypedNil, BasicFlag_Untyped, 0, STR_LIT("untyped nil")}}, - {Type_Basic, {Basic_UntypedUndef, BasicFlag_Untyped, 0, STR_LIT("untyped undefined")}}, + {Type_Basic, 
{Basic_UntypedUninit, BasicFlag_Untyped, 0, STR_LIT("untyped uninitialized")}}, }; // gb_global Type basic_type_aliases[] = { @@ -598,7 +589,7 @@ gb_global Type *t_untyped_quaternion = &basic_types[Basic_UntypedQuaternion]; gb_global Type *t_untyped_string = &basic_types[Basic_UntypedString]; gb_global Type *t_untyped_rune = &basic_types[Basic_UntypedRune]; gb_global Type *t_untyped_nil = &basic_types[Basic_UntypedNil]; -gb_global Type *t_untyped_undef = &basic_types[Basic_UntypedUndef]; +gb_global Type *t_untyped_uninit = &basic_types[Basic_UntypedUninit]; @@ -633,7 +624,7 @@ gb_global Type *t_type_info_array = nullptr; gb_global Type *t_type_info_enumerated_array = nullptr; gb_global Type *t_type_info_dynamic_array = nullptr; gb_global Type *t_type_info_slice = nullptr; -gb_global Type *t_type_info_tuple = nullptr; +gb_global Type *t_type_info_parameters = nullptr; gb_global Type *t_type_info_struct = nullptr; gb_global Type *t_type_info_union = nullptr; gb_global Type *t_type_info_enum = nullptr; @@ -662,7 +653,7 @@ gb_global Type *t_type_info_array_ptr = nullptr; gb_global Type *t_type_info_enumerated_array_ptr = nullptr; gb_global Type *t_type_info_dynamic_array_ptr = nullptr; gb_global Type *t_type_info_slice_ptr = nullptr; -gb_global Type *t_type_info_tuple_ptr = nullptr; +gb_global Type *t_type_info_parameters_ptr = nullptr; gb_global Type *t_type_info_struct_ptr = nullptr; gb_global Type *t_type_info_union_ptr = nullptr; gb_global Type *t_type_info_enum_ptr = nullptr; @@ -732,45 +723,48 @@ gb_global RecursiveMutex g_type_mutex; struct TypePath; -i64 type_size_of (Type *t); -i64 type_align_of (Type *t); -i64 type_offset_of (Type *t, i32 index); -gbString type_to_string (Type *type, bool shorthand=true); -gbString type_to_string (Type *type, gbAllocator allocator, bool shorthand=true); -i64 type_size_of_internal(Type *t, TypePath *path); -void init_map_internal_types(Type *type); -Type * bit_set_to_int(Type *t); -bool are_types_identical(Type *x, Type *y); +gb_internal i64 type_size_of (Type *t); +gb_internal i64 type_align_of (Type *t); +gb_internal i64 type_offset_of (Type *t, i32 index); +gb_internal gbString type_to_string (Type *type, bool shorthand=true); +gb_internal gbString type_to_string (Type *type, gbAllocator allocator, bool shorthand=true); +gb_internal i64 type_size_of_internal(Type *t, TypePath *path); +gb_internal void init_map_internal_types(Type *type); +gb_internal Type * bit_set_to_int(Type *t); +gb_internal bool are_types_identical(Type *x, Type *y); -bool is_type_pointer(Type *t); -bool is_type_soa_pointer(Type *t); -bool is_type_proc(Type *t); -bool is_type_slice(Type *t); -bool is_type_integer(Type *t); -bool type_set_offsets(Type *t); -Type *base_type(Type *t); +gb_internal bool is_type_pointer(Type *t); +gb_internal bool is_type_soa_pointer(Type *t); +gb_internal bool is_type_proc(Type *t); +gb_internal bool is_type_slice(Type *t); +gb_internal bool is_type_integer(Type *t); +gb_internal bool type_set_offsets(Type *t); +gb_internal Type *base_type(Type *t); -i64 type_size_of_internal(Type *t, TypePath *path); -i64 type_align_of_internal(Type *t, TypePath *path); +gb_internal i64 type_size_of_internal(Type *t, TypePath *path); +gb_internal i64 type_align_of_internal(Type *t, TypePath *path); // IMPORTANT TODO(bill): SHould this TypePath code be removed since type cycle checking is handled much earlier on? 
struct TypePath { + RecursiveMutex mutex; Array<Entity *> path; // Entity_TypeName; bool failure; }; -void type_path_init(TypePath *tp) { +gb_internal void type_path_init(TypePath *tp) { tp->path.allocator = heap_allocator(); } -void type_path_free(TypePath *tp) { +gb_internal void type_path_free(TypePath *tp) { + mutex_lock(&tp->mutex); array_free(&tp->path); + mutex_unlock(&tp->mutex); } -void type_path_print_illegal_cycle(TypePath *tp, isize start_index) { +gb_internal void type_path_print_illegal_cycle(TypePath *tp, isize start_index) { GB_ASSERT(tp != nullptr); GB_ASSERT(start_index < tp->path.count); @@ -789,13 +783,15 @@ void type_path_print_illegal_cycle(TypePath *tp, isize start_index) { base_type(e->type)->failure = true; } -bool type_path_push(TypePath *tp, Type *t) { +gb_internal bool type_path_push(TypePath *tp, Type *t) { GB_ASSERT(tp != nullptr); if (t->kind != Type_Named) { return false; } Entity *e = t->Named.type_name; + mutex_lock(&tp->mutex); + for (isize i = 0; i < tp->path.count; i++) { Entity *p = tp->path[i]; if (p == e) { @@ -804,12 +800,19 @@ bool type_path_push(TypePath *tp, Type *t) { } array_add(&tp->path, e); + + mutex_unlock(&tp->mutex); + return true; } -void type_path_pop(TypePath *tp) { - if (tp != nullptr && tp->path.count > 0) { - array_pop(&tp->path); +gb_internal void type_path_pop(TypePath *tp) { + if (tp != nullptr) { + mutex_lock(&tp->mutex); + if (tp->path.count > 0) { + array_pop(&tp->path); + } + mutex_unlock(&tp->mutex); } } @@ -817,19 +820,22 @@ void type_path_pop(TypePath *tp) { #define FAILURE_SIZE 0 #define FAILURE_ALIGNMENT 0 -void init_type_mutex(void) { - mutex_init(&g_type_mutex); +gb_internal bool type_ptr_set_update(PtrSet<Type *> *s, Type *t) { + if (ptr_set_exists(s, t)) { + return true; + } + ptr_set_add(s, t); + return false; } -bool type_ptr_set_exists(PtrSet<Type *> *s, Type *t) { +gb_internal bool type_ptr_set_exists(PtrSet<Type *> *s, Type *t) { if (ptr_set_exists(s, t)) { return true; } // TODO(bill, 2019-10-05): This is very slow and it's probably a lot // faster to cache types correctly - for_array(i, s->entries) { - Type *f = s->entries[i].ptr; + for (Type *f : *s) { if (are_types_identical(t, f)) { ptr_set_add(s, t); return true; @@ -839,7 +845,7 @@ bool type_ptr_set_exists(PtrSet<Type *> *s, Type *t) { return false; } -Type *base_type(Type *t) { +gb_internal Type *base_type(Type *t) { for (;;) { if (t == nullptr) { break; @@ -855,7 +861,7 @@ Type *base_type(Type *t) { return t; } -Type *base_enum_type(Type *t) { +gb_internal Type *base_enum_type(Type *t) { Type *bt = base_type(t); if (bt != nullptr && bt->kind == Type_Enum) { @@ -864,7 +870,7 @@ Type *base_enum_type(Type *t) { return t; } -Type *core_type(Type *t) { +gb_internal Type *core_type(Type *t) { for (;;) { if (t == nullptr) { break; @@ -886,14 +892,14 @@ Type *core_type(Type *t) { return t; } -void set_base_type(Type *t, Type *base) { +gb_internal void set_base_type(Type *t, Type *base) { if (t && t->kind == Type_Named) { t->Named.base = base; } } -Type *alloc_type(TypeKind kind) { +gb_internal Type *alloc_type(TypeKind kind) { // gbAllocator a = heap_allocator(); gbAllocator a = permanent_allocator(); Type *t = gb_alloc_item(a, Type); @@ -905,7 +911,7 @@ Type *alloc_type(TypeKind kind) { } -Type *alloc_type_generic(Scope *scope, i64 id, String name, Type *specialized) { +gb_internal Type *alloc_type_generic(Scope *scope, i64 id, String name, Type *specialized) { Type *t = alloc_type(Type_Generic); t->Generic.id = id; t->Generic.name = name; @@ -914,26 +920,26 
@@ Type *alloc_type_generic(Scope *scope, i64 id, String name, Type *specialized) { return t; } -Type *alloc_type_pointer(Type *elem) { +gb_internal Type *alloc_type_pointer(Type *elem) { Type *t = alloc_type(Type_Pointer); t->Pointer.elem = elem; return t; } -Type *alloc_type_multi_pointer(Type *elem) { +gb_internal Type *alloc_type_multi_pointer(Type *elem) { Type *t = alloc_type(Type_MultiPointer); t->MultiPointer.elem = elem; return t; } -Type *alloc_type_soa_pointer(Type *elem) { +gb_internal Type *alloc_type_soa_pointer(Type *elem) { Type *t = alloc_type(Type_SoaPointer); t->SoaPointer.elem = elem; return t; } -Type *alloc_type_array(Type *elem, i64 count, Type *generic_count = nullptr) { +gb_internal Type *alloc_type_array(Type *elem, i64 count, Type *generic_count = nullptr) { if (generic_count != nullptr) { Type *t = alloc_type(Type_Array); t->Array.elem = elem; @@ -947,7 +953,7 @@ Type *alloc_type_array(Type *elem, i64 count, Type *generic_count = nullptr) { return t; } -Type *alloc_type_matrix(Type *elem, i64 row_count, i64 column_count, Type *generic_row_count = nullptr, Type *generic_column_count = nullptr) { +gb_internal Type *alloc_type_matrix(Type *elem, i64 row_count, i64 column_count, Type *generic_row_count = nullptr, Type *generic_column_count = nullptr) { if (generic_row_count != nullptr || generic_column_count != nullptr) { Type *t = alloc_type(Type_Matrix); t->Matrix.elem = elem; @@ -965,7 +971,7 @@ Type *alloc_type_matrix(Type *elem, i64 row_count, i64 column_count, Type *gener } -Type *alloc_type_enumerated_array(Type *elem, Type *index, ExactValue const *min_value, ExactValue const *max_value, TokenKind op) { +gb_internal Type *alloc_type_enumerated_array(Type *elem, Type *index, ExactValue const *min_value, ExactValue const *max_value, TokenKind op) { Type *t = alloc_type(Type_EnumeratedArray); t->EnumeratedArray.elem = elem; t->EnumeratedArray.index = index; @@ -980,37 +986,37 @@ Type *alloc_type_enumerated_array(Type *elem, Type *index, ExactValue const *min } -Type *alloc_type_slice(Type *elem) { +gb_internal Type *alloc_type_slice(Type *elem) { Type *t = alloc_type(Type_Slice); t->Array.elem = elem; return t; } -Type *alloc_type_dynamic_array(Type *elem) { +gb_internal Type *alloc_type_dynamic_array(Type *elem) { Type *t = alloc_type(Type_DynamicArray); t->DynamicArray.elem = elem; return t; } -Type *alloc_type_struct() { +gb_internal Type *alloc_type_struct() { Type *t = alloc_type(Type_Struct); return t; } -Type *alloc_type_union() { +gb_internal Type *alloc_type_union() { Type *t = alloc_type(Type_Union); return t; } -Type *alloc_type_enum() { +gb_internal Type *alloc_type_enum() { Type *t = alloc_type(Type_Enum); t->Enum.min_value = gb_alloc_item(permanent_allocator(), ExactValue); t->Enum.max_value = gb_alloc_item(permanent_allocator(), ExactValue); return t; } -Type *alloc_type_relative_pointer(Type *pointer_type, Type *base_integer) { +gb_internal Type *alloc_type_relative_pointer(Type *pointer_type, Type *base_integer) { GB_ASSERT(is_type_pointer(pointer_type)); GB_ASSERT(is_type_integer(base_integer)); Type *t = alloc_type(Type_RelativePointer); @@ -1019,7 +1025,7 @@ Type *alloc_type_relative_pointer(Type *pointer_type, Type *base_integer) { return t; } -Type *alloc_type_relative_slice(Type *slice_type, Type *base_integer) { +gb_internal Type *alloc_type_relative_slice(Type *slice_type, Type *base_integer) { GB_ASSERT(is_type_slice(slice_type)); GB_ASSERT(is_type_integer(base_integer)); Type *t = alloc_type(Type_RelativeSlice); @@ -1028,7 +1034,7 @@ 
Type *alloc_type_relative_slice(Type *slice_type, Type *base_integer) { return t; } -Type *alloc_type_named(String name, Type *base, Entity *type_name) { +gb_internal Type *alloc_type_named(String name, Type *base, Entity *type_name) { Type *t = alloc_type(Type_Named); t->Named.name = name; t->Named.base = base; @@ -1039,7 +1045,7 @@ Type *alloc_type_named(String name, Type *base, Entity *type_name) { return t; } -bool is_calling_convention_none(ProcCallingConvention calling_convention) { +gb_internal bool is_calling_convention_none(ProcCallingConvention calling_convention) { switch (calling_convention) { case ProcCC_None: case ProcCC_InlineAsm: @@ -1048,7 +1054,7 @@ bool is_calling_convention_none(ProcCallingConvention calling_convention) { return false; } -bool is_calling_convention_odin(ProcCallingConvention calling_convention) { +gb_internal bool is_calling_convention_odin(ProcCallingConvention calling_convention) { switch (calling_convention) { case ProcCC_Odin: case ProcCC_Contextless: @@ -1057,12 +1063,12 @@ bool is_calling_convention_odin(ProcCallingConvention calling_convention) { return false; } -Type *alloc_type_tuple() { +gb_internal Type *alloc_type_tuple() { Type *t = alloc_type(Type_Tuple); return t; } -Type *alloc_type_proc(Scope *scope, Type *params, isize param_count, Type *results, isize result_count, bool variadic, ProcCallingConvention calling_convention) { +gb_internal Type *alloc_type_proc(Scope *scope, Type *params, isize param_count, Type *results, isize result_count, bool variadic, ProcCallingConvention calling_convention) { Type *t = alloc_type(Type_Proc); if (variadic) { @@ -1087,26 +1093,17 @@ Type *alloc_type_proc(Scope *scope, Type *params, isize param_count, Type *resul return t; } -bool is_type_valid_for_keys(Type *t); +gb_internal bool is_type_valid_for_keys(Type *t); -Type *alloc_type_map(i64 count, Type *key, Type *value) { - if (key != nullptr) { - GB_ASSERT(value != nullptr); - } - Type *t = alloc_type(Type_Map); - t->Map.key = key; - t->Map.value = value; - return t; -} -Type *alloc_type_bit_set() { +gb_internal Type *alloc_type_bit_set() { Type *t = alloc_type(Type_BitSet); return t; } -Type *alloc_type_simd_vector(i64 count, Type *elem, Type *generic_count=nullptr) { +gb_internal Type *alloc_type_simd_vector(i64 count, Type *elem, Type *generic_count=nullptr) { Type *t = alloc_type(Type_SimdVector); t->SimdVector.count = count; t->SimdVector.elem = elem; @@ -1119,7 +1116,7 @@ Type *alloc_type_simd_vector(i64 count, Type *elem, Type *generic_count=nullptr) //////////////////////////////////////////////////////////////// -Type *type_deref(Type *t, bool allow_multi_pointer=false) { +gb_internal Type *type_deref(Type *t, bool allow_multi_pointer=false) { if (t != nullptr) { Type *bt = base_type(t); if (bt == nullptr) { @@ -1146,27 +1143,14 @@ Type *type_deref(Type *t, bool allow_multi_pointer=false) { return t; } -bool is_type_named(Type *t) { +gb_internal bool is_type_named(Type *t) { if (t->kind == Type_Basic) { return true; } return t->kind == Type_Named; } -bool is_type_named_alias(Type *t) { - if (!is_type_named(t)) { - return false; - } - Entity *e = t->Named.type_name; - if (e == nullptr) { - return false; - } - if (e->kind != Entity_TypeName) { - return false; - } - return e->TypeName.is_type_alias; -} -bool is_type_boolean(Type *t) { +gb_internal bool is_type_boolean(Type *t) { // t = core_type(t); t = base_type(t); if (t->kind == Type_Basic) { @@ -1174,7 +1158,7 @@ bool is_type_boolean(Type *t) { } return false; } -bool is_type_integer(Type 
*t) { +gb_internal bool is_type_integer(Type *t) { // t = core_type(t); t = base_type(t); if (t->kind == Type_Basic) { @@ -1182,7 +1166,7 @@ bool is_type_integer(Type *t) { } return false; } -bool is_type_integer_like(Type *t) { +gb_internal bool is_type_integer_like(Type *t) { t = core_type(t); if (t->kind == Type_Basic) { return (t->Basic.flags & (BasicFlag_Integer|BasicFlag_Boolean)) != 0; @@ -1196,7 +1180,7 @@ bool is_type_integer_like(Type *t) { return false; } -bool is_type_unsigned(Type *t) { +gb_internal bool is_type_unsigned(Type *t) { t = base_type(t); // t = core_type(t); if (t->kind == Type_Basic) { @@ -1204,7 +1188,7 @@ bool is_type_unsigned(Type *t) { } return false; } -bool is_type_integer_128bit(Type *t) { +gb_internal bool is_type_integer_128bit(Type *t) { // t = core_type(t); t = base_type(t); if (t->kind == Type_Basic) { @@ -1212,7 +1196,7 @@ bool is_type_integer_128bit(Type *t) { } return false; } -bool is_type_rune(Type *t) { +gb_internal bool is_type_rune(Type *t) { // t = core_type(t); t = base_type(t); if (t->kind == Type_Basic) { @@ -1220,7 +1204,7 @@ bool is_type_rune(Type *t) { } return false; } -bool is_type_numeric(Type *t) { +gb_internal bool is_type_numeric(Type *t) { // t = core_type(t); t = base_type(t); if (t->kind == Type_Basic) { @@ -1234,21 +1218,21 @@ bool is_type_numeric(Type *t) { } return false; } -bool is_type_string(Type *t) { +gb_internal bool is_type_string(Type *t) { t = base_type(t); if (t->kind == Type_Basic) { return (t->Basic.flags & BasicFlag_String) != 0; } return false; } -bool is_type_cstring(Type *t) { +gb_internal bool is_type_cstring(Type *t) { t = base_type(t); if (t->kind == Type_Basic) { return t->Basic.kind == Basic_cstring; } return false; } -bool is_type_typed(Type *t) { +gb_internal bool is_type_typed(Type *t) { t = base_type(t); if (t == nullptr) { return false; @@ -1258,7 +1242,7 @@ bool is_type_typed(Type *t) { } return true; } -bool is_type_untyped(Type *t) { +gb_internal bool is_type_untyped(Type *t) { t = base_type(t); if (t == nullptr) { return false; @@ -1268,7 +1252,7 @@ bool is_type_untyped(Type *t) { } return false; } -bool is_type_ordered(Type *t) { +gb_internal bool is_type_ordered(Type *t) { t = core_type(t); switch (t->kind) { case Type_Basic: @@ -1280,7 +1264,7 @@ bool is_type_ordered(Type *t) { } return false; } -bool is_type_ordered_numeric(Type *t) { +gb_internal bool is_type_ordered_numeric(Type *t) { t = core_type(t); switch (t->kind) { case Type_Basic: @@ -1288,7 +1272,7 @@ bool is_type_ordered_numeric(Type *t) { } return false; } -bool is_type_constant_type(Type *t) { +gb_internal bool is_type_constant_type(Type *t) { t = core_type(t); if (t->kind == Type_Basic) { return (t->Basic.flags & BasicFlag_ConstantType) != 0; @@ -1301,110 +1285,89 @@ bool is_type_constant_type(Type *t) { } return false; } -bool is_type_float(Type *t) { +gb_internal bool is_type_float(Type *t) { t = core_type(t); if (t->kind == Type_Basic) { return (t->Basic.flags & BasicFlag_Float) != 0; } return false; } -bool is_type_complex(Type *t) { +gb_internal bool is_type_complex(Type *t) { t = core_type(t); if (t->kind == Type_Basic) { return (t->Basic.flags & BasicFlag_Complex) != 0; } return false; } -bool is_type_quaternion(Type *t) { +gb_internal bool is_type_quaternion(Type *t) { t = core_type(t); if (t->kind == Type_Basic) { return (t->Basic.flags & BasicFlag_Quaternion) != 0; } return false; } -bool is_type_complex_or_quaternion(Type *t) { +gb_internal bool is_type_complex_or_quaternion(Type *t) { t = core_type(t); if (t->kind == 
Type_Basic) { return (t->Basic.flags & (BasicFlag_Complex|BasicFlag_Quaternion)) != 0; } return false; } -bool is_type_f16(Type *t) { - t = core_type(t); - if (t->kind == Type_Basic) { - return t->Basic.kind == Basic_f16; - } - return false; -} -bool is_type_f32(Type *t) { - t = core_type(t); - if (t->kind == Type_Basic) { - return t->Basic.kind == Basic_f32; - } - return false; -} -bool is_type_f64(Type *t) { - t = core_type(t); - if (t->kind == Type_Basic) { - return t->Basic.kind == Basic_f64; - } - return false; -} -bool is_type_pointer(Type *t) { +gb_internal bool is_type_pointer(Type *t) { t = base_type(t); if (t->kind == Type_Basic) { return (t->Basic.flags & BasicFlag_Pointer) != 0; } return t->kind == Type_Pointer; } -bool is_type_soa_pointer(Type *t) { +gb_internal bool is_type_soa_pointer(Type *t) { t = base_type(t); return t->kind == Type_SoaPointer; } -bool is_type_multi_pointer(Type *t) { +gb_internal bool is_type_multi_pointer(Type *t) { t = base_type(t); return t->kind == Type_MultiPointer; } -bool is_type_internally_pointer_like(Type *t) { +gb_internal bool is_type_internally_pointer_like(Type *t) { return is_type_pointer(t) || is_type_multi_pointer(t) || is_type_cstring(t) || is_type_proc(t); } -bool is_type_tuple(Type *t) { +gb_internal bool is_type_tuple(Type *t) { t = base_type(t); return t->kind == Type_Tuple; } -bool is_type_uintptr(Type *t) { +gb_internal bool is_type_uintptr(Type *t) { if (t->kind == Type_Basic) { return (t->Basic.kind == Basic_uintptr); } return false; } -bool is_type_rawptr(Type *t) { +gb_internal bool is_type_rawptr(Type *t) { if (t->kind == Type_Basic) { return t->Basic.kind == Basic_rawptr; } return false; } -bool is_type_u8(Type *t) { +gb_internal bool is_type_u8(Type *t) { if (t->kind == Type_Basic) { return t->Basic.kind == Basic_u8; } return false; } -bool is_type_array(Type *t) { +gb_internal bool is_type_array(Type *t) { t = base_type(t); return t->kind == Type_Array; } -bool is_type_enumerated_array(Type *t) { +gb_internal bool is_type_enumerated_array(Type *t) { t = base_type(t); return t->kind == Type_EnumeratedArray; } -bool is_type_matrix(Type *t) { +gb_internal bool is_type_matrix(Type *t) { t = base_type(t); return t->kind == Type_Matrix; } -i64 matrix_align_of(Type *t, struct TypePath *tp) { +gb_internal i64 matrix_align_of(Type *t, struct TypePath *tp) { t = base_type(t); GB_ASSERT(t->kind == Type_Matrix); @@ -1440,7 +1403,7 @@ i64 matrix_align_of(Type *t, struct TypePath *tp) { } -i64 matrix_type_stride_in_bytes(Type *t, struct TypePath *tp) { +gb_internal i64 matrix_type_stride_in_bytes(Type *t, struct TypePath *tp) { t = base_type(t); GB_ASSERT(t->kind == Type_Matrix); if (t->Matrix.stride_in_bytes != 0) { @@ -1469,7 +1432,7 @@ i64 matrix_type_stride_in_bytes(Type *t, struct TypePath *tp) { return stride_in_bytes; } -i64 matrix_type_stride_in_elems(Type *t) { +gb_internal i64 matrix_type_stride_in_elems(Type *t) { t = base_type(t); GB_ASSERT(t->kind == Type_Matrix); i64 stride = matrix_type_stride_in_bytes(t, nullptr); @@ -1477,7 +1440,7 @@ i64 matrix_type_stride_in_elems(Type *t) { } -i64 matrix_type_total_internal_elems(Type *t) { +gb_internal i64 matrix_type_total_internal_elems(Type *t) { t = base_type(t); GB_ASSERT(t->kind == Type_Matrix); i64 size = type_size_of(t); @@ -1485,7 +1448,7 @@ i64 matrix_type_total_internal_elems(Type *t) { return size/gb_max(elem_size, 1); } -i64 matrix_indices_to_offset(Type *t, i64 row_index, i64 column_index) { +gb_internal i64 matrix_indices_to_offset(Type *t, i64 row_index, i64 
column_index) { t = base_type(t); GB_ASSERT(t->kind == Type_Matrix); GB_ASSERT(0 <= row_index && row_index < t->Matrix.row_count); @@ -1495,7 +1458,7 @@ i64 matrix_indices_to_offset(Type *t, i64 row_index, i64 column_index) { return row_index + stride_elems*column_index; } -i64 matrix_row_major_index_to_offset(Type *t, i64 index) { +gb_internal i64 matrix_row_major_index_to_offset(Type *t, i64 index) { t = base_type(t); GB_ASSERT(t->kind == Type_Matrix); @@ -1503,7 +1466,7 @@ i64 matrix_row_major_index_to_offset(Type *t, i64 index) { i64 column_index = index%t->Matrix.column_count; return matrix_indices_to_offset(t, row_index, column_index); } -i64 matrix_column_major_index_to_offset(Type *t, i64 index) { +gb_internal i64 matrix_column_major_index_to_offset(Type *t, i64 index) { t = base_type(t); GB_ASSERT(t->kind == Type_Matrix); @@ -1513,13 +1476,13 @@ i64 matrix_column_major_index_to_offset(Type *t, i64 index) { } -bool is_matrix_square(Type *t) { +gb_internal bool is_matrix_square(Type *t) { t = base_type(t); GB_ASSERT(t->kind == Type_Matrix); return t->Matrix.row_count == t->Matrix.column_count; } -bool is_type_valid_for_matrix_elems(Type *t) { +gb_internal bool is_type_valid_for_matrix_elems(Type *t) { t = base_type(t); if (is_type_integer(t)) { return true; @@ -1534,32 +1497,28 @@ bool is_type_valid_for_matrix_elems(Type *t) { return false; } -bool is_type_dynamic_array(Type *t) { +gb_internal bool is_type_dynamic_array(Type *t) { t = base_type(t); return t->kind == Type_DynamicArray; } -bool is_type_slice(Type *t) { +gb_internal bool is_type_slice(Type *t) { t = base_type(t); return t->kind == Type_Slice; } -bool is_type_proc(Type *t) { +gb_internal bool is_type_proc(Type *t) { t = base_type(t); return t->kind == Type_Proc; } -bool is_type_asm_proc(Type *t) { +gb_internal bool is_type_asm_proc(Type *t) { t = base_type(t); return t->kind == Type_Proc && t->Proc.calling_convention == ProcCC_InlineAsm; } -bool is_type_poly_proc(Type *t) { - t = base_type(t); - return t->kind == Type_Proc && t->Proc.is_polymorphic; -} -bool is_type_simd_vector(Type *t) { +gb_internal bool is_type_simd_vector(Type *t) { t = base_type(t); return t->kind == Type_SimdVector; } -Type *base_array_type(Type *t) { +gb_internal Type *base_array_type(Type *t) { Type *bt = base_type(t); if (is_type_array(bt)) { return bt->Array.elem; @@ -1573,49 +1532,49 @@ Type *base_array_type(Type *t) { return t; } -bool is_type_generic(Type *t) { +gb_internal bool is_type_generic(Type *t) { t = base_type(t); return t->kind == Type_Generic; } -bool is_type_relative_pointer(Type *t) { +gb_internal bool is_type_relative_pointer(Type *t) { t = base_type(t); return t->kind == Type_RelativePointer; } -bool is_type_relative_slice(Type *t) { +gb_internal bool is_type_relative_slice(Type *t) { t = base_type(t); return t->kind == Type_RelativeSlice; } -bool is_type_u8_slice(Type *t) { +gb_internal bool is_type_u8_slice(Type *t) { t = base_type(t); if (t->kind == Type_Slice) { return is_type_u8(t->Slice.elem); } return false; } -bool is_type_u8_array(Type *t) { +gb_internal bool is_type_u8_array(Type *t) { t = base_type(t); if (t->kind == Type_Array) { return is_type_u8(t->Array.elem); } return false; } -bool is_type_u8_ptr(Type *t) { +gb_internal bool is_type_u8_ptr(Type *t) { t = base_type(t); if (t->kind == Type_Pointer) { return is_type_u8(t->Slice.elem); } return false; } -bool is_type_u8_multi_ptr(Type *t) { +gb_internal bool is_type_u8_multi_ptr(Type *t) { t = base_type(t); if (t->kind == Type_MultiPointer) { return 
is_type_u8(t->Slice.elem); } return false; } -bool is_type_rune_array(Type *t) { +gb_internal bool is_type_rune_array(Type *t) { t = base_type(t); if (t->kind == Type_Array) { return is_type_rune(t->Array.elem); @@ -1624,10 +1583,10 @@ bool is_type_rune_array(Type *t) { } -bool is_type_array_like(Type *t) { +gb_internal bool is_type_array_like(Type *t) { return is_type_array(t) || is_type_enumerated_array(t); } -i64 get_array_type_count(Type *t) { +gb_internal i64 get_array_type_count(Type *t) { Type *bt = base_type(t); if (bt->kind == Type_Array) { return bt->Array.count; @@ -1642,7 +1601,7 @@ i64 get_array_type_count(Type *t) { -Type *core_array_type(Type *t) { +gb_internal Type *core_array_type(Type *t) { for (;;) { t = base_array_type(t); switch (t->kind) { @@ -1657,7 +1616,7 @@ Type *core_array_type(Type *t) { } } -i32 type_math_rank(Type *t) { +gb_internal i32 type_math_rank(Type *t) { i32 rank = 0; for (;;) { t = base_type(t); @@ -1677,7 +1636,7 @@ i32 type_math_rank(Type *t) { } -Type *base_complex_elem_type(Type *t) { +gb_internal Type *base_complex_elem_type(Type *t) { t = core_type(t); if (t->kind == Type_Basic) { switch (t->Basic.kind) { @@ -1695,37 +1654,41 @@ Type *base_complex_elem_type(Type *t) { return t_invalid; } -bool is_type_struct(Type *t) { +gb_internal bool is_type_struct(Type *t) { t = base_type(t); return t->kind == Type_Struct; } -bool is_type_union(Type *t) { +gb_internal bool is_type_union(Type *t) { t = base_type(t); return t->kind == Type_Union; } -bool is_type_soa_struct(Type *t) { +gb_internal bool is_type_soa_struct(Type *t) { t = base_type(t); return t->kind == Type_Struct && t->Struct.soa_kind != StructSoa_None; } -bool is_type_raw_union(Type *t) { +gb_internal bool is_type_raw_union(Type *t) { t = base_type(t); return (t->kind == Type_Struct && t->Struct.is_raw_union); } -bool is_type_enum(Type *t) { +gb_internal bool is_type_no_copy(Type *t) { + t = base_type(t); + return (t->kind == Type_Struct && t->Struct.is_no_copy); +} +gb_internal bool is_type_enum(Type *t) { t = base_type(t); return (t->kind == Type_Enum); } -bool is_type_bit_set(Type *t) { +gb_internal bool is_type_bit_set(Type *t) { t = base_type(t); return (t->kind == Type_BitSet); } -bool is_type_map(Type *t) { +gb_internal bool is_type_map(Type *t) { t = base_type(t); return t->kind == Type_Map; } -bool is_type_union_maybe_pointer(Type *t) { +gb_internal bool is_type_union_maybe_pointer(Type *t) { t = base_type(t); if (t->kind == Type_Union && t->Union.variants.count == 1) { Type *v = t->Union.variants[0]; @@ -1735,7 +1698,7 @@ bool is_type_union_maybe_pointer(Type *t) { } -bool is_type_union_maybe_pointer_original_alignment(Type *t) { +gb_internal bool is_type_union_maybe_pointer_original_alignment(Type *t) { t = base_type(t); if (t->kind == Type_Union && t->Union.variants.count == 1) { Type *v = t->Union.variants[0]; @@ -1748,7 +1711,7 @@ bool is_type_union_maybe_pointer_original_alignment(Type *t) { -bool is_type_endian_big(Type *t) { +gb_internal bool is_type_endian_big(Type *t) { t = core_type(t); if (t->kind == Type_Basic) { if (t->Basic.flags & BasicFlag_EndianBig) { @@ -1764,7 +1727,7 @@ bool is_type_endian_big(Type *t) { } return build_context.endian_kind == TargetEndian_Big; } -bool is_type_endian_little(Type *t) { +gb_internal bool is_type_endian_little(Type *t) { t = core_type(t); if (t->kind == Type_Basic) { if (t->Basic.flags & BasicFlag_EndianLittle) { @@ -1781,7 +1744,7 @@ bool is_type_endian_little(Type *t) { return build_context.endian_kind == TargetEndian_Little; } -bool 
is_type_endian_platform(Type *t) { +gb_internal bool is_type_endian_platform(Type *t) { t = core_type(t); if (t->kind == Type_Basic) { return (t->Basic.flags & (BasicFlag_EndianLittle|BasicFlag_EndianBig)) == 0; @@ -1793,10 +1756,10 @@ bool is_type_endian_platform(Type *t) { return false; } -bool types_have_same_internal_endian(Type *a, Type *b) { +gb_internal bool types_have_same_internal_endian(Type *a, Type *b) { return is_type_endian_little(a) == is_type_endian_little(b); } -bool is_type_endian_specific(Type *t) { +gb_internal bool is_type_endian_specific(Type *t) { t = core_type(t); if (t->kind == Type_BitSet) { t = bit_set_to_int(t); @@ -1834,7 +1797,7 @@ bool is_type_endian_specific(Type *t) { return false; } -bool is_type_dereferenceable(Type *t) { +gb_internal bool is_type_dereferenceable(Type *t) { if (is_type_rawptr(t)) { return false; } @@ -1843,7 +1806,7 @@ bool is_type_dereferenceable(Type *t) { -bool is_type_different_to_arch_endianness(Type *t) { +gb_internal bool is_type_different_to_arch_endianness(Type *t) { switch (build_context.endian_kind) { case TargetEndian_Little: return !is_type_endian_little(t); @@ -1853,7 +1816,7 @@ bool is_type_different_to_arch_endianness(Type *t) { return false; } -Type *integer_endian_type_to_platform_type(Type *t) { +gb_internal Type *integer_endian_type_to_platform_type(Type *t) { t = core_type(t); if (t->kind == Type_BitSet) { t = bit_set_to_int(t); @@ -1893,35 +1856,31 @@ Type *integer_endian_type_to_platform_type(Type *t) { -bool is_type_any(Type *t) { +gb_internal bool is_type_any(Type *t) { t = base_type(t); return (t->kind == Type_Basic && t->Basic.kind == Basic_any); } -bool is_type_typeid(Type *t) { +gb_internal bool is_type_typeid(Type *t) { t = base_type(t); return (t->kind == Type_Basic && t->Basic.kind == Basic_typeid); } -bool is_type_untyped_nil(Type *t) { +gb_internal bool is_type_untyped_nil(Type *t) { t = base_type(t); - return (t->kind == Type_Basic && t->Basic.kind == Basic_UntypedNil); + // NOTE(bill): checking for `nil` or `---` at once is just to improve the error handling + return (t->kind == Type_Basic && (t->Basic.kind == Basic_UntypedNil || t->Basic.kind == Basic_UntypedUninit)); } -bool is_type_untyped_undef(Type *t) { +gb_internal bool is_type_untyped_uninit(Type *t) { t = base_type(t); - return (t->kind == Type_Basic && t->Basic.kind == Basic_UntypedUndef); + // NOTE(bill): checking for `nil` or `---` at once is just to improve the error handling + return (t->kind == Type_Basic && t->Basic.kind == Basic_UntypedUninit); } - -bool is_type_empty_union(Type *t) { +gb_internal bool is_type_empty_union(Type *t) { t = base_type(t); return t->kind == Type_Union && t->Union.variants.count == 0; } -bool is_type_empty_struct(Type *t) { - t = base_type(t); - return t->kind == Type_Struct && !t->Struct.is_raw_union && t->Struct.fields.count == 0; -} - -bool is_type_valid_for_keys(Type *t) { +gb_internal bool is_type_valid_for_keys(Type *t) { t = core_type(t); if (t->kind == Type_Generic) { return true; @@ -1932,7 +1891,7 @@ bool is_type_valid_for_keys(Type *t) { return type_size_of(t) > 0 && is_type_comparable(t); } -bool is_type_valid_bit_set_elem(Type *t) { +gb_internal bool is_type_valid_bit_set_elem(Type *t) { if (is_type_enum(t)) { return true; } @@ -1943,7 +1902,7 @@ bool is_type_valid_bit_set_elem(Type *t) { return false; } -Type *bit_set_to_int(Type *t) { +gb_internal Type *bit_set_to_int(Type *t) { GB_ASSERT(is_type_bit_set(t)); Type *bt = base_type(t); Type *underlying = bt->BitSet.underlying; @@ -1964,7 +1923,7 
@@ Type *bit_set_to_int(Type *t) { return nullptr; } -bool is_type_valid_vector_elem(Type *t) { +gb_internal bool is_type_valid_vector_elem(Type *t) { t = base_type(t); if (t->kind == Type_Basic) { if (t->Basic.flags & BasicFlag_EndianLittle) { @@ -1987,7 +1946,7 @@ bool is_type_valid_vector_elem(Type *t) { } -bool is_type_indexable(Type *t) { +gb_internal bool is_type_indexable(Type *t) { Type *bt = base_type(t); switch (bt->kind) { case Type_Basic: @@ -2009,7 +1968,7 @@ bool is_type_indexable(Type *t) { return false; } -bool is_type_sliceable(Type *t) { +gb_internal bool is_type_sliceable(Type *t) { Type *bt = base_type(t); switch (bt->kind) { case Type_Basic: @@ -2029,7 +1988,7 @@ bool is_type_sliceable(Type *t) { } -bool is_type_polymorphic_record(Type *t) { +gb_internal bool is_type_polymorphic_record(Type *t) { t = base_type(t); if (t->kind == Type_Struct) { return t->Struct.is_polymorphic; @@ -2039,7 +1998,7 @@ bool is_type_polymorphic_record(Type *t) { return false; } -Scope *polymorphic_record_parent_scope(Type *t) { +gb_internal Scope *polymorphic_record_parent_scope(Type *t) { t = base_type(t); if (is_type_polymorphic_record(t)) { if (t->kind == Type_Struct) { @@ -2051,7 +2010,7 @@ Scope *polymorphic_record_parent_scope(Type *t) { return nullptr; } -bool is_type_polymorphic_record_specialized(Type *t) { +gb_internal bool is_type_polymorphic_record_specialized(Type *t) { t = base_type(t); if (t->kind == Type_Struct) { return t->Struct.is_poly_specialized; @@ -2061,7 +2020,7 @@ bool is_type_polymorphic_record_specialized(Type *t) { return false; } -bool is_type_polymorphic_record_unspecialized(Type *t) { +gb_internal bool is_type_polymorphic_record_unspecialized(Type *t) { t = base_type(t); if (t->kind == Type_Struct) { return t->Struct.is_polymorphic && !t->Struct.is_poly_specialized; @@ -2071,7 +2030,7 @@ bool is_type_polymorphic_record_unspecialized(Type *t) { return false; } -TypeTuple *get_record_polymorphic_params(Type *t) { +gb_internal TypeTuple *get_record_polymorphic_params(Type *t) { t = base_type(t); switch (t->kind) { case Type_Struct: @@ -2089,7 +2048,7 @@ TypeTuple *get_record_polymorphic_params(Type *t) { } -bool is_type_polymorphic(Type *t, bool or_specialized=false) { +gb_internal bool is_type_polymorphic(Type *t, bool or_specialized=false) { if (t == nullptr) { return false; } @@ -2149,8 +2108,12 @@ bool is_type_polymorphic(Type *t, bool or_specialized=false) { return is_type_polymorphic(t->Matrix.elem, or_specialized); case Type_Tuple: - for_array(i, t->Tuple.variables) { - if (is_type_polymorphic(t->Tuple.variables[i]->type, or_specialized)) { + for (Entity *e : t->Tuple.variables) { + if (e->kind == Entity_Constant) { + if (e->Constant.value.kind != ExactValue_Invalid) { + return or_specialized; + } + } else if (is_type_polymorphic(e->type, or_specialized)) { return true; } } @@ -2160,7 +2123,6 @@ bool is_type_polymorphic(Type *t, bool or_specialized=false) { if (t->Proc.is_polymorphic) { return true; } - #if 1 if (t->Proc.param_count > 0 && is_type_polymorphic(t->Proc.params, or_specialized)) { return true; @@ -2169,7 +2131,6 @@ bool is_type_polymorphic(Type *t, bool or_specialized=false) { is_type_polymorphic(t->Proc.results, or_specialized)) { return true; } - #endif break; case Type_Enum: @@ -2248,11 +2209,7 @@ bool is_type_polymorphic(Type *t, bool or_specialized=false) { } -bool type_has_undef(Type *t) { - return true; -} - -bool type_has_nil(Type *t) { +gb_internal bool type_has_nil(Type *t) { t = base_type(t); switch (t->kind) { case Type_Basic: { @@ 
-2297,7 +2254,7 @@ bool type_has_nil(Type *t) { return false; } -bool elem_type_can_be_constant(Type *t) { +gb_internal bool elem_type_can_be_constant(Type *t) { t = base_type(t); if (t == t_invalid) { return false; @@ -2308,7 +2265,7 @@ bool elem_type_can_be_constant(Type *t) { return true; } -bool is_type_lock_free(Type *t) { +gb_internal bool is_type_lock_free(Type *t) { t = core_type(t); if (t == t_invalid) { return false; @@ -2320,7 +2277,7 @@ bool is_type_lock_free(Type *t) { -bool is_type_comparable(Type *t) { +gb_internal bool is_type_comparable(Type *t) { t = base_type(t); switch (t->kind) { case Type_Basic: @@ -2359,9 +2316,6 @@ bool is_type_comparable(Type *t) { return true; case Type_Struct: - if (type_size_of(t) == 0) { - return false; - } if (t->Struct.soa_kind != StructSoa_None) { return false; } @@ -2377,9 +2331,6 @@ bool is_type_comparable(Type *t) { return true; case Type_Union: - if (type_size_of(t) == 0) { - return false; - } for_array(i, t->Union.variants) { Type *v = t->Union.variants[i]; if (!is_type_comparable(v)) { @@ -2395,7 +2346,7 @@ bool is_type_comparable(Type *t) { } // NOTE(bill): type can be easily compared using memcmp -bool is_type_simple_compare(Type *t) { +gb_internal bool is_type_simple_compare(Type *t) { t = core_type(t); switch (t->kind) { case Type_Array: @@ -2450,7 +2401,7 @@ bool is_type_simple_compare(Type *t) { return false; } -bool is_type_load_safe(Type *type) { +gb_internal bool is_type_load_safe(Type *type) { GB_ASSERT(type != nullptr); type = core_type(core_array_type(type)); switch (type->kind) { @@ -2501,7 +2452,7 @@ bool is_type_load_safe(Type *type) { return false; } -String lookup_subtype_polymorphic_field(Type *dst, Type *src) { +gb_internal String lookup_subtype_polymorphic_field(Type *dst, Type *src) { Type *prev_src = src; // Type *prev_dst = dst; src = base_type(type_deref(src)); @@ -2532,7 +2483,7 @@ String lookup_subtype_polymorphic_field(Type *dst, Type *src) { return str_lit(""); } -bool lookup_subtype_polymorphic_selection(Type *dst, Type *src, Selection *sel) { +gb_internal bool lookup_subtype_polymorphic_selection(Type *dst, Type *src, Selection *sel) { Type *prev_src = src; // Type *prev_dst = dst; src = base_type(type_deref(src)); @@ -2571,227 +2522,227 @@ bool lookup_subtype_polymorphic_selection(Type *dst, Type *src, Selection *sel) +gb_internal bool are_types_identical_internal(Type *x, Type *y, bool check_tuple_names); -Type *strip_type_aliasing(Type *x) { - if (x == nullptr) { - return x; +gb_internal bool are_types_identical(Type *x, Type *y) { + if (x == y) { + return true; + } + + if ((x == nullptr && y != nullptr) || + (x != nullptr && y == nullptr)) { + return false; } + if (x->kind == Type_Named) { Entity *e = x->Named.type_name; - if (e != nullptr && e->kind == Entity_TypeName && e->TypeName.is_type_alias) { - return x->Named.base; + if (e->TypeName.is_type_alias) { + x = x->Named.base; } } - return x; -} - -bool are_types_identical_internal(Type *x, Type *y, bool check_tuple_names); + if (y->kind == Type_Named) { + Entity *e = y->Named.type_name; + if (e->TypeName.is_type_alias) { + y = y->Named.base; + } + } + if (x->kind != y->kind) { + return false; + } -bool are_types_identical(Type *x, Type *y) { return are_types_identical_internal(x, y, false); } -bool are_types_identical_unique_tuples(Type *x, Type *y) { +gb_internal bool are_types_identical_unique_tuples(Type *x, Type *y) { + if (x == y) { + return true; + } + + if (!x | !y) { + return false; + } + + if (x->kind == Type_Named) { + Entity *e = 
x->Named.type_name; + if (e->TypeName.is_type_alias) { + x = x->Named.base; + } + } + if (y->kind == Type_Named) { + Entity *e = y->Named.type_name; + if (e->TypeName.is_type_alias) { + y = y->Named.base; + } + } + if (x->kind != y->kind) { + return false; + } + return are_types_identical_internal(x, y, true); } -bool are_types_identical_internal(Type *x, Type *y, bool check_tuple_names) { +gb_internal bool are_types_identical_internal(Type *x, Type *y, bool check_tuple_names) { if (x == y) { return true; } - if ((x == nullptr && y != nullptr) || - (x != nullptr && y == nullptr)) { + if (!x | !y) { return false; } - x = strip_type_aliasing(x); - y = strip_type_aliasing(y); + #if 0 + if (x->kind == Type_Named) { + Entity *e = x->Named.type_name; + if (e->TypeName.is_type_alias) { + x = x->Named.base; + } + } + if (y->kind == Type_Named) { + Entity *e = y->Named.type_name; + if (e->TypeName.is_type_alias) { + y = y->Named.base; + } + } + if (x->kind != y->kind) { + return false; + } + #endif switch (x->kind) { case Type_Generic: - if (y->kind == Type_Generic) { - return are_types_identical(x->Generic.specialized, y->Generic.specialized); - } - break; + return are_types_identical(x->Generic.specialized, y->Generic.specialized); case Type_Basic: - if (y->kind == Type_Basic) { - return x->Basic.kind == y->Basic.kind; - } - break; + return x->Basic.kind == y->Basic.kind; case Type_EnumeratedArray: - if (y->kind == Type_EnumeratedArray) { - return are_types_identical(x->EnumeratedArray.index, y->EnumeratedArray.index) && - are_types_identical(x->EnumeratedArray.elem, y->EnumeratedArray.elem); - } - break; + return are_types_identical(x->EnumeratedArray.index, y->EnumeratedArray.index) && + are_types_identical(x->EnumeratedArray.elem, y->EnumeratedArray.elem); case Type_Array: - if (y->kind == Type_Array) { - return (x->Array.count == y->Array.count) && are_types_identical(x->Array.elem, y->Array.elem); - } - break; + return (x->Array.count == y->Array.count) && are_types_identical(x->Array.elem, y->Array.elem); case Type_Matrix: - if (y->kind == Type_Matrix) { - return x->Matrix.row_count == y->Matrix.row_count && - x->Matrix.column_count == y->Matrix.column_count && - are_types_identical(x->Matrix.elem, y->Matrix.elem); - } - break; + return x->Matrix.row_count == y->Matrix.row_count && + x->Matrix.column_count == y->Matrix.column_count && + are_types_identical(x->Matrix.elem, y->Matrix.elem); case Type_DynamicArray: - if (y->kind == Type_DynamicArray) { - return are_types_identical(x->DynamicArray.elem, y->DynamicArray.elem); - } - break; + return are_types_identical(x->DynamicArray.elem, y->DynamicArray.elem); case Type_Slice: - if (y->kind == Type_Slice) { - return are_types_identical(x->Slice.elem, y->Slice.elem); - } - break; + return are_types_identical(x->Slice.elem, y->Slice.elem); case Type_BitSet: - if (y->kind == Type_BitSet) { - return are_types_identical(x->BitSet.elem, y->BitSet.elem) && - are_types_identical(x->BitSet.underlying, y->BitSet.underlying) && - x->BitSet.lower == y->BitSet.lower && - x->BitSet.upper == y->BitSet.upper; - } - break; + return are_types_identical(x->BitSet.elem, y->BitSet.elem) && + are_types_identical(x->BitSet.underlying, y->BitSet.underlying) && + x->BitSet.lower == y->BitSet.lower && + x->BitSet.upper == y->BitSet.upper; case Type_Enum: return x == y; // NOTE(bill): All enums are unique case Type_Union: - if (y->kind == Type_Union) { - if (x->Union.variants.count == y->Union.variants.count && - x->Union.custom_align == y->Union.custom_align && - 
x->Union.kind == y->Union.kind) { - // NOTE(bill): zeroth variant is nullptr - for_array(i, x->Union.variants) { - if (!are_types_identical(x->Union.variants[i], y->Union.variants[i])) { - return false; - } + if (x->Union.variants.count == y->Union.variants.count && + x->Union.custom_align == y->Union.custom_align && + x->Union.kind == y->Union.kind) { + // NOTE(bill): zeroth variant is nullptr + for_array(i, x->Union.variants) { + if (!are_types_identical(x->Union.variants[i], y->Union.variants[i])) { + return false; } - return true; } + return true; } break; case Type_Struct: - if (y->kind == Type_Struct) { - if (x->Struct.is_raw_union == y->Struct.is_raw_union && - x->Struct.fields.count == y->Struct.fields.count && - x->Struct.is_packed == y->Struct.is_packed && - x->Struct.custom_align == y->Struct.custom_align && - x->Struct.soa_kind == y->Struct.soa_kind && - x->Struct.soa_count == y->Struct.soa_count && - are_types_identical(x->Struct.soa_elem, y->Struct.soa_elem)) { - // TODO(bill); Fix the custom alignment rule - for_array(i, x->Struct.fields) { - Entity *xf = x->Struct.fields[i]; - Entity *yf = y->Struct.fields[i]; - if (xf->kind != yf->kind) { - return false; - } - if (!are_types_identical(xf->type, yf->type)) { - return false; - } - if (xf->token.string != yf->token.string) { - return false; - } - if (x->Struct.tags[i] != y->Struct.tags[i]) { - return false; - } - u64 xf_flags = (xf->flags&EntityFlags_IsSubtype); - u64 yf_flags = (yf->flags&EntityFlags_IsSubtype); - if (xf_flags != yf_flags) { - return false; - } + if (x->Struct.is_raw_union == y->Struct.is_raw_union && + x->Struct.is_no_copy == y->Struct.is_no_copy && + x->Struct.fields.count == y->Struct.fields.count && + x->Struct.is_packed == y->Struct.is_packed && + x->Struct.custom_align == y->Struct.custom_align && + x->Struct.soa_kind == y->Struct.soa_kind && + x->Struct.soa_count == y->Struct.soa_count && + are_types_identical(x->Struct.soa_elem, y->Struct.soa_elem)) { + // TODO(bill); Fix the custom alignment rule + for_array(i, x->Struct.fields) { + Entity *xf = x->Struct.fields[i]; + Entity *yf = y->Struct.fields[i]; + if (xf->kind != yf->kind) { + return false; + } + if (!are_types_identical(xf->type, yf->type)) { + return false; + } + if (xf->token.string != yf->token.string) { + return false; + } + if (x->Struct.tags[i] != y->Struct.tags[i]) { + return false; + } + u64 xf_flags = (xf->flags&EntityFlags_IsSubtype); + u64 yf_flags = (yf->flags&EntityFlags_IsSubtype); + if (xf_flags != yf_flags) { + return false; } - return true; } + return are_types_identical(x->Struct.polymorphic_params, y->Struct.polymorphic_params); } break; case Type_Pointer: - if (y->kind == Type_Pointer) { - return are_types_identical(x->Pointer.elem, y->Pointer.elem); - } - break; + return are_types_identical(x->Pointer.elem, y->Pointer.elem); case Type_MultiPointer: - if (y->kind == Type_MultiPointer) { - return are_types_identical(x->MultiPointer.elem, y->MultiPointer.elem); - } - break; + return are_types_identical(x->MultiPointer.elem, y->MultiPointer.elem); case Type_SoaPointer: - if (y->kind == Type_SoaPointer) { - return are_types_identical(x->SoaPointer.elem, y->SoaPointer.elem); - } - break; + return are_types_identical(x->SoaPointer.elem, y->SoaPointer.elem); case Type_Named: - if (y->kind == Type_Named) { - return x->Named.type_name == y->Named.type_name; - } - break; + return x->Named.type_name == y->Named.type_name; case Type_Tuple: - if (y->kind == Type_Tuple) { - if (x->Tuple.variables.count == y->Tuple.variables.count && - 
x->Tuple.is_packed == y->Tuple.is_packed) {
- for_array(i, x->Tuple.variables) {
- Entity *xe = x->Tuple.variables[i];
- Entity *ye = y->Tuple.variables[i];
- if (xe->kind != ye->kind || !are_types_identical(xe->type, ye->type)) {
- return false;
- }
- if (check_tuple_names) {
- if (xe->token.string != ye->token.string) {
- return false;
- }
- }
- if (xe->kind == Entity_Constant && !compare_exact_values(Token_CmpEq, xe->Constant.value, ye->Constant.value)) {
- // NOTE(bill): This is needed for polymorphic procedures
+ if (x->Tuple.variables.count == y->Tuple.variables.count &&
+ x->Tuple.is_packed == y->Tuple.is_packed) {
+ for_array(i, x->Tuple.variables) {
+ Entity *xe = x->Tuple.variables[i];
+ Entity *ye = y->Tuple.variables[i];
+ if (xe->kind != ye->kind || !are_types_identical(xe->type, ye->type)) {
+ return false;
+ }
+ if (check_tuple_names) {
+ if (xe->token.string != ye->token.string) {
return false;
}
}
- return true;
+ if (xe->kind == Entity_Constant && !compare_exact_values(Token_CmpEq, xe->Constant.value, ye->Constant.value)) {
+ // NOTE(bill): This is needed for polymorphic procedures
+ return false;
+ }
}
+ return true;
}
break;

case Type_Proc:
- if (y->kind == Type_Proc) {
- return x->Proc.calling_convention == y->Proc.calling_convention &&
- x->Proc.c_vararg == y->Proc.c_vararg &&
- x->Proc.variadic == y->Proc.variadic &&
- x->Proc.diverging == y->Proc.diverging &&
- x->Proc.optional_ok == y->Proc.optional_ok &&
- are_types_identical(x->Proc.params, y->Proc.params) &&
- are_types_identical(x->Proc.results, y->Proc.results);
- }
- break;
+ return x->Proc.calling_convention == y->Proc.calling_convention &&
+ x->Proc.c_vararg == y->Proc.c_vararg &&
+ x->Proc.variadic == y->Proc.variadic &&
+ x->Proc.diverging == y->Proc.diverging &&
+ x->Proc.optional_ok == y->Proc.optional_ok &&
+ are_types_identical_internal(x->Proc.params, y->Proc.params, check_tuple_names) &&
+ are_types_identical_internal(x->Proc.results, y->Proc.results, check_tuple_names);

case Type_Map:
- if (y->kind == Type_Map) {
- return are_types_identical(x->Map.key, y->Map.key) &&
- are_types_identical(x->Map.value, y->Map.value);
- }
- break;
+ return are_types_identical(x->Map.key, y->Map.key) &&
+ are_types_identical(x->Map.value, y->Map.value);

case Type_SimdVector:
- if (y->kind == Type_SimdVector) {
- if (x->SimdVector.count == y->SimdVector.count) {
- return are_types_identical(x->SimdVector.elem, y->SimdVector.elem);
- }
+ if (x->SimdVector.count == y->SimdVector.count) {
+ return are_types_identical(x->SimdVector.elem, y->SimdVector.elem);
}
break;
}
@@ -2799,7 +2750,7 @@ bool are_types_identical_internal(Type *x, Type *y, bool check_tuple_names) {
return false;
}

-Type *default_type(Type *type) {
+gb_internal Type *default_type(Type *type) {
if (type == nullptr) {
return t_invalid;
}
@@ -2817,13 +2768,23 @@ Type *default_type(Type *type) {
return type;
}

-i64 union_variant_index(Type *u, Type *v) {
+gb_internal bool union_variant_index_types_equal(Type *v, Type *vt) {
+ if (are_types_identical(v, vt)) {
+ return true;
+ }
+ if (is_type_proc(v) && is_type_proc(vt)) {
+ return are_types_identical(base_type(v), base_type(vt));
+ }
+ return false;
+}
+
+gb_internal i64 union_variant_index(Type *u, Type *v) {
u = base_type(u);
GB_ASSERT(u->kind == Type_Union);
for_array(i, u->Union.variants) {
Type *vt = u->Union.variants[i];
- if (are_types_identical(v, vt)) {
+ if (union_variant_index_types_equal(v, vt)) {
if (u->Union.kind == UnionType_no_nil) {
return cast(i64)(i+0);
} else {
@@ -2834,7 +2795,7 @@ i64 union_variant_index(Type *u, Type *v) {
return 0;
}

-i64 union_tag_size(Type *u) {
+gb_internal i64 union_tag_size(Type *u) {
u = base_type(u);
GB_ASSERT(u->kind == Type_Union);
if (u->Union.tag_size > 0) {
@@ -2871,7 +2832,7 @@ i64 union_tag_size(Type *u) {
return u->Union.tag_size;
}

-Type *union_tag_type(Type *u) {
+gb_internal Type *union_tag_type(Type *u) {
i64 s = union_tag_size(u);
switch (s) {
case 0: return t_u8;
@@ -2901,7 +2862,7 @@ enum ProcTypeOverloadKind {
};

-ProcTypeOverloadKind are_proc_types_overload_safe(Type *x, Type *y) {
+gb_internal ProcTypeOverloadKind are_proc_types_overload_safe(Type *x, Type *y) {
if (x == nullptr && y == nullptr) return ProcOverload_NotProcedure;
if (x == nullptr && y != nullptr) return ProcOverload_NotProcedure;
if (x != nullptr && y == nullptr) return ProcOverload_NotProcedure;
@@ -2968,13 +2929,13 @@ ProcTypeOverloadKind are_proc_types_overload_safe(Type *x, Type *y) {

-Selection lookup_field_with_selection(Type *type_, String field_name, bool is_type, Selection sel, bool allow_blank_ident=false);
+gb_internal Selection lookup_field_with_selection(Type *type_, String field_name, bool is_type, Selection sel, bool allow_blank_ident=false);

-Selection lookup_field(Type *type_, String field_name, bool is_type, bool allow_blank_ident=false) {
+gb_internal Selection lookup_field(Type *type_, String field_name, bool is_type, bool allow_blank_ident=false) {
return lookup_field_with_selection(type_, field_name, is_type, empty_selection, allow_blank_ident);
}

-Selection lookup_field_from_index(Type *type, i64 index) {
+gb_internal Selection lookup_field_from_index(Type *type, i64 index) {
GB_ASSERT(is_type_struct(type) || is_type_union(type) || is_type_tuple(type));
type = base_type(type);
@@ -3018,10 +2979,10 @@ Selection lookup_field_from_index(Type *type, i64 index) {
return empty_selection;
}

-Entity *scope_lookup_current(Scope *s, String const &name);
-bool has_type_got_objc_class_attribute(Type *t);
+gb_internal Entity *scope_lookup_current(Scope *s, String const &name);
+gb_internal bool has_type_got_objc_class_attribute(Type *t);

-Selection lookup_field_with_selection(Type *type_, String field_name, bool is_type, Selection sel, bool allow_blank_ident) {
+gb_internal Selection lookup_field_with_selection(Type *type_, String field_name, bool is_type, Selection sel, bool allow_blank_ident) {
GB_ASSERT(type_ != nullptr);

if (!allow_blank_ident && is_blank_ident(field_name)) {
@@ -3363,10 +3324,13 @@ Selection lookup_field_with_selection(Type *type_, String field_name, bool is_ty
return sel;
}

-bool are_struct_fields_reordered(Type *type) {
+gb_internal bool are_struct_fields_reordered(Type *type) {
type = base_type(type);
GB_ASSERT(type->kind == Type_Struct);
type_set_offsets(type);
+ if (type->Struct.fields.count == 0) {
+ return false;
+ }
GB_ASSERT(type->Struct.offsets != nullptr);

i64 prev_offset = 0;
@@ -3381,10 +3345,13 @@ bool are_struct_fields_reordered(Type *type) {
return false;
}

-Slice<i32> struct_fields_index_by_increasing_offset(gbAllocator allocator, Type *type) {
+gb_internal Slice<i32> struct_fields_index_by_increasing_offset(gbAllocator allocator, Type *type) {
type = base_type(type);
GB_ASSERT(type->kind == Type_Struct);
type_set_offsets(type);
+ if (type->Struct.fields.count == 0) {
+ return {};
+ }
GB_ASSERT(type->Struct.offsets != nullptr);

auto indices = slice_make<i32>(allocator, type->Struct.fields.count);
@@ -3416,12 +3383,12 @@ Slice<i32> struct_fields_index_by_increasing_offset(gbAllocator allocator, Type

-i64 type_size_of_internal (Type *t, TypePath *path);
-i64 type_align_of_internal(Type *t, TypePath *path);
-i64 type_size_of(Type *t);
-i64 type_align_of(Type *t);
+gb_internal i64 type_size_of_internal (Type *t, TypePath *path);
+gb_internal i64 type_align_of_internal(Type *t, TypePath *path);
+gb_internal i64 type_size_of(Type *t);
+gb_internal i64 type_align_of(Type *t);

-i64 type_size_of_struct_pretend_is_packed(Type *ot) {
+gb_internal i64 type_size_of_struct_pretend_is_packed(Type *ot) {
if (ot == nullptr) {
return 0;
}
@@ -3450,49 +3417,70 @@ i64 type_size_of_struct_pretend_is_packed(Type *ot) {
}

-i64 type_size_of(Type *t) {
+gb_internal i64 type_size_of(Type *t) {
if (t == nullptr) {
return 0;
}
- // NOTE(bill): Always calculate the size when it is a Type_Basic
- if (t->kind == Type_Named && t->cached_size >= 0) {
+ i64 size = -1;
+ if (t->kind == Type_Basic) {
+ GB_ASSERT_MSG(is_type_typed(t), "%s", type_to_string(t));
+ switch (t->Basic.kind) {
+ case Basic_string: size = 2*build_context.int_size; break;
+ case Basic_cstring: size = build_context.ptr_size; break;
+ case Basic_any: size = 2*build_context.ptr_size; break;
+ case Basic_typeid: size = build_context.ptr_size; break;
- } else if (t->kind != Type_Basic && t->cached_size >= 0) {
- return t->cached_size;
+ case Basic_int: case Basic_uint:
+ size = build_context.int_size;
+ break;
+ case Basic_uintptr: case Basic_rawptr:
+ size = build_context.ptr_size;
+ break;
+ default:
+ size = t->Basic.size;
+ break;
+ }
+ t->cached_size.store(size);
+ return size;
+ } else if (t->kind != Type_Named && t->cached_size >= 0) {
+ return t->cached_size.load();
+ } else {
+ TypePath path{};
+ type_path_init(&path);
+ {
+ MUTEX_GUARD(&g_type_mutex);
+ size = type_size_of_internal(t, &path);
+ t->cached_size.store(size);
+ }
+ type_path_free(&path);
+ return size;
}
- TypePath path = {0};
- type_path_init(&path);
- t->cached_size = type_size_of_internal(t, &path);
- type_path_free(&path);
- return t->cached_size;
}

-i64 type_align_of(Type *t) {
+gb_internal i64 type_align_of(Type *t) {
if (t == nullptr) {
return 1;
}
- // NOTE(bill): Always calculate the size when it is a Type_Basic
- if (t->kind == Type_Named && t->cached_align >= 0) {
-
- } if (t->kind != Type_Basic && t->cached_align > 0) {
- return t->cached_align;
+ if (t->kind != Type_Named && t->cached_align > 0) {
+ return t->cached_align.load();
}
- TypePath path = {0};
+ TypePath path{};
type_path_init(&path);
- t->cached_align = type_align_of_internal(t, &path);
+ {
+ MUTEX_GUARD(&g_type_mutex);
+ t->cached_align.store(type_align_of_internal(t, &path));
+ }
type_path_free(&path);
- return t->cached_align;
+ return t->cached_align.load();
}

-i64 type_align_of_internal(Type *t, TypePath *path) {
+gb_internal i64 type_align_of_internal(Type *t, TypePath *path) {
GB_ASSERT(path != nullptr);
if (t->failure) {
return FAILURE_ALIGNMENT;
}
- mutex_lock(&g_type_mutex);
- defer (mutex_unlock(&g_type_mutex));

t = base_type(t);
@@ -3500,13 +3488,15 @@ i64 type_align_of_internal(Type *t, TypePath *path) {
case Type_Basic: {
GB_ASSERT(is_type_typed(t));
switch (t->Basic.kind) {
- case Basic_string: return build_context.word_size;
- case Basic_cstring: return build_context.word_size;
- case Basic_any: return build_context.word_size;
- case Basic_typeid: return build_context.word_size;
+ case Basic_string: return build_context.int_size;
+ case Basic_cstring: return build_context.ptr_size;
+ case Basic_any: return build_context.ptr_size;
+ case Basic_typeid: return build_context.ptr_size;

- case Basic_int: case Basic_uint: case Basic_uintptr: case Basic_rawptr:
- return build_context.word_size;
+ case Basic_int: case Basic_uint:
+ return build_context.int_size;
+ case Basic_uintptr: case Basic_rawptr:
+ return build_context.ptr_size;

case Basic_complex32: case Basic_complex64: case Basic_complex128:
return type_size_of_internal(t, path) / 2;
@@ -3539,10 +3529,10 @@ i64 type_align_of_internal(Type *t, TypePath *path) {

case Type_DynamicArray:
// data, count, capacity, allocator
- return build_context.word_size;
+ return build_context.int_size;

case Type_Slice:
- return build_context.word_size;
+ return build_context.int_size;

case Type_Tuple: {
@@ -3557,7 +3547,7 @@ i64 type_align_of_internal(Type *t, TypePath *path) {
} break;

case Type_Map:
- return build_context.word_size;
+ return build_context.ptr_size;

case Type_Enum:
return type_align_of_internal(t->Enum.base_type, path);
@@ -3589,39 +3579,25 @@ i64 type_align_of_internal(Type *t, TypePath *path) {
if (t->Struct.custom_align > 0) {
return gb_max(t->Struct.custom_align, 1);
}
- if (t->Struct.is_raw_union) {
- i64 max = 1;
- for_array(i, t->Struct.fields) {
- Type *field_type = t->Struct.fields[i]->type;
- bool pop = type_path_push(path, field_type);
- if (path->failure) {
- return FAILURE_ALIGNMENT;
- }
- i64 align = type_align_of_internal(field_type, path);
- if (pop) type_path_pop(path);
- if (max < align) {
- max = align;
- }
- }
- return max;
- } else if (t->Struct.fields.count > 0) {
- i64 max = 1;
- // NOTE(bill): Check the fields to check for cyclic definitions
- for_array(i, t->Struct.fields) {
- Type *field_type = t->Struct.fields[i]->type;
- bool pop = type_path_push(path, field_type);
- if (path->failure) return FAILURE_ALIGNMENT;
- i64 align = type_align_of_internal(field_type, path);
- if (pop) type_path_pop(path);
- if (max < align) {
- max = align;
- }
+
+ if (t->Struct.is_packed) {
+ return 1;
+ }
+
+ i64 max = 1;
+ for_array(i, t->Struct.fields) {
+ Type *field_type = t->Struct.fields[i]->type;
+ bool pop = type_path_push(path, field_type);
+ if (path->failure) {
+ return FAILURE_ALIGNMENT;
}
- i64 align = type_align_of_internal(field_type, path);
+ if (pop) type_path_pop(path);
+ if (max < align) {
+ max = align;
}
- if (t->Struct.is_packed) {
- return 1;
+ }
+ return max;
}
- return max;
}
break;

case Type_BitSet: {
@@ -3651,15 +3627,15 @@ i64 type_align_of_internal(Type *t, TypePath *path) {
return type_align_of_internal(t->RelativeSlice.base_integer, path);

case Type_SoaPointer:
- return build_context.word_size;
+ return build_context.int_size;
}

- // NOTE(bill): Things that are bigger than build_context.word_size, are actually comprised of smaller types
+ // NOTE(bill): Things that are bigger than build_context.ptr_size, are actually comprised of smaller types
// TODO(bill): Is this correct for 128-bit types (integers)?
return gb_clamp(next_pow2(type_size_of_internal(t, path)), 1, build_context.max_align);
}

-i64 *type_set_offsets_of(Slice<Entity *> const &fields, bool is_packed, bool is_raw_union) {
+gb_internal i64 *type_set_offsets_of(Slice<Entity *> const &fields, bool is_packed, bool is_raw_union) {
gbAllocator a = permanent_allocator();
auto offsets = gb_alloc_array(a, i64, fields.count);
i64 curr_offset = 0;
@@ -3686,9 +3662,8 @@ i64 *type_set_offsets_of(Slice<Entity *> const &fields, bool is_packed, bool is_
return offsets;
}

-bool type_set_offsets(Type *t) {
- mutex_lock(&g_type_mutex);
- defer (mutex_unlock(&g_type_mutex));
+gb_internal bool type_set_offsets(Type *t) {
+ MUTEX_GUARD(&g_type_mutex);
// TODO(bill): only per struct
t = base_type(t);
if (t->kind == Type_Struct) {
@@ -3713,13 +3688,10 @@ bool type_set_offsets(Type *t) {
return false;
}

-i64 type_size_of_internal(Type *t, TypePath *path) {
+gb_internal i64 type_size_of_internal(Type *t, TypePath *path) {
if (t->failure) {
return FAILURE_SIZE;
}
- mutex_lock(&g_type_mutex);
- defer (mutex_unlock(&g_type_mutex));
-
switch (t->kind) {
case Type_Named: {
@@ -3740,24 +3712,26 @@ i64 type_size_of_internal(Type *t, TypePath *path) {
return size;
}
switch (kind) {
- case Basic_string: return 2*build_context.word_size;
- case Basic_cstring: return build_context.word_size;
- case Basic_any: return 2*build_context.word_size;
- case Basic_typeid: return build_context.word_size;
+ case Basic_string: return 2*build_context.int_size;
+ case Basic_cstring: return build_context.ptr_size;
+ case Basic_any: return 2*build_context.ptr_size;
+ case Basic_typeid: return build_context.ptr_size;

- case Basic_int: case Basic_uint: case Basic_uintptr: case Basic_rawptr:
- return build_context.word_size;
+ case Basic_int: case Basic_uint:
+ return build_context.int_size;
+ case Basic_uintptr: case Basic_rawptr:
+ return build_context.ptr_size;
}
} break;

case Type_Pointer:
- return build_context.word_size;
+ return build_context.ptr_size;

case Type_MultiPointer:
- return build_context.word_size;
+ return build_context.ptr_size;

case Type_SoaPointer:
- return build_context.word_size*2;
+ return build_context.int_size*2;

case Type_Array: {
i64 count, align, size, alignment;
@@ -3790,11 +3764,11 @@ i64 type_size_of_internal(Type *t, TypePath *path) {
} break;

case Type_Slice: // ptr + len
- return 2 * build_context.word_size;
+ return 2 * build_context.int_size;

case Type_DynamicArray:
// data + len + cap + allocator(procedure+data)
- return (3 + 2)*build_context.word_size;
+ return 3*build_context.int_size + 2*build_context.ptr_size;

case Type_Map:
/*
data: uintptr, // 1 word
len: uintptr, // 1 word
allocator: runtime.Allocator, // 2 words
}
*/
- return (1 + 1 + 2)*build_context.word_size;
+ return (1 + 1 + 2)*build_context.ptr_size;

case Type_Tuple: {
i64 count, align, size;
@@ -3930,10 +3904,10 @@ i64 type_size_of_internal(Type *t, TypePath *path) {
}

// Catch all
- return build_context.word_size;
+ return build_context.ptr_size;
}

-i64 type_offset_of(Type *t, i32 index) {
+gb_internal i64 type_offset_of(Type *t, i32 index) {
t = base_type(t);
if (t->kind == Type_Struct) {
type_set_offsets(t);
@@ -3950,39 +3924,39 @@ i64 type_offset_of(Type *t, i32 index) {
} else if (t->kind == Type_Basic) {
if (t->Basic.kind == Basic_string) {
switch (index) {
- case 0: return 0; // data
- case 1: return build_context.word_size; // len
+ case 0: return 0; // data
+ case 1: return build_context.int_size; // len
}
} else if (t->Basic.kind == Basic_any) {
switch (index) {
- case 0: return 0; // type_info
- case 1: return build_context.word_size; // data
+ case 0: return 0; // type_info
+ case 1: return build_context.ptr_size; // data
}
}
} else if (t->kind == Type_Slice) {
switch (index) {
- case 0: return 0; // data
- case 1: return 1*build_context.word_size; // len
- case 2: return 2*build_context.word_size; // cap
+ case 0: return 0; // data
+ case 1: return 1*build_context.int_size; // len
+ case 2: return 2*build_context.int_size; // cap
}
} else if (t->kind == Type_DynamicArray) {
switch (index) {
- case 0: return 0; // data
- case 1: return 1*build_context.word_size; // len
- case 2: return 2*build_context.word_size; // cap
- case 3: return 3*build_context.word_size; // allocator
+ case 0: return 0; // data
+ case 1: return 1*build_context.int_size; // len
+ case 2: return 2*build_context.int_size; // cap
+ case 3: return 3*build_context.int_size; // allocator
}
} else if (t->kind == Type_Union) {
/* i64 s = */ type_size_of(t);
switch (index) {
- case -1: return align_formula(t->Union.variant_block_size, build_context.word_size); // __type_info
+ case -1: return align_formula(t->Union.variant_block_size, build_context.ptr_size); // __type_info
}
}
return 0;
}

-i64 type_offset_of_from_selection(Type *type, Selection sel) {
+gb_internal i64 type_offset_of_from_selection(Type *type, Selection sel) {
GB_ASSERT(sel.indirect == false);

Type *t = type;
@@ -4030,7 +4004,7 @@ i64 type_offset_of_from_selection(Type *type, Selection sel) {
return offset;
}

-isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isize level = 0, bool src_is_ptr = false) {
+gb_internal isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isize level = 0, bool src_is_ptr = false) {
Type *prev_src = src;
src = type_deref(src);
if (!src_is_ptr) {
@@ -4065,7 +4039,7 @@ isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isize level = 0
return 0;
}

-bool is_type_subtype_of(Type *src, Type *dst) {
+gb_internal bool is_type_subtype_of(Type *src, Type *dst) {
if (are_types_identical(src, dst)) {
return true;
}
@@ -4074,26 +4048,25 @@ bool is_type_subtype_of(Type *src, Type *dst) {
}

-bool has_type_got_objc_class_attribute(Type *t) {
+gb_internal bool has_type_got_objc_class_attribute(Type *t) {
return t->kind == Type_Named && t->Named.type_name != nullptr && t->Named.type_name->TypeName.objc_class_name != "";
}

-bool is_type_objc_object(Type *t) {
- bool internal_check_is_assignable_to(Type *src, Type *dst);
-
+gb_internal bool internal_check_is_assignable_to(Type *src, Type *dst);
+gb_internal bool is_type_objc_object(Type *t) {
return internal_check_is_assignable_to(t, t_objc_object);
}

-Type *get_struct_field_type(Type *t, isize index) {
+gb_internal Type *get_struct_field_type(Type *t, isize index) {
t = base_type(type_deref(t));
GB_ASSERT(t->kind == Type_Struct);
return t->Struct.fields[index]->type;
}

-Type *reduce_tuple_to_single_type(Type *original_type) {
+gb_internal Type *reduce_tuple_to_single_type(Type *original_type) {
if (original_type != nullptr) {
Type *t = core_type(original_type);
if (t->kind == Type_Tuple && t->Tuple.variables.count == 1) {
@@ -4103,21 +4076,7 @@ Type *reduce_tuple_to_single_type(Type *original_type) {
return original_type;
}

-
-Type *alloc_type_struct_from_field_types(Type **field_types, isize field_count, bool is_packed) {
- Type *t = alloc_type_struct();
- t->Struct.fields = slice_make<Entity *>(heap_allocator(), field_count);
-
- Scope *scope = nullptr;
- for_array(i, t->Struct.fields) {
- t->Struct.fields[i] = alloc_entity_field(scope, blank_token, field_types[i], false, cast(i32)i, EntityState_Resolved);
- }
- t->Struct.is_packed = is_packed;
-
- return t;
-}
-
-Type *alloc_type_tuple_from_field_types(Type **field_types, isize field_count, bool is_packed, bool must_be_tuple) {
+gb_internal Type *alloc_type_tuple_from_field_types(Type **field_types, isize field_count, bool is_packed, bool must_be_tuple) {
if (field_count == 0) {
return nullptr;
}
@@ -4137,7 +4096,7 @@ Type *alloc_type_tuple_from_field_types(Type **field_types, isize field_count, b
return t;
}

-Type *alloc_type_proc_from_types(Type **param_types, unsigned param_count, Type *results, bool is_c_vararg, ProcCallingConvention calling_convention) {
+gb_internal Type *alloc_type_proc_from_types(Type **param_types, unsigned param_count, Type *results, bool is_c_vararg, ProcCallingConvention calling_convention) {
Type *params = alloc_type_tuple_from_field_types(param_types, param_count, false, true);
isize results_count = 0;
@@ -4156,7 +4115,7 @@

-gbString write_type_to_string(gbString str, Type *type, bool shorthand=false) {
+gb_internal gbString write_type_to_string(gbString str, Type *type, bool shorthand=false) {
if (type == nullptr) {
return gb_string_appendc(str, "<no type>");
}
@@ -4276,6 +4235,7 @@ gbString write_type_to_string(gbString str, Type *type, bool shorthand=false) {
str = gb_string_appendc(str, "struct");
if (type->Struct.is_packed) str = gb_string_appendc(str, " #packed");
if (type->Struct.is_raw_union) str = gb_string_appendc(str, " #raw_union");
+ if (type->Struct.is_no_copy) str = gb_string_appendc(str, " #no_copy");
if (type->Struct.custom_align != 0) str = gb_string_append_fmt(str, " #align %d", cast(int)type->Struct.custom_align);
str = gb_string_appendc(str, " {");
@@ -4321,6 +4281,10 @@ gbString write_type_to_string(gbString str, Type *type, bool shorthand=false) {
if (var == nullptr) {
continue;
}
+ if (comma_index++ > 0) {
+ str = gb_string_appendc(str, ", ");
+ }
+
String name = var->token.string;
if (var->kind == Entity_Constant) {
str = gb_string_appendc(str, "$");
@@ -4337,10 +4301,6 @@ gbString write_type_to_string(gbString str, Type *type, bool shorthand=false) {
continue;
}

- if (comma_index++ > 0) {
- str = gb_string_appendc(str, ", ");
- }
-
if (var->kind == Entity_Variable) {
if (var->flags&EntityFlag_CVarArg) {
str = gb_string_appendc(str, "#c_vararg ");
@@ -4467,14 +4427,14 @@ gbString write_type_to_string(gbString str, Type *type, bool shorthand=false) {
}

-gbString type_to_string(Type *type, gbAllocator allocator, bool shorthand) {
+gb_internal gbString type_to_string(Type *type, gbAllocator allocator, bool shorthand) {
return write_type_to_string(gb_string_make(allocator, ""), type, shorthand);
}
-gbString type_to_string(Type *type, bool shorthand) {
+gb_internal gbString type_to_string(Type *type, bool shorthand) {
return write_type_to_string(gb_string_make(heap_allocator(), ""), type, shorthand);
}

-gbString type_to_string_shorthand(Type *type) {
+gb_internal gbString type_to_string_shorthand(Type *type) {
return type_to_string(type, true);
}
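Note on the word_size split: across type_size_of_internal, type_align_of_internal and type_offset_of, the single build_context.word_size is replaced by build_context.int_size (Odin's int, lengths, capacities) and build_context.ptr_size (raw pointers, uintptr, typeid, allocator pointers). A minimal standalone C++ sketch of the resulting builtin layouts; TargetSizes and its fields are stand-ins for build_context, and the 64-bit values are only an example, not part of this patch:

#include <cstdint>
#include <cstdio>

// Stand-in for the relevant parts of build_context (hypothetical).
struct TargetSizes {
	int64_t int_size; // size of Odin's int/uint, lengths and capacities
	int64_t ptr_size; // size of raw pointers, uintptr, typeid
};

// Mirrors the size arithmetic in the patched type_size_of_internal.
int64_t size_of_string(TargetSizes t)        { return 2*t.int_size; }                // data + len
int64_t size_of_any(TargetSizes t)           { return 2*t.ptr_size; }                // data + typeid
int64_t size_of_slice(TargetSizes t)         { return 2*t.int_size; }                // ptr + len
int64_t size_of_dynamic_array(TargetSizes t) { return 3*t.int_size + 2*t.ptr_size; } // data+len+cap + allocator(proc+data)
int64_t size_of_map(TargetSizes t)           { return (1 + 1 + 2)*t.ptr_size; }      // data + len + allocator

int main() {
	TargetSizes t = {8, 8}; // example: a typical 64-bit target
	std::printf("string=%lld any=%lld []T=%lld [dynamic]T=%lld map=%lld\n",
	            (long long)size_of_string(t), (long long)size_of_any(t),
	            (long long)size_of_slice(t), (long long)size_of_dynamic_array(t),
	            (long long)size_of_map(t));
}

When int_size and ptr_size are both 8 this prints 16/16/16/40/32; the point of the split is that the two sizes no longer have to agree.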
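Note on the caching change in type_size_of and type_align_of: the mutex no longer lives inside type_size_of_internal/type_align_of_internal; the cached value is read and written through .load()/.store(), and g_type_mutex is taken only around the uncached computation. A minimal sketch of that shape, using std::atomic and std::mutex as stand-ins for the compiler's own atomic field and MUTEX_GUARD macro; compute is a hypothetical callback standing in for type_size_of_internal:

#include <atomic>
#include <cstdint>
#include <functional>
#include <mutex>

static std::mutex g_type_mutex; // stand-in for the compiler's g_type_mutex

struct CachedType {
	std::atomic<int64_t> cached_size{-1}; // negative means "not computed yet"
};

int64_t size_of(CachedType &t, const std::function<int64_t()> &compute) {
	int64_t size = t.cached_size.load();
	if (size >= 0) {
		return size; // fast path: no lock once a size has been published
	}
	{
		std::lock_guard<std::mutex> guard(g_type_mutex); // analogous to MUTEX_GUARD(&g_type_mutex)
		size = compute();          // analogous to type_size_of_internal(t, &path)
		t.cached_size.store(size); // publish for later lock-free reads
	}
	return size;
}

The new Type_Basic branch in type_size_of skips even this and stores the statically known basic size directly.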
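Note on the Type_Struct case of type_align_of_internal: the separate raw-union and ordinary-struct loops collapse into one, #packed now short-circuits to alignment 1 before any field is visited, and the result is otherwise the maximum field alignment (at least 1). A standalone sketch of just that decision, with the TypePath cycle detection deliberately left out:

#include <algorithm>
#include <cstdint>
#include <vector>

// field_aligns are the already-computed alignments of the struct's fields.
int64_t struct_align(const std::vector<int64_t> &field_aligns,
                     bool is_packed, int64_t custom_align) {
	if (custom_align > 0) {
		return std::max<int64_t>(custom_align, 1); // #align override, as before
	}
	if (is_packed) {
		return 1; // #packed: now checked before walking any fields
	}
	int64_t max = 1; // empty structs and raw unions fall out of the same loop
	for (int64_t a : field_aligns) {
		max = std::max(max, a);
	}
	return max;
}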
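Note on the tuple-printing hunks in write_type_to_string (@@ -4321 / @@ -4337): the ", " separator is now emitted at the top of the per-variable loop, before the "$" prefix and name handling, rather than after the early-continue paths, so every printed entry after the first is separated. The same ordering in a tiny standalone form; join_params and its inputs are illustrative, not compiler code:

#include <string>
#include <vector>

struct Param {
	bool        is_constant; // printed with a leading '$', like Entity_Constant
	std::string name;
};

// Separator first, per-entry decoration afterwards -- the ordering the patch restores.
std::string join_params(const std::vector<Param> &params) {
	std::string out;
	int comma_index = 0;
	for (const Param &p : params) {
		if (comma_index++ > 0) {
			out += ", ";
		}
		if (p.is_constant) {
			out += "$";
		}
		out += p.name;
	}
	return out;
}
// join_params({{false, "a"}, {true, "N"}, {false, "b"}}) yields "a, $N, b".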