Diffstat (limited to 'src/types.cpp')
-rw-r--r--    src/types.cpp    191
1 file changed, 170 insertions, 21 deletions
diff --git a/src/types.cpp b/src/types.cpp
index 574e628c5..90cb130b6 100644
--- a/src/types.cpp
+++ b/src/types.cpp
@@ -137,13 +137,14 @@ struct TypeStruct {
 	Scope * scope;
 
 	i64 custom_align;
+	i64 custom_field_align;
 	Type * polymorphic_params; // Type_Tuple
 	Type * polymorphic_parent;
 
 	Type * soa_elem;
 	i32 soa_count;
 	StructSoaKind soa_kind;
 
-	RwMutex fields_mutex;
+	Wait_Signal fields_wait_signal;
 	BlockingMutex offset_mutex; // for settings offsets
 	bool is_polymorphic;
@@ -230,6 +231,7 @@ struct TypeProc {
 		Type *key; \
 		Type *value; \
 		Type *lookup_result_type; \
+		Type *debug_metadata_type; \
 	}) \
 	TYPE_KIND(Struct, TypeStruct) \
 	TYPE_KIND(Union, TypeUnion) \
@@ -280,6 +282,15 @@ struct TypeProc {
 		Type *generic_column_count; \
 		i64 stride_in_bytes; \
 	}) \
+	TYPE_KIND(BitField, struct { \
+		Scope * scope; \
+		Type * backing_type; \
+		Slice<Entity *> fields; \
+		String * tags; /*count == fields.count*/ \
+		Slice<u8> bit_sizes; \
+		Slice<i64> bit_offsets; \
+		Ast * node; \
+	}) \
 	TYPE_KIND(SoaPointer, struct { \
 		Type *elem; \
 	})
@@ -353,6 +364,7 @@ enum Typeid_Kind : u8 {
 	Typeid_Relative_Multi_Pointer,
 	Typeid_Matrix,
 	Typeid_SoaPointer,
+	Typeid_Bit_Field,
 };
 
 // IMPORTANT NOTE(bill): This must match the same as the in core.odin
@@ -374,6 +386,9 @@ enum : int {
 
 gb_internal bool is_type_comparable(Type *t);
 gb_internal bool is_type_simple_compare(Type *t);
+gb_internal Type *type_deref(Type *t, bool allow_multi_pointer=false);
+gb_internal Type *base_type(Type *t);
+gb_internal Type *alloc_type_multi_pointer(Type *elem);
 
 gb_internal u32 type_info_flags_of_type(Type *type) {
 	if (type == nullptr) {
@@ -398,6 +413,7 @@ struct Selection {
 	bool indirect; // Set if there was a pointer deref anywhere down the line
 	u8 swizzle_count;   // maximum components = 4
 	u8 swizzle_indices; // 2 bits per component, representing which swizzle index
+	bool is_bit_field;
 	bool pseudo_field;
 };
 gb_global Selection const empty_selection = {0};
@@ -639,6 +655,7 @@ gb_global Type *t_type_info_relative_pointer = nullptr;
 gb_global Type *t_type_info_relative_multi_pointer = nullptr;
 gb_global Type *t_type_info_matrix = nullptr;
 gb_global Type *t_type_info_soa_pointer = nullptr;
+gb_global Type *t_type_info_bit_field = nullptr;
 
 gb_global Type *t_type_info_named_ptr = nullptr;
 gb_global Type *t_type_info_integer_ptr = nullptr;
@@ -668,6 +685,7 @@ gb_global Type *t_type_info_relative_pointer_ptr = nullptr;
 gb_global Type *t_type_info_relative_multi_pointer_ptr = nullptr;
 gb_global Type *t_type_info_matrix_ptr = nullptr;
 gb_global Type *t_type_info_soa_pointer_ptr = nullptr;
+gb_global Type *t_type_info_bit_field_ptr = nullptr;
 
 gb_global Type *t_allocator = nullptr;
 gb_global Type *t_allocator_ptr = nullptr;
@@ -678,6 +696,10 @@ gb_global Type *t_allocator_error = nullptr;
 gb_global Type *t_source_code_location = nullptr;
 gb_global Type *t_source_code_location_ptr = nullptr;
 
+gb_global Type *t_load_directory_file = nullptr;
+gb_global Type *t_load_directory_file_ptr = nullptr;
+gb_global Type *t_load_directory_file_slice = nullptr;
+
 gb_global Type *t_map_info = nullptr;
 gb_global Type *t_map_cell_info = nullptr;
 gb_global Type *t_raw_map = nullptr;
@@ -744,7 +766,6 @@ gb_internal bool is_type_proc(Type *t);
 gb_internal bool is_type_slice(Type *t);
 gb_internal bool is_type_integer(Type *t);
 gb_internal bool type_set_offsets(Type *t);
-gb_internal Type *base_type(Type *t);
 
 gb_internal i64 type_size_of_internal(Type *t, TypePath *path);
 gb_internal i64 type_align_of_internal(Type *t, TypePath *path);
@@ -825,11 +846,13 @@ gb_internal void type_path_pop(TypePath *tp) {
 #define FAILURE_SIZE 0
 #define FAILURE_ALIGNMENT 0
 
+gb_internal bool type_ptr_set_exists(PtrSet<Type *> *s, Type *t);
+
 gb_internal bool type_ptr_set_update(PtrSet<Type *> *s, Type *t) {
 	if (t == nullptr) {
 		return true;
 	}
-	if (ptr_set_exists(s, t)) {
+	if (type_ptr_set_exists(s, t)) {
 		return true;
 	}
 	ptr_set_add(s, t);
@@ -1032,6 +1055,11 @@ gb_internal Type *alloc_type_enum() {
 	return t;
 }
 
+gb_internal Type *alloc_type_bit_field() {
+	Type *t = alloc_type(Type_BitField);
+	return t;
+}
+
 gb_internal Type *alloc_type_relative_pointer(Type *pointer_type, Type *base_integer) {
 	GB_ASSERT(is_type_pointer(pointer_type));
 	GB_ASSERT(is_type_integer(base_integer));
@@ -1132,7 +1160,7 @@ gb_internal Type *alloc_type_simd_vector(i64 count, Type *elem, Type *generic_co
 
 ////////////////////////////////////////////////////////////////
 
-gb_internal Type *type_deref(Type *t, bool allow_multi_pointer=false) {
+gb_internal Type *type_deref(Type *t, bool allow_multi_pointer) {
 	if (t != nullptr) {
 		Type *bt = base_type(t);
 		if (bt == nullptr) {
@@ -1699,6 +1727,10 @@ gb_internal bool is_type_bit_set(Type *t) {
 	t = base_type(t);
 	return (t->kind == Type_BitSet);
 }
+gb_internal bool is_type_bit_field(Type *t) {
+	t = base_type(t);
+	return (t->kind == Type_BitField);
+}
 gb_internal bool is_type_map(Type *t) {
 	t = base_type(t);
 	return t->kind == Type_Map;
 }
@@ -2961,9 +2993,8 @@ gb_internal Selection lookup_field_from_index(Type *type, i64 index) {
 	isize max_count = 0;
 	switch (type->kind) {
 	case Type_Struct:
-		rw_mutex_shared_lock(&type->Struct.fields_mutex);
+		wait_signal_until_available(&type->Struct.fields_wait_signal);
 		max_count = type->Struct.fields.count;
-		rw_mutex_shared_unlock(&type->Struct.fields_mutex);
 		break;
 	case Type_Tuple: max_count = type->Tuple.variables.count; break;
 	}
@@ -2974,8 +3005,7 @@ gb_internal Selection lookup_field_from_index(Type *type, i64 index) {
 
 	switch (type->kind) {
 	case Type_Struct: {
-		rw_mutex_shared_lock(&type->Struct.fields_mutex);
-		defer (rw_mutex_shared_unlock(&type->Struct.fields_mutex));
+		wait_signal_until_available(&type->Struct.fields_wait_signal);
 		for (isize i = 0; i < max_count; i++) {
 			Entity *f = type->Struct.fields[i];
 			if (f->kind == Entity_Variable) {
@@ -3040,9 +3070,8 @@ gb_internal Selection lookup_field_with_selection(Type *type_, String field_name
 			}
 		}
 		if (type->kind == Type_Struct) {
-			rw_mutex_shared_lock(&type->Struct.fields_mutex);
+			wait_signal_until_available(&type->Struct.fields_wait_signal);
 			isize field_count = type->Struct.fields.count;
-			rw_mutex_shared_unlock(&type->Struct.fields_mutex);
 			if (field_count != 0) for_array(i, type->Struct.fields) {
 				Entity *f = type->Struct.fields[i];
 				if (f->flags&EntityFlag_Using) {
@@ -3071,9 +3100,8 @@ gb_internal Selection lookup_field_with_selection(Type *type_, String field_name
 		}
 
 		if (type->kind == Type_Struct) {
-			rw_mutex_shared_lock(&type->Struct.fields_mutex);
+			wait_signal_until_available(&type->Struct.fields_wait_signal);
 			Scope *s = type->Struct.scope;
-			rw_mutex_shared_unlock(&type->Struct.fields_mutex);
 			if (s != nullptr) {
 				Entity *found = scope_lookup_current(s, field_name);
 				if (found != nullptr && found->kind != Entity_Variable) {
@@ -3121,9 +3149,8 @@ gb_internal Selection lookup_field_with_selection(Type *type_, String field_name
 			}
 		}
 
-		rw_mutex_shared_lock(&type->Struct.fields_mutex);
+		wait_signal_until_available(&type->Struct.fields_wait_signal);
 		isize field_count = type->Struct.fields.count;
-		rw_mutex_shared_unlock(&type->Struct.fields_mutex);
 		if (field_count != 0) for_array(i, type->Struct.fields) {
 			Entity *f = type->Struct.fields[i];
 			if (f->kind != Entity_Variable || (f->flags & EntityFlag_Field) == 0) {
@@ -3165,6 +3192,21 @@ gb_internal Selection lookup_field_with_selection(Type *type_, String field_name
 			else if (field_name == "a") mapped_field_name = str_lit("w");
 			return lookup_field_with_selection(type, mapped_field_name, is_type, sel, allow_blank_ident);
 		}
+	} else if (type->kind == Type_BitField) {
+		for_array(i, type->BitField.fields) {
+			Entity *f = type->BitField.fields[i];
+			if (f->kind != Entity_Variable || (f->flags & EntityFlag_Field) == 0) {
+				continue;
+			}
+			String str = f->token.string;
+			if (field_name == str) {
+				selection_add_index(&sel, i); // HACK(bill): Leaky memory
+				sel.entity = f;
+				sel.is_bit_field = true;
+				return sel;
+			}
+		}
+
 	} else if (type->kind == Type_Basic) {
 		switch (type->Basic.kind) {
 		case Basic_any: {
@@ -3565,6 +3607,8 @@ gb_internal i64 type_align_of_internal(Type *t, TypePath *path) {
 
 	case Type_Slice:
 		return build_context.int_size;
 
+	case Type_BitField:
+		return type_align_of_internal(t->BitField.backing_type, path);
 	case Type_Tuple: {
 		i64 max = 1;
@@ -3666,10 +3710,15 @@ gb_internal i64 type_align_of_internal(Type *t, TypePath *path) {
 	return gb_clamp(next_pow2(type_size_of_internal(t, path)), 1, build_context.max_align);
 }
 
-gb_internal i64 *type_set_offsets_of(Slice<Entity *> const &fields, bool is_packed, bool is_raw_union) {
+gb_internal i64 *type_set_offsets_of(Slice<Entity *> const &fields, bool is_packed, bool is_raw_union, i64 min_field_align) {
 	gbAllocator a = permanent_allocator();
 	auto offsets = gb_alloc_array(a, i64, fields.count);
 	i64 curr_offset = 0;
+
+	if (min_field_align == 0) {
+		min_field_align = 1;
+	}
+
 	if (is_raw_union) {
 		for_array(i, fields) {
 			offsets[i] = 0;
@@ -3690,7 +3739,7 @@ gb_internal i64 *type_set_offsets_of(Slice<Entity *> const &fields, bool is_pack
 			offsets[i] = -1;
 		} else {
 			Type *t = fields[i]->type;
-			i64 align = gb_max(type_align_of(t), 1);
+			i64 align = gb_max(type_align_of(t), min_field_align);
 			i64 size = gb_max(type_size_of( t), 0);
 			curr_offset = align_formula(curr_offset, align);
 			offsets[i] = curr_offset;
@@ -3707,7 +3756,7 @@ gb_internal bool type_set_offsets(Type *t) {
 		MUTEX_GUARD(&t->Struct.offset_mutex);
 		if (!t->Struct.are_offsets_set) {
 			t->Struct.are_offsets_being_processed = true;
-			t->Struct.offsets = type_set_offsets_of(t->Struct.fields, t->Struct.is_packed, t->Struct.is_raw_union);
+			t->Struct.offsets = type_set_offsets_of(t->Struct.fields, t->Struct.is_packed, t->Struct.is_raw_union, t->Struct.custom_field_align);
 			t->Struct.are_offsets_being_processed = false;
 			t->Struct.are_offsets_set = true;
 			return true;
@@ -3716,7 +3765,7 @@ gb_internal bool type_set_offsets(Type *t) {
 		MUTEX_GUARD(&t->Tuple.mutex);
 		if (!t->Tuple.are_offsets_set) {
 			t->Tuple.are_offsets_being_processed = true;
-			t->Tuple.offsets = type_set_offsets_of(t->Tuple.variables, t->Tuple.is_packed, false);
+			t->Tuple.offsets = type_set_offsets_of(t->Tuple.variables, t->Tuple.is_packed, false, 1);
			t->Tuple.are_offsets_being_processed = false;
 			t->Tuple.are_offsets_set = true;
 			return true;
@@ -3935,6 +3984,9 @@ gb_internal i64 type_size_of_internal(Type *t, TypePath *path) {
 		return stride_in_bytes * t->Matrix.column_count;
 	}
 
+	case Type_BitField:
+		return type_size_of_internal(t->BitField.backing_type, path);
+
 	case Type_RelativePointer:
 		return type_size_of_internal(t->RelativePointer.base_integer, path);
 	case Type_RelativeMultiPointer:
@@ -4085,7 +4137,7 @@ gb_internal i64 type_offset_of_from_selection(Type *type, Selection sel) {
 	return offset;
 }
 
-gb_internal isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isize level = 0, bool src_is_ptr = false) {
+gb_internal isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isize level = 0, bool src_is_ptr = false, bool allow_polymorphic=false) {
 	Type *prev_src = src;
 	src = type_deref(src);
 	if (!src_is_ptr) {
@@ -4097,11 +4149,21 @@ gb_internal isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isi
 		return 0;
 	}
 
+	bool dst_is_polymorphic = is_type_polymorphic(dst);
+
 	for_array(i, src->Struct.fields) {
 		Entity *f = src->Struct.fields[i];
 		if (f->kind != Entity_Variable || (f->flags&EntityFlags_IsSubtype) == 0) {
 			continue;
 		}
+		if (allow_polymorphic && dst_is_polymorphic) {
+			Type *fb = base_type(type_deref(f->type));
+			if (fb->kind == Type_Struct) {
+				if (fb->Struct.polymorphic_parent == dst) {
+					return true;
+				}
+			}
+		}
 
 		if (are_types_identical(f->type, dst)) {
 			return level+1;
@@ -4111,7 +4173,7 @@ gb_internal isize check_is_assignable_to_using_subtype(Type *src, Type *dst, isi
 				return level+1;
 			}
 		}
-		isize nested_level = check_is_assignable_to_using_subtype(f->type, dst, level+1, src_is_ptr);
+		isize nested_level = check_is_assignable_to_using_subtype(f->type, dst, level+1, src_is_ptr, allow_polymorphic);
 		if (nested_level > 0) {
 			return nested_level;
 		}
@@ -4127,6 +4189,13 @@ gb_internal bool is_type_subtype_of(Type *src, Type *dst) {
 	return 0 < check_is_assignable_to_using_subtype(src, dst, 0, is_type_pointer(src));
 }
 
+gb_internal bool is_type_subtype_of_and_allow_polymorphic(Type *src, Type *dst) {
+	if (are_types_identical(src, dst)) {
+		return true;
+	}
+
+	return 0 < check_is_assignable_to_using_subtype(src, dst, 0, is_type_pointer(src), true);
+}
 
 
 gb_internal bool has_type_got_objc_class_attribute(Type *t) {
@@ -4194,7 +4263,70 @@ gb_internal Type *alloc_type_proc_from_types(Type **param_types, unsigned param_
 
 	return t;
 }
-
+// gb_internal Type *type_from_selection(Type *type, Selection const &sel) {
+// 	for (i32 index : sel.index) {
+// 		Type *bt = base_type(type_deref(type));
+// 		switch (bt->kind) {
+// 		case Type_Struct:
+// 			type = bt->Struct.fields[index]->type;
+// 			break;
+// 		case Type_Tuple:
+// 			type = bt->Tuple.variables[index]->type;
+// 			break;
+// 		case Type_BitField:
+// 			type = bt->BitField.fields[index]->type;
+// 			break;
+// 		case Type_Array:
+// 			type = bt->Array.elem;
+// 			break;
+// 		case Type_EnumeratedArray:
+// 			type = bt->Array.elem;
+// 			break;
+// 		case Type_Slice:
+// 			switch (index) {
+// 			case 0: type = alloc_type_multi_pointer(bt->Slice.elem); break;
+// 			case 1: type = t_int; break;
+// 			}
+// 			break;
+// 		case Type_DynamicArray:
+// 			switch (index) {
+// 			case 0: type = alloc_type_multi_pointer(bt->DynamicArray.elem); break;
+// 			case 1: type = t_int; break;
+// 			case 2: type = t_int; break;
+// 			case 3: type = t_allocator; break;
+// 			}
+// 			break;
+// 		case Type_Map:
+// 			switch (index) {
+// 			case 0: type = t_uintptr; break;
+// 			case 1: type = t_int; break;
+// 			case 2: type = t_allocator; break;
+// 			}
+// 			break;
+// 		case Type_Basic:
+// 			if (is_type_complex_or_quaternion(bt)) {
+// 				type = base_complex_elem_type(bt);
+// 			} else {
+// 				switch (type->Basic.kind) {
+// 				case Basic_any:
+// 					switch (index) {
+// 					case 0: type = t_rawptr; break;
+// 					case 1: type = t_typeid; break;
+// 					}
+// 					break;
+// 				case Basic_string:
+// 					switch (index) {
+// 					case 0: type = t_u8_multi_ptr; break;
+// 					case 1: type = t_int; break;
+// 					}
+// 					break;
+// 				}
+// 			}
+// 			break;
+// 		}
+// 	}
+// 	return type;
+// }
 
 gb_internal gbString write_type_to_string(gbString str, Type *type, bool shorthand=false) {
 	if (type == nullptr) {
@@ -4502,6 +4634,23 @@ gb_internal gbString write_type_to_string(gbString str, Type *type, bool shortha
 		str = gb_string_appendc(str, gb_bprintf("matrix[%d, %d]", cast(int)type->Matrix.row_count, cast(int)type->Matrix.column_count));
 		str = write_type_to_string(str, type->Matrix.elem);
 		break;
+
+	case Type_BitField:
+		str = gb_string_appendc(str, "bit_field ");
+		str = write_type_to_string(str, type->BitField.backing_type);
+		str = gb_string_appendc(str, " {");
+		for (isize i = 0; i < type->BitField.fields.count; i++) {
+			Entity *f = type->BitField.fields[i];
+			if (i > 0) {
+				str = gb_string_appendc(str, ", ");
+			}
+			str = gb_string_append_length(str, f->token.string.text, f->token.string.len);
+			str = gb_string_appendc(str, ": ");
+			str = write_type_to_string(str, f->type);
+			str = gb_string_append_fmt(str, " | %u", type->BitField.bit_sizes[i]);
+		}
+		str = gb_string_appendc(str, " }");
+		break;
 	}

 	return str;
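
For orientation: the write_type_to_string case added above prints the surface form of the new type, e.g. "bit_field u16 { mode: u8 | 3, ... }". A minimal Odin sketch of such a declaration follows; the type name, field names, and widths are illustrative only and are not taken from this commit.

    package bit_field_example

    // Illustrative only: a 16-bit backing integer carved into three fields
    // whose declared widths (3 + 1 + 12 bits) exactly fill the backing type.
    Register :: bit_field u16 {
    	mode:    u8   | 3,
    	enabled: bool | 1,
    	value:   u16  | 12,
    }

    main :: proc() {
    	r: Register
    	r.mode    = 5      // fits in 3 bits
    	r.enabled = true
    	r.value   = 0xABC  // fits in 12 bits
    }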