6 #ifndef ABTI_THREAD_H_INCLUDED
7 #define ABTI_THREAD_H_INCLUDED
9 static inline ABTI_thread *ABTI_thread_get_ptr(
ABT_thread thread)
11 #ifndef ABT_CONFIG_DISABLE_ERROR_CHECK
12 ABTI_thread *p_thread;
16 p_thread = (ABTI_thread *)thread;
20 return (ABTI_thread *)thread;
24 static inline ABT_thread ABTI_thread_get_handle(ABTI_thread *p_thread)
26 #ifndef ABT_CONFIG_DISABLE_ERROR_CHECK
28 if (p_thread == NULL) {
41 static inline ABT_unit_type ABTI_thread_type_get_type(ABTI_thread_type type)
43 if (type & ABTI_THREAD_TYPE_YIELDABLE) {
45 }
else if (type == ABTI_THREAD_TYPE_EXT) {
52 static inline ABTI_ythread *ABTI_thread_get_ythread(ABTI_thread *p_thread)
54 ABTI_STATIC_ASSERT(offsetof(ABTI_ythread, thread) == 0);
55 return (ABTI_ythread *)p_thread;
58 static inline ABTI_ythread *
59 ABTI_thread_get_ythread_or_null(ABTI_thread *p_thread)
61 if (p_thread->type & ABTI_THREAD_TYPE_YIELDABLE) {
62 return ABTI_thread_get_ythread(p_thread);
68 static inline void ABTI_thread_set_request(ABTI_thread *p_thread, uint32_t req)
70 ABTD_atomic_fetch_or_uint32(&p_thread->request, req);
73 static inline void ABTI_thread_unset_request(ABTI_thread *p_thread,
76 ABTD_atomic_fetch_and_uint32(&p_thread->request, ~req);
/* Return codes of ABTI_thread_handle_request(): which request, if any,
 * was acted upon. */
#define ABTI_THREAD_HANDLE_REQUEST_NONE ((int)0x0)
#define ABTI_THREAD_HANDLE_REQUEST_CANCELLED ((int)0x1)
#define ABTI_THREAD_HANDLE_REQUEST_MIGRATED ((int)0x2)
83 static inline int ABTI_thread_handle_request(ABTI_thread *p_thread,
86 #if defined(ABT_CONFIG_DISABLE_CANCELLATION) && \
87 defined(ABT_CONFIG_DISABLE_MIGRATION)
88 return ABTI_THREAD_HANDLE_REQUEST_NONE;
91 const uint32_t request =
92 ABTD_atomic_acquire_load_uint32(&p_thread->request);
95 #ifndef ABT_CONFIG_DISABLE_CANCELLATION
96 if (allow_termination &&
ABTU_unlikely(request & ABTI_THREAD_REQ_CANCEL)) {
97 ABTI_thread_handle_request_cancel(ABTI_global_get_global(),
98 p_thread->p_last_xstream, p_thread);
99 return ABTI_THREAD_HANDLE_REQUEST_CANCELLED;
104 #ifndef ABT_CONFIG_DISABLE_MIGRATION
108 ABTI_thread_handle_request_migrate(ABTI_global_get_global(),
109 ABTI_xstream_get_local(
110 p_thread->p_last_xstream),
113 return ABTI_THREAD_HANDLE_REQUEST_MIGRATED;
119 return ABTI_THREAD_HANDLE_REQUEST_NONE;
124 ABTI_mem_alloc_ythread_mempool_stack(ABTI_xstream *p_local_xstream,
125 ABTI_ythread *p_ythread);
127 ABTI_mem_free_ythread_mempool_stack(ABTI_xstream *p_local_xstream,
128 ABTI_ythread *p_ythread);
130 static inline void ABTI_thread_terminate(ABTI_global *p_global,
131 ABTI_xstream *p_local_xstream,
132 ABTI_thread *p_thread)
134 const ABTI_thread_type thread_type = p_thread->type;
135 if (thread_type & (ABTI_THREAD_TYPE_MEM_MEMPOOL_DESC_MEMPOOL_LAZY_STACK |
136 ABTI_THREAD_TYPE_MEM_MALLOC_DESC_MEMPOOL_LAZY_STACK)) {
137 ABTI_ythread *p_ythread = ABTI_thread_get_ythread(p_thread);
138 if (ABTD_ythread_context_has_stack(&p_ythread->ctx)) {
139 ABTI_mem_free_ythread_mempool_stack(p_local_xstream, p_ythread);
142 if (!(thread_type & ABTI_THREAD_TYPE_NAMED)) {
143 ABTD_atomic_release_store_int(&p_thread->state,
145 ABTI_thread_free(p_global, ABTI_xstream_get_local(p_local_xstream),
152 ABTD_atomic_release_store_int(&p_thread->state,