#ifndef ABTI_YTHREAD_H_INCLUDED
#define ABTI_YTHREAD_H_INCLUDED
static inline ABTI_ythread *ABTI_ythread_get_ptr(ABT_thread thread)
{
#ifndef ABT_CONFIG_DISABLE_ERROR_CHECK
    ABTI_ythread *p_ythread;
    p_ythread = (thread == ABT_THREAD_NULL) ? NULL : (ABTI_ythread *)thread;
    return p_ythread;
#else
    return (ABTI_ythread *)thread;
#endif
}
static inline ABT_thread ABTI_ythread_get_handle(ABTI_ythread *p_ythread)
{
#ifndef ABT_CONFIG_DISABLE_ERROR_CHECK
    if (p_ythread == NULL)
        return ABT_THREAD_NULL;
#endif
    return (ABT_thread)p_ythread;
}
static inline void ABTI_ythread_resume_and_push(ABTI_local *p_local,
                                                ABTI_ythread *p_ythread)
{
    /* The ULT must be in the BLOCKED state. */
    ABTI_ASSERT(ABTD_atomic_acquire_load_int(&p_ythread->thread.state) ==
                ABT_THREAD_STATE_BLOCKED);
    ABTI_event_ythread_resume(p_local, p_ythread,
                              ABTI_local_get_xstream_or_null(p_local)
                                  ? ABTI_local_get_xstream(p_local)->p_thread
                                  : NULL);
    /* Load the pool before pushing the ULT: once pushed, another thread may
     * pop it and change its associated pool. */
    ABTI_pool *p_pool = p_ythread->thread.p_pool;
    ABTI_pool_add_thread(&p_ythread->thread,
                         ABT_POOL_CONTEXT_OP_THREAD_RESUME);
    ABTI_pool_dec_num_blocked(p_pool);
}
static inline ABTI_ythread *
ABTI_ythread_context_get_ythread(ABTD_ythread_context *p_ctx)
{
    return (ABTI_ythread *)(((char *)p_ctx) - offsetof(ABTI_ythread, ctx));
}
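
/* The context jump/switch helpers below share one pattern: if the target
 * context has already been started, transfer control to it directly;
 * otherwise, allocate a mempool stack on demand when the ULT does not have
 * one yet and then start the context.  Stack allocation is thus deferred
 * until a ULT runs for the first time. */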
ABTU_noreturn static inline void
ABTI_ythread_context_jump(ABTI_xstream *p_local_xstream, ABTI_ythread *p_new)
{
    if (ABTD_ythread_context_is_started(&p_new->ctx)) {
        ABTD_ythread_context_jump(&p_new->ctx);
    } else {
        if (!ABTD_ythread_context_has_stack(&p_new->ctx)) {
            ABTI_mem_alloc_ythread_mempool_stack(p_local_xstream, p_new);
        }
        ABTD_ythread_context_start_and_jump(&p_new->ctx);
    }
    ABTU_unreachable();
}
static inline void ABTI_ythread_context_switch(ABTI_xstream *p_local_xstream,
                                               ABTI_ythread *p_old,
                                               ABTI_ythread *p_new)
{
    if (ABTD_ythread_context_is_started(&p_new->ctx)) {
        ABTD_ythread_context_switch(&p_old->ctx, &p_new->ctx);
    } else {
        if (!ABTD_ythread_context_has_stack(&p_new->ctx)) {
            ABTI_mem_alloc_ythread_mempool_stack(p_local_xstream, p_new);
        }
        ABTD_ythread_context_start_and_switch(&p_old->ctx, &p_new->ctx);
    }
}
ABTU_noreturn static inline void
ABTI_ythread_context_jump_with_call(ABTI_xstream *p_local_xstream,
                                    ABTI_ythread *p_new, void (*f_cb)(void *),
                                    void *cb_arg)
{
    if (ABTD_ythread_context_is_started(&p_new->ctx)) {
        ABTD_ythread_context_jump_with_call(&p_new->ctx, f_cb, cb_arg);
    } else {
        if (!ABTD_ythread_context_has_stack(&p_new->ctx)) {
            ABTI_mem_alloc_ythread_mempool_stack(p_local_xstream, p_new);
        }
        ABTD_ythread_context_start_and_jump_with_call(&p_new->ctx, f_cb,
                                                      cb_arg);
    }
    ABTU_unreachable();
}
static inline void
ABTI_ythread_context_switch_with_call(ABTI_xstream *p_local_xstream,
                                      ABTI_ythread *p_old, ABTI_ythread *p_new,
                                      void (*f_cb)(void *), void *cb_arg)
{
    if (ABTD_ythread_context_is_started(&p_new->ctx)) {
        ABTD_ythread_context_switch_with_call(&p_old->ctx, &p_new->ctx, f_cb,
                                              cb_arg);
    } else {
        if (!ABTD_ythread_context_has_stack(&p_new->ctx)) {
            ABTI_mem_alloc_ythread_mempool_stack(p_local_xstream, p_new);
        }
        ABTD_ythread_context_start_and_switch_with_call(&p_old->ctx,
                                                        &p_new->ctx, f_cb,
                                                        cb_arg);
    }
}
static inline void
ABTI_ythread_switch_to_child_internal(ABTI_xstream **pp_local_xstream,
                                      ABTI_ythread *p_old, ABTI_ythread *p_new)
{
    p_new->thread.p_parent = &p_old->thread;
    ABTI_xstream *p_local_xstream = *pp_local_xstream;
    ABTI_event_thread_run(p_local_xstream, &p_new->thread, &p_old->thread,
                          p_new->thread.p_parent);
    p_local_xstream->p_thread = &p_new->thread;
    p_new->thread.p_last_xstream = p_local_xstream;
    ABTI_ythread_context_switch(p_local_xstream, p_old, p_new);
    /* Resumed here: the executing execution stream may have changed. */
    *pp_local_xstream = p_old->thread.p_last_xstream;
}
ABTU_noreturn static inline void
ABTI_ythread_jump_to_sibling_internal(ABTI_xstream *p_local_xstream,
                                      ABTI_ythread *p_old, ABTI_ythread *p_new,
                                      void (*f_cb)(void *), void *cb_arg)
{
    p_new->thread.p_parent = p_old->thread.p_parent;
    ABTI_event_thread_run(p_local_xstream, &p_new->thread, &p_old->thread,
                          p_new->thread.p_parent);
    p_local_xstream->p_thread = &p_new->thread;
    p_new->thread.p_last_xstream = p_local_xstream;
    ABTI_ythread_context_jump_with_call(p_local_xstream, p_new, f_cb, cb_arg);
    ABTU_unreachable();
}
static inline void ABTI_ythread_switch_to_sibling_internal(
    ABTI_xstream **pp_local_xstream, ABTI_ythread *p_old, ABTI_ythread *p_new,
    void (*f_cb)(void *), void *cb_arg)
{
    p_new->thread.p_parent = p_old->thread.p_parent;
    ABTI_xstream *p_local_xstream = *pp_local_xstream;
    ABTI_event_thread_run(p_local_xstream, &p_new->thread, &p_old->thread,
                          p_new->thread.p_parent);
    p_local_xstream->p_thread = &p_new->thread;
    p_new->thread.p_last_xstream = p_local_xstream;
    ABTI_ythread_context_switch_with_call(p_local_xstream, p_old, p_new, f_cb,
                                          cb_arg);
    /* Resumed here: the executing execution stream may have changed. */
    *pp_local_xstream = p_old->thread.p_last_xstream;
}
ABTU_noreturn static inline void
ABTI_ythread_jump_to_parent_internal(ABTI_xstream *p_local_xstream,
                                     ABTI_ythread *p_old, void (*f_cb)(void *),
                                     void *cb_arg)
{
    ABTI_ythread *p_new = ABTI_thread_get_ythread(p_old->thread.p_parent);
    p_local_xstream->p_thread = &p_new->thread;
    ABTI_ASSERT(p_new->thread.p_last_xstream == p_local_xstream);
    ABTI_ythread_context_jump_with_call(p_local_xstream, p_new, f_cb, cb_arg);
    ABTU_unreachable();
}
static inline void
ABTI_ythread_switch_to_parent_internal(ABTI_xstream **pp_local_xstream,
                                       ABTI_ythread *p_old,
                                       void (*f_cb)(void *), void *cb_arg)
{
    ABTI_ythread *p_new = ABTI_thread_get_ythread(p_old->thread.p_parent);
    ABTI_xstream *p_local_xstream = *pp_local_xstream;
    p_local_xstream->p_thread = &p_new->thread;
    ABTI_ASSERT(p_new->thread.p_last_xstream == p_local_xstream);
    ABTI_ythread_context_switch_with_call(p_local_xstream, p_old, p_new, f_cb,
                                          cb_arg);
    /* Resumed here: the executing execution stream may have changed. */
    *pp_local_xstream = p_old->thread.p_last_xstream;
}
static inline ABT_bool ABTI_ythread_context_peek(ABTI_ythread *p_ythread,
                                                 void (*f_peek)(void *),
                                                 void *arg)
{
    return ABTD_ythread_context_peek(&p_ythread->ctx, f_peek, arg);
}
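
/* ABTI_ythread_run_child() marks the child ULT RUNNING and switches to it.
 * The caller becomes the child's parent, so control comes back here when the
 * child yields or finishes. */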
static inline void ABTI_ythread_run_child(ABTI_xstream **pp_local_xstream,
                                          ABTI_ythread *p_self,
                                          ABTI_ythread *p_child)
{
    ABTD_atomic_release_store_int(&p_child->thread.state,
                                  ABT_THREAD_STATE_RUNNING);
    ABTI_ythread_switch_to_child_internal(pp_local_xstream, p_self, p_child);
}
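
/* The yield/suspend operations below are expressed as a context switch plus a
 * callback (f_cb).  The callback is invoked only after the previous ULT's
 * context has been switched away from, which is the earliest point where it
 * is safe to hand that ULT to someone else (push it to a pool, release a
 * lock, or free it), since its stack is no longer in use. */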
typedef enum {
    ABTI_YTHREAD_YIELD_KIND_USER,
    ABTI_YTHREAD_YIELD_KIND_YIELD_LOOP,
} ABTI_ythread_yield_kind;

typedef enum {
    ABTI_YTHREAD_YIELD_TO_KIND_USER,
    ABTI_YTHREAD_YIELD_TO_KIND_CREATE_TO,
    ABTI_YTHREAD_YIELD_TO_KIND_REVIVE_TO,
} ABTI_ythread_yield_to_kind;
void ABTI_ythread_callback_yield_user_yield(void *arg);
void ABTI_ythread_callback_yield_loop(void *arg);
void ABTI_ythread_callback_yield_user_yield_to(void *arg);
void ABTI_ythread_callback_yield_create_to(void *arg);
void ABTI_ythread_callback_yield_revive_to(void *arg);
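
/* ABTI_ythread_yield() returns control to the parent (typically the
 * scheduler), while the yield_to variants transfer control directly to a
 * sibling ULT without going through the parent.  The kind argument selects
 * the callback that handles the previous ULT after the switch. */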
static inline void ABTI_ythread_yield(ABTI_xstream **pp_local_xstream,
                                      ABTI_ythread *p_self,
                                      ABTI_ythread_yield_kind kind,
                                      ABT_sync_event_type sync_event_type,
                                      void *p_sync)
{
    ABTI_event_ythread_yield(*pp_local_xstream, p_self, p_self->thread.p_parent,
                             sync_event_type, p_sync);
    if (kind == ABTI_YTHREAD_YIELD_KIND_USER) {
        ABTI_ythread_switch_to_parent_internal(
            pp_local_xstream, p_self, ABTI_ythread_callback_yield_user_yield,
            (void *)p_self);
    } else {
        ABTI_UB_ASSERT(kind == ABTI_YTHREAD_YIELD_KIND_YIELD_LOOP);
        ABTI_ythread_switch_to_parent_internal(pp_local_xstream, p_self,
                                               ABTI_ythread_callback_yield_loop,
                                               (void *)p_self);
    }
}
static inline void
ABTI_ythread_yield_to(ABTI_xstream **pp_local_xstream, ABTI_ythread *p_self,
                      ABTI_ythread *p_target, ABTI_ythread_yield_to_kind kind,
                      ABT_sync_event_type sync_event_type, void *p_sync)
{
    ABTI_event_ythread_yield(*pp_local_xstream, p_self, p_self->thread.p_parent,
                             sync_event_type, p_sync);
    ABTD_atomic_release_store_int(&p_target->thread.state,
                                  ABT_THREAD_STATE_RUNNING);
    if (kind == ABTI_YTHREAD_YIELD_TO_KIND_USER) {
        ABTI_ythread_switch_to_sibling_internal(
            pp_local_xstream, p_self, p_target,
            ABTI_ythread_callback_yield_user_yield_to, (void *)p_self);
    } else if (kind == ABTI_YTHREAD_YIELD_TO_KIND_CREATE_TO) {
        ABTI_ythread_switch_to_sibling_internal(
            pp_local_xstream, p_self, p_target,
            ABTI_ythread_callback_yield_create_to, (void *)p_self);
    } else {
        ABTI_UB_ASSERT(kind == ABTI_YTHREAD_YIELD_TO_KIND_REVIVE_TO);
        ABTI_ythread_switch_to_sibling_internal(
            pp_local_xstream, p_self, p_target,
            ABTI_ythread_callback_yield_revive_to, (void *)p_self);
    }
}
void ABTI_ythread_callback_thread_yield_to(void *arg);

static inline void
ABTI_ythread_thread_yield_to(ABTI_xstream **pp_local_xstream,
                             ABTI_ythread *p_self, ABTI_ythread *p_target,
                             ABT_sync_event_type sync_event_type, void *p_sync)
{
    ABTI_event_ythread_yield(*pp_local_xstream, p_self, p_self->thread.p_parent,
                             sync_event_type, p_sync);
    ABTD_atomic_release_store_int(&p_target->thread.state,
                                  ABT_THREAD_STATE_RUNNING);
    ABTI_ythread_switch_to_sibling_internal(
        pp_local_xstream, p_self, p_target,
        ABTI_ythread_callback_thread_yield_to, (void *)p_self);
}
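
/* The resume_*_to operations wake up a blocked target and switch to it in one
 * step.  Their callbacks need both ULTs, so the callback argument is a small
 * struct holding the previous ULT (p_prev) and the resumed one (p_next). */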
typedef struct {
    ABTI_ythread *p_prev;
    ABTI_ythread *p_next;
} ABTI_ythread_callback_resume_yield_to_arg;

void ABTI_ythread_callback_resume_yield_to(void *arg);

typedef enum {
    ABTI_YTHREAD_RESUME_YIELD_TO_KIND_USER,
} ABTI_ythread_resume_yield_to_kind;

static inline void
ABTI_ythread_resume_yield_to(ABTI_xstream **pp_local_xstream,
                             ABTI_ythread *p_self, ABTI_ythread *p_target,
                             ABTI_ythread_resume_yield_to_kind kind,
                             ABT_sync_event_type sync_event_type, void *p_sync)
{
    /* The target ULT must be blocked. */
    ABTI_UB_ASSERT(ABTD_atomic_acquire_load_int(&p_target->thread.state) ==
                   ABT_THREAD_STATE_BLOCKED);
    ABTI_event_ythread_resume(ABTI_xstream_get_local(*pp_local_xstream),
                              p_target, &p_self->thread);
    ABTI_event_ythread_yield(*pp_local_xstream, p_self, p_self->thread.p_parent,
                             sync_event_type, p_sync);
    ABTD_atomic_release_store_int(&p_target->thread.state,
                                  ABT_THREAD_STATE_RUNNING);
    ABTI_UB_ASSERT(kind == ABTI_YTHREAD_RESUME_YIELD_TO_KIND_USER);
    ABTI_ythread_callback_resume_yield_to_arg arg = { p_self, p_target };
    ABTI_ythread_switch_to_sibling_internal(
        pp_local_xstream, p_self, p_target,
        ABTI_ythread_callback_resume_yield_to, (void *)&arg);
}
void ABTI_ythread_callback_suspend(void *arg);

static inline void ABTI_ythread_suspend(ABTI_xstream **pp_local_xstream,
                                        ABTI_ythread *p_self,
                                        ABT_sync_event_type sync_event_type,
                                        void *p_sync)
{
    ABTI_event_ythread_suspend(*pp_local_xstream, p_self,
                               p_self->thread.p_parent, sync_event_type,
                               p_sync);
    ABTI_ythread_switch_to_parent_internal(pp_local_xstream, p_self,
                                           ABTI_ythread_callback_suspend,
                                           (void *)p_self);
}
static inline void ABTI_ythread_suspend_to(ABTI_xstream **pp_local_xstream,
                                           ABTI_ythread *p_self,
                                           ABTI_ythread *p_target,
                                           ABT_sync_event_type sync_event_type,
                                           void *p_sync)
{
    ABTI_event_ythread_suspend(*pp_local_xstream, p_self,
                               p_self->thread.p_parent, sync_event_type,
                               p_sync);
    ABTI_ythread_switch_to_sibling_internal(pp_local_xstream, p_self, p_target,
                                            ABTI_ythread_callback_suspend,
                                            (void *)p_self);
}
typedef struct {
    ABTI_ythread *p_prev;
    ABTI_ythread *p_next;
} ABTI_ythread_callback_resume_suspend_to_arg;

void ABTI_ythread_callback_resume_suspend_to(void *arg);

static inline void ABTI_ythread_resume_suspend_to(
    ABTI_xstream **pp_local_xstream, ABTI_ythread *p_self,
    ABTI_ythread *p_target, ABT_sync_event_type sync_event_type, void *p_sync)
{
    /* The target ULT must be blocked. */
    ABTI_UB_ASSERT(ABTD_atomic_acquire_load_int(&p_target->thread.state) ==
                   ABT_THREAD_STATE_BLOCKED);
    ABTI_event_ythread_resume(ABTI_xstream_get_local(*pp_local_xstream),
                              p_target, &p_self->thread);
    ABTI_event_ythread_suspend(*pp_local_xstream, p_self,
                               p_self->thread.p_parent, sync_event_type,
                               p_sync);
    ABTD_atomic_release_store_int(&p_target->thread.state,
                                  ABT_THREAD_STATE_RUNNING);
    ABTI_ythread_callback_resume_suspend_to_arg arg = { p_self, p_target };
    ABTI_ythread_switch_to_sibling_internal(
        pp_local_xstream, p_self, p_target,
        ABTI_ythread_callback_resume_suspend_to, (void *)&arg);
}
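
/* Join handling: once a join has been requested, p_link points to the
 * joiner's context, and the ABTI_THREAD_REQ_JOIN request flag closes the race
 * in which the join request has been issued but p_link is not yet visible.
 * ABTI_ythread_atomic_get_joiner() returns the joiner, or NULL if no thread
 * is joining this ULT. */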
void ABTI_ythread_callback_exit(void *arg);

static inline ABTI_ythread *
ABTI_ythread_atomic_get_joiner(ABTI_ythread *p_ythread)
{
    ABTD_ythread_context *p_ctx = &p_ythread->ctx;
    ABTD_ythread_context *p_link =
        ABTD_atomic_acquire_load_ythread_context_ptr(&p_ctx->p_link);
    if (!p_link) {
        uint32_t req = ABTD_atomic_fetch_or_uint32(&p_ythread->thread.request,
                                                   ABTI_THREAD_REQ_JOIN);
        if (!(req & ABTI_THREAD_REQ_JOIN)) {
            /* No join request has been issued. */
            return NULL;
        } else {
            /* A join request has been issued but p_link is not set yet.  Wait
             * until the joiner publishes its context. */
            do {
                p_link = ABTD_atomic_acquire_load_ythread_context_ptr(
                    &p_ctx->p_link);
            } while (!p_link);
            return ABTI_ythread_context_get_ythread(p_link);
        }
    } else {
        return ABTI_ythread_context_get_ythread(p_link);
    }
}
static inline void ABTI_ythread_resume_joiner(ABTI_xstream *p_local_xstream,
                                              ABTI_ythread *p_ythread)
{
    ABTI_ythread *p_joiner = ABTI_ythread_atomic_get_joiner(p_ythread);
    if (!p_joiner)
        return;
#ifndef ABT_CONFIG_ACTIVE_WAIT_POLICY
    if (p_joiner->thread.type == ABTI_THREAD_TYPE_EXT) {
        /* The joiner is an external thread waiting on a futex: wake it up. */
        ABTD_futex_single *p_futex =
            (ABTD_futex_single *)p_joiner->thread.p_arg;
        ABTD_futex_resume(p_futex);
        return;
    }
#endif
    /* The joiner is a ULT: make it runnable again via its pool. */
    ABTI_ythread_resume_and_push(ABTI_xstream_get_local(p_local_xstream),
                                 p_joiner);
}
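
/* Termination paths.  When the exiting ULT has a joiner, the fast path jumps
 * straight to the joiner if it is a ULT on the same execution stream;
 * otherwise the joiner is pushed back to its pool (or woken via futex if it
 * is an external thread) and control returns to the parent. */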
ABTU_noreturn static inline void
ABTI_ythread_exit(ABTI_xstream *p_local_xstream, ABTI_ythread *p_self)
{
    ABTI_event_thread_finish(p_local_xstream, &p_self->thread,
                             p_self->thread.p_parent);
    ABTI_ythread *p_joiner = ABTI_ythread_atomic_get_joiner(p_self);
    if (p_joiner) {
#ifndef ABT_CONFIG_ACTIVE_WAIT_POLICY
        if (p_joiner->thread.type == ABTI_THREAD_TYPE_EXT) {
            /* The joiner is an external thread: wake it up via futex. */
            ABTD_futex_single *p_futex =
                (ABTD_futex_single *)p_joiner->thread.p_arg;
            ABTD_futex_resume(p_futex);
        } else
#endif
            if (p_self->thread.p_last_xstream ==
                    p_joiner->thread.p_last_xstream &&
                !(p_self->thread.type & ABTI_THREAD_TYPE_MAIN_SCHED)) {
            /* The joiner is a ULT on this execution stream: jump to it
             * directly without going through a pool. */
            ABTI_pool_dec_num_blocked(p_joiner->thread.p_pool);
            ABTI_event_ythread_resume(ABTI_xstream_get_local(p_local_xstream),
                                      p_joiner, &p_self->thread);
            ABTD_atomic_release_store_int(&p_joiner->thread.state,
                                          ABT_THREAD_STATE_RUNNING);
            ABTI_ythread_jump_to_sibling_internal(p_local_xstream, p_self,
                                                  p_joiner,
                                                  ABTI_ythread_callback_exit,
                                                  (void *)p_self);
            ABTU_unreachable();
        } else {
            /* The joiner is on another execution stream: push it to its
             * pool. */
            ABTI_ythread_resume_and_push(ABTI_xstream_get_local(
                                             p_local_xstream),
                                         p_joiner);
        }
    }
    /* No joiner, or the joiner has been handed off: return to the parent. */
    ABTI_ythread_jump_to_parent_internal(p_local_xstream, p_self,
                                         ABTI_ythread_callback_exit,
                                         (void *)p_self);
    ABTU_unreachable();
}
ABTU_noreturn static inline void
ABTI_ythread_exit_to(ABTI_xstream *p_local_xstream, ABTI_ythread *p_self,
                     ABTI_ythread *p_target)
{
    /* Wake up the joiner of p_self, if any, before switching away. */
    ABTI_ythread_resume_joiner(p_local_xstream, p_self);
    ABTI_event_thread_finish(p_local_xstream, &p_self->thread,
                             p_self->thread.p_parent);
    ABTD_atomic_release_store_int(&p_target->thread.state,
                                  ABT_THREAD_STATE_RUNNING);
    ABTI_ythread_jump_to_sibling_internal(p_local_xstream, p_self, p_target,
                                          ABTI_ythread_callback_exit,
                                          (void *)p_self);
    ABTU_unreachable();
}
ABTU_noreturn static inline void ABTI_ythread_exit_to_primary(
    ABTI_global *p_global, ABTI_xstream *p_local_xstream, ABTI_ythread *p_self)
{
    /* Jump back to the primary ULT. */
    ABTI_ythread *p_primary = p_global->p_primary_ythread;
    p_local_xstream->p_thread = &p_primary->thread;
    p_primary->thread.p_last_xstream = p_local_xstream;
    ABTD_atomic_release_store_int(&p_primary->thread.state,
                                  ABT_THREAD_STATE_RUNNING);
    ABTI_ythread_context_jump_with_call(p_local_xstream, p_primary,
                                        ABTI_ythread_callback_exit, p_self);
    ABTU_unreachable();
}
typedef struct {
    ABTI_ythread *p_prev;
    ABTI_ythread *p_next;
} ABTI_ythread_callback_resume_exit_to_arg;

void ABTI_ythread_callback_resume_exit_to(void *arg);

ABTU_noreturn static inline void
ABTI_ythread_resume_exit_to(ABTI_xstream *p_local_xstream, ABTI_ythread *p_self,
                            ABTI_ythread *p_target)
{
    /* The target ULT must be blocked. */
    ABTI_UB_ASSERT(ABTD_atomic_acquire_load_int(&p_target->thread.state) ==
                   ABT_THREAD_STATE_BLOCKED);
    ABTI_event_ythread_resume(ABTI_xstream_get_local(p_local_xstream), p_target,
                              &p_self->thread);
    /* Wake up the joiner of p_self, if any, before switching away. */
    ABTI_ythread_resume_joiner(p_local_xstream, p_self);
    ABTI_event_thread_finish(p_local_xstream, &p_self->thread,
                             p_self->thread.p_parent);
    ABTD_atomic_release_store_int(&p_target->thread.state,
                                  ABT_THREAD_STATE_RUNNING);
    ABTI_ythread_callback_resume_exit_to_arg arg = { p_self, p_target };
    ABTI_ythread_jump_to_sibling_internal(p_local_xstream, p_self, p_target,
                                          ABTI_ythread_callback_resume_exit_to,
                                          (void *)&arg);
    ABTU_unreachable();
}
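
/* Suspend-with-side-effect variants: the extra state (a spinlock to release,
 * a target being joined, or a scheduler to install) is packed into a callback
 * argument, so the action is performed only after this ULT has fully switched
 * away from its own stack. */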
typedef struct {
    ABTI_ythread *p_prev;
    ABTD_spinlock *p_lock;
} ABTI_ythread_callback_suspend_unlock_arg;

void ABTI_ythread_callback_suspend_unlock(void *arg);

static inline void
ABTI_ythread_suspend_unlock(ABTI_xstream **pp_local_xstream,
                            ABTI_ythread *p_self, ABTD_spinlock *p_lock,
                            ABT_sync_event_type sync_event_type, void *p_sync)
{
    ABTI_event_ythread_suspend(*pp_local_xstream, p_self,
                               p_self->thread.p_parent, sync_event_type,
                               p_sync);
    ABTI_ythread_callback_suspend_unlock_arg arg = { p_self, p_lock };
    ABTI_ythread_switch_to_parent_internal(pp_local_xstream, p_self,
                                           ABTI_ythread_callback_suspend_unlock,
                                           (void *)&arg);
}
typedef struct {
    ABTI_ythread *p_prev;
    ABTI_ythread *p_target;
} ABTI_ythread_callback_suspend_join_arg;

void ABTI_ythread_callback_suspend_join(void *arg);

static inline void
ABTI_ythread_suspend_join(ABTI_xstream **pp_local_xstream, ABTI_ythread *p_self,
                          ABTI_ythread *p_target,
                          ABT_sync_event_type sync_event_type, void *p_sync)
{
    ABTI_event_ythread_suspend(*pp_local_xstream, p_self,
                               p_self->thread.p_parent, sync_event_type,
                               p_sync);
    ABTI_ythread_callback_suspend_join_arg arg = { p_self, p_target };
    ABTI_ythread_switch_to_parent_internal(pp_local_xstream, p_self,
                                           ABTI_ythread_callback_suspend_join,
                                           (void *)&arg);
}
typedef struct {
    ABTI_ythread *p_prev;
    ABTI_sched *p_main_sched;
} ABTI_ythread_callback_suspend_replace_sched_arg;

void ABTI_ythread_callback_suspend_replace_sched(void *arg);

static inline void ABTI_ythread_suspend_replace_sched(
    ABTI_xstream **pp_local_xstream, ABTI_ythread *p_self,
    ABTI_sched *p_main_sched, ABT_sync_event_type sync_event_type, void *p_sync)
{
    ABTI_event_ythread_suspend(*pp_local_xstream, p_self,
                               p_self->thread.p_parent, sync_event_type,
                               p_sync);
    ABTI_ythread_callback_suspend_replace_sched_arg arg = { p_self,
                                                            p_main_sched };
    ABTI_ythread_switch_to_parent_internal(
        pp_local_xstream, p_self, ABTI_ythread_callback_suspend_replace_sched,
        (void *)&arg);
}
void ABTI_ythread_callback_orphan(void *arg);

static inline void
ABTI_ythread_yield_orphan(ABTI_xstream **pp_local_xstream, ABTI_ythread *p_self,
                          ABT_sync_event_type sync_event_type, void *p_sync)
{
    ABTI_event_ythread_suspend(*pp_local_xstream, p_self,
                               p_self->thread.p_parent, sync_event_type,
                               p_sync);
    ABTI_ythread_switch_to_parent_internal(pp_local_xstream, p_self,
                                           ABTI_ythread_callback_orphan,
                                           (void *)p_self);
}
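
/* ABTI_ythread_schedule() is the scheduler-side dispatch routine: after
 * handling any pending request on p_thread, it either context-switches to a
 * yieldable thread (ULT) or runs a non-yieldable thread's function directly
 * on the current context. */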
static inline void ABTI_ythread_schedule(ABTI_global *p_global,
                                         ABTI_xstream **pp_local_xstream,
                                         ABTI_thread *p_thread)
{
    ABTI_xstream *p_local_xstream = *pp_local_xstream;
    const int request_op = ABTI_thread_handle_request(p_thread, ABT_TRUE);
    if (ABTU_likely(request_op == ABTI_THREAD_HANDLE_REQUEST_NONE)) {
        /* Execute p_thread. */
        ABTI_ythread *p_ythread = ABTI_thread_get_ythread_or_null(p_thread);
        if (p_ythread) {
            /* p_thread is a yieldable thread (ULT): switch to its context. */
            ABTI_ythread *p_self =
                ABTI_thread_get_ythread(p_local_xstream->p_thread);
            ABTI_ythread_run_child(pp_local_xstream, p_self, p_ythread);
            /* The calling ULT resumes here when the child yields or ends. */
        } else {
            /* p_thread is not yieldable: run its function on this context. */
            ABTD_atomic_release_store_int(&p_thread->state,
                                          ABT_THREAD_STATE_RUNNING);
            p_thread->p_last_xstream = p_local_xstream;
            ABTI_thread *p_sched_thread = p_local_xstream->p_thread;
            p_local_xstream->p_thread = p_thread;
            p_thread->p_parent = p_sched_thread;
            ABTI_event_thread_run(p_local_xstream, p_thread, p_sched_thread,
                                  p_sched_thread);
            p_thread->f_thread(p_thread->p_arg);
            ABTI_event_thread_finish(p_local_xstream, p_thread, p_sched_thread);
            p_local_xstream->p_thread = p_sched_thread;
            ABTI_thread_terminate(p_global, p_local_xstream, p_thread);
        }
    } else if (request_op == ABTI_THREAD_HANDLE_REQUEST_CANCELLED) {
        /* The cancellation request has already been handled: nothing to do. */
    } else if (request_op == ABTI_THREAD_HANDLE_REQUEST_MIGRATED) {
        /* The thread has been migrated to another pool: nothing to do. */
    }
}