#ifndef _HARDWARE_SYNC_H
#define _HARDWARE_SYNC_H

#include "hardware/regs/sio.h"

#ifndef PARAM_ASSERTIONS_ENABLED_SYNC
#define PARAM_ASSERTIONS_ENABLED_SYNC 0
#endif

// Spinlock IDs reserved by the SDK for its own use
#ifndef PICO_SPINLOCK_ID_IRQ
#define PICO_SPINLOCK_ID_IRQ 9
#endif
#ifndef PICO_SPINLOCK_ID_TIMER
#define PICO_SPINLOCK_ID_TIMER 10
#endif
#ifndef PICO_SPINLOCK_ID_HARDWARE_CLAIM
#define PICO_SPINLOCK_ID_HARDWARE_CLAIM 11
#endif
#ifndef PICO_SPINLOCK_ID_OS1
#define PICO_SPINLOCK_ID_OS1 14
#endif
#ifndef PICO_SPINLOCK_ID_OS2
#define PICO_SPINLOCK_ID_OS2 15
#endif

// Spinlock IDs 16..23 form the "striped" range handed out by next_striped_spin_lock_num()
#ifndef PICO_SPINLOCK_ID_STRIPED_FIRST
#define PICO_SPINLOCK_ID_STRIPED_FIRST 16
#endif
#ifndef PICO_SPINLOCK_ID_STRIPED_LAST
#define PICO_SPINLOCK_ID_STRIPED_LAST 23
#endif

// Spinlock IDs 24..31 are free to be claimed at runtime via spin_lock_claim_unused()
#ifndef PICO_SPINLOCK_ID_CLAIM_FREE_FIRST
#define PICO_SPINLOCK_ID_CLAIM_FREE_FIRST 24
#endif

#ifdef PICO_SPINLOCK_ID_CLAIM_FREE_END
#warning PICO_SPINLOCK_ID_CLAIM_FREE_END has been renamed to PICO_SPINLOCK_ID_CLAIM_FREE_LAST
#endif

#ifndef PICO_SPINLOCK_ID_CLAIM_FREE_LAST
#define PICO_SPINLOCK_ID_CLAIM_FREE_LAST 31
#endif
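
The defaults above only partition the 32 hardware spinlocks into reserved, striped, and claim-free ranges; this excerpt does not show them being handed out. As a minimal sketch of how the claim-free range (24..31) is typically consumed, using spin_lock_claim_unused() and spin_lock_init() from the reference list below (the setup function and variable names are assumptions for illustration):

#include "hardware/sync.h"

static spin_lock_t *my_lock;             // hypothetical module-level lock

void my_lock_setup(void) {
    // Claim a currently unclaimed spinlock; with required=true a failure is
    // treated as an error instead of returning -1.
    int lock_num = spin_lock_claim_unused(true);
    my_lock = spin_lock_init((uint) lock_num);   // reset the lock and get its instance
}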
// Bodies of the event, wait, and barrier intrinsics (signatures in the reference list below):
    __asm volatile ("sev");                    // __sev(): send event
    __asm volatile ("wfe");                    // __wfe(): wait for event
    __asm volatile ("wfi");                    // __wfi(): wait for interrupt
    __asm volatile ("dmb" : : : "memory");     // __dmb(): data memory barrier
    __asm volatile ("dsb" : : : "memory");     // __dsb(): data synchronization barrier
    __asm volatile ("isb");                    // __isb(): instruction synchronization barrier
// Body of save_and_disable_interrupts(): read PRIMASK into 'status', then disable IRQs
    __asm volatile ("mrs %0, PRIMASK" : "=r" (status)::);
    __asm volatile ("cpsid i");

// Body of restore_interrupts(): write the saved PRIMASK value back
    __asm volatile ("msr PRIMASK,%0"::"r" (status) : );
// Body of spin_lock_instance(): map a lock number to its SIO spinlock register
    invalid_params_if(SYNC, lock_num >= NUM_SPIN_LOCKS);
    return (spin_lock_t *) (SIO_BASE + SIO_SPINLOCK0_OFFSET + lock_num * 4);

// Body of spin_lock_get_num(): map a spinlock register back to its lock number
    invalid_params_if(SYNC, (uint) lock < SIO_BASE + SIO_SPINLOCK0_OFFSET ||
                            (uint) lock >= NUM_SPIN_LOCKS * sizeof(spin_lock_t) + SIO_BASE + SIO_SPINLOCK0_OFFSET ||
                            ((uint) lock - SIO_BASE + SIO_SPINLOCK0_OFFSET) % sizeof(spin_lock_t) != 0);
    return (uint) (lock - (spin_lock_t *) (SIO_BASE + SIO_SPINLOCK0_OFFSET));

// Busy-wait from spin_lock_unsafe_blocking(): reading the lock register attempts the claim,
// and the read returns 0 until the claim succeeds
    while (__builtin_expect(!*lock, 0));

// Body of is_spin_locked(): test the lock's bit in the SIO spinlock state register
    return 0 != (*(io_ro_32 *) (SIO_BASE + SIO_SPINLOCK_ST_OFFSET) & (1u << lock_num));

// Body of get_core_num(): read the SIO CPUID register (0 or 1)
    return (*(uint32_t *) (SIO_BASE + SIO_CPUID_OFFSET));

#define remove_volatile_cast(t, x) ({__mem_fence_acquire(); (t)(x); })
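
The fragments above are the low-level plumbing; the usual entry points are the "safe" pair spin_lock_blocking() and spin_unlock() from the reference list below, which also save and restore the interrupt state. A minimal sketch combining them (the choice of PICO_SPINLOCK_ID_OS1 and the guarded variable are assumptions for illustration):

#include "hardware/sync.h"

static spin_lock_t *os_lock;
static uint32_t shared_value;            // hypothetical data shared between both cores

void shared_setup(void) {
    os_lock = spin_lock_init(PICO_SPINLOCK_ID_OS1);   // reset the lock and get its instance
}

void shared_set(uint32_t v) {
    uint32_t irq = spin_lock_blocking(os_lock);       // disables IRQs, then takes the hardware lock
    shared_value = v;
    spin_unlock(os_lock, irq);                        // releases the lock and restores the IRQ state
}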
Referenced declarations (signature, brief description, definition):

volatile uint32_t spin_lock_t
    A spin lock identifier. Definition: sync.h:56
static __force_inline void __sev(void)
    Insert a SEV instruction into the code path. Definition: sync.h:112
static __force_inline void __wfe(void)
    Insert a WFE instruction into the code path. Definition: sync.h:122
static __force_inline void __wfi(void)
    Insert a WFI instruction into the code path. Definition: sync.h:131
static __force_inline void __dmb(void)
    Insert a DMB instruction into the code path. Definition: sync.h:141
static __force_inline void __dsb(void)
    Insert a DSB instruction into the code path. Definition: sync.h:152
static __force_inline void __isb(void)
    Insert an ISB instruction into the code path. Definition: sync.h:163
static __force_inline void __mem_fence_acquire(void)
    Acquire a memory fence. Definition: sync.h:170
static __force_inline void __mem_fence_release(void)
    Release a memory fence. Definition: sync.h:186
static __force_inline uint32_t save_and_disable_interrupts(void)
    Save and disable interrupts. Definition: sync.h:203
static __force_inline void restore_interrupts(uint32_t status)
    Restore interrupts to a specified state. Definition: sync.h:215
static __force_inline spin_lock_t * spin_lock_instance(uint lock_num)
    Get HW Spinlock instance from number. Definition: sync.h:225
static __force_inline uint spin_lock_get_num(spin_lock_t *lock)
    Get HW Spinlock number from instance. Definition: sync.h:236
static __force_inline void spin_lock_unsafe_blocking(spin_lock_t *lock)
    Acquire a spin lock without disabling interrupts (hence unsafe). Definition: sync.h:248
static __force_inline void spin_unlock_unsafe(spin_lock_t *lock)
    Release a spin lock without re-enabling interrupts. Definition: sync.h:261
static __force_inline uint32_t spin_lock_blocking(spin_lock_t *lock)
    Acquire a spin lock safely. Definition: sync.h:274
static bool is_spin_locked(spin_lock_t *lock)
    Check to see if a spinlock is currently acquired elsewhere. Definition: sync.h:285
static __force_inline void spin_unlock(spin_lock_t *lock, uint32_t saved_irq)
    Release a spin lock safely. Definition: sync.h:302
static __force_inline uint get_core_num(void)
    Get the current core number. Definition: sync.h:312
void spin_locks_reset(void)
    Release all spin locks. Definition: sync.c:18
bool spin_lock_is_claimed(uint lock_num)
    Determine if a spin lock is claimed. Definition: sync.c:60
spin_lock_t * spin_lock_init(uint lock_num)
    Initialise a spin lock. Definition: sync_core0_only.c:44
uint next_striped_spin_lock_num(void)
    Return a spin lock number from the striped range. Definition: sync_core0_only.c:116
void spin_lock_claim(uint lock_num)
    Mark a spin lock as used. Definition: sync_core0_only.c:121
void spin_lock_claim_mask(uint32_t lock_num_mask)
    Mark multiple spin locks as used. Definition: sync_core0_only.c:125
void spin_lock_unclaim(uint lock_num)
    Mark a spin lock as no longer used. Definition: sync_core0_only.c:129
int spin_lock_claim_unused(bool required)
    Claim a free spin lock. Definition: sync_core0_only.c:133
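
For code that just needs some lock and does not care which, the striped range (16..23) spreads contention across several hardware spinlocks. A minimal sketch using next_striped_spin_lock_num() and spin_lock_instance() from the list above (the wrapper function is an assumption for illustration, and note that striped locks may be shared by unrelated users):

#include "hardware/sync.h"

spin_lock_t *make_striped_lock(void) {         // hypothetical wrapper
    uint num = next_striped_spin_lock_num();   // cycles through the striped range
    return spin_lock_instance(num);            // no claim: other users may share this lock
}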