/*
 * Copyright 2018, Jaroslaw Pelczar <jarek@jpelczar.com>
 * Distributed under the terms of the MIT License.
 */
#ifndef _KERNEL_ARCH_ARM64_ARCH_INT_H_
#define _KERNEL_ARCH_ARM64_ARCH_INT_H_


#include <SupportDefs.h>
#include <kernel/arch/arm64/arm_registers.h>


#define NUM_IO_VECTORS			1024

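// Background (generic AArch64 behaviour, not specific to this header): the
// DAIF register mirrors the PSTATE exception mask bits D (debug, bit 9),
// A (SError, bit 8), I (IRQ, bit 7) and F (FIQ, bit 6). "msr daifset/daifclr,
// #imm" sets/clears the bits selected by the 4-bit immediate, so #0xf masks
// or unmasks all four classes at once.
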
static inline void
arch_int_enable_interrupts_inline(void)
{
	// Clear all four DAIF mask bits, unmasking debug, SError, IRQ and FIQ.
	asm volatile("msr daifclr, #0xf" : : : "memory");
}


static inline int
arch_int_disable_interrupts_inline(void)
{
	uint32 flags;

	// Save the current DAIF value, then mask all four exception classes.
	// "%x0" forces the 64-bit register name: "mrs" only accepts X
	// registers, even though the interesting bits fit in 32 bits.
	asm volatile("mrs %x0, daif\n"
		"msr daifset, #0xf"
		: "=r"(flags) : : "memory");

	return flags;
}


static inline void
arch_int_restore_interrupts_inline(int oldState)
{
	// Write back a state previously returned by
	// arch_int_disable_interrupts_inline(). The cast widens the value so
	// the full 64-bit register handed to "msr" is well defined.
	asm volatile("msr daif, %0" : : "r"((uint64)oldState) : "memory");
}


static inline bool
arch_int_are_interrupts_enabled_inline(void)
{
	// IRQs are delivered only while the I mask bit is clear.
	return (READ_SPECIALREG(DAIF) & PSR_I) == 0;
}


// map the functions to the inline versions
#define arch_int_enable_interrupts()	arch_int_enable_interrupts_inline()
#define arch_int_disable_interrupts()	arch_int_disable_interrupts_inline()
#define arch_int_restore_interrupts(status)	\
	arch_int_restore_interrupts_inline(status)
#define arch_int_are_interrupts_enabled()	\
	arch_int_are_interrupts_enabled_inline()

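
// A minimal usage sketch (illustrative only, using just the macros above):
// save the previous state, mask interrupts around the critical section, then
// restore exactly what was observed, so the pattern nests safely.
//
//	int state = arch_int_disable_interrupts();
//	// ... critical section, interrupts masked ...
//	arch_int_restore_interrupts(state);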


#endif /* _KERNEL_ARCH_ARM64_ARCH_INT_H_ */