/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/assembler.h, arch/arm/mm/proc-macros.S
 *
 * Copyright (C) 1996-2000 Russell King
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#ifndef __ASM_ASSEMBLER_H
#define __ASM_ASSEMBLER_H

#include <asm-generic/export.h>

#include <asm/asm-offsets.h>
#include <asm/alternative.h>
#include <asm/asm-bug.h>
#include <asm/cpufeature.h>
#include <asm/cputype.h>
#include <asm/debug-monitors.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/ptrace.h>
#include <asm/thread_info.h>

	/*
	 * Provide a wxN alias for each wN register so that a macro can paste
	 * an xN register argument after a 'w' to obtain its 32-bit view.
	 */
	.irp	n,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30
	wx\n	.req	w\n
	.endr
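
	/*
	 * Illustrative sketch (hypothetical macro, kept in a comment so this
	 * header emits no code): given \reg = x8, the spelling w\reg expands
	 * to wx8, which the .req above aliases to w8:
	 *
	 *	.macro	load_word, reg
	 *	ldr	w\reg, [x0]	// 32-bit load into \reg's low half
	 *	.endm
	 *
	 * cond_yield further down relies on exactly this trick.
	 */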

	.macro	save_and_disable_daif, flags
	mrs	\flags, daif
	msr	daifset, #0xf
	.endm

	.macro	disable_daif
	msr	daifset, #0xf
	.endm

	.macro	enable_daif
	msr	daifclr, #0xf
	.endm

	.macro	restore_daif, flags:req
	msr	daif, \flags
	.endm
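
	/*
	 * Usage sketch (register choice hypothetical; kept in a comment so
	 * nothing is assembled here): bracket a section that must run with
	 * all of D, A, I and F masked:
	 *
	 *	save_and_disable_daif x9	// x9 := old DAIF, then mask all
	 *	// ... non-interruptible work ...
	 *	restore_daif x9			// put DAIF back as it was
	 */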

	/* IRQ/FIQ are the lowest priority flags, unconditionally unmask the rest. */
	.macro	enable_da
	msr	daifclr, #(8 | 4)
	.endm

/*
 * Save/restore interrupts.
 */
	.macro	save_and_disable_irq, flags
	mrs	\flags, daif
	msr	daifset, #3
	.endm

	.macro	restore_irq, flags
	msr	daif, \flags
	.endm

	.macro	enable_dbg
	msr	daifclr, #8
	.endm

	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
	isb	// Synchronise with enable_dbg
9990:
	.endm

	/* call with daif masked */
	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #DBG_MDSCR_SS
	msr	mdscr_el1, \tmp
9990:
	.endm

/*
 * RAS Error Synchronization barrier
 */
	.macro	esb
#ifdef CONFIG_ARM64_RAS_EXTN
	hint	#16
#else
	nop
#endif
	.endm

/*
 * Value prediction barrier
 */
	.macro	csdb
	hint	#20
	.endm

/*
 * Speculation barrier
 */
	.macro	sb
alternative_if_not ARM64_HAS_SB
	dsb	nsh
	isb
alternative_else
	SB_BARRIER_INSN
	nop
alternative_endif
	.endm

/*
 * NOP sequence
 */
	.macro	nops, num
	.rept	\num
	nop
	.endr
	.endm

/*
 * Emit an entry into the exception table
 */
	.macro		_asm_extable, insn, fixup
	.pushsection	__ex_table, "a"
	.align		3
	.long		(\insn - .), (\fixup - .)
	.popsection
	.endm

/*
 * Emit an exception table entry for `insn` if `fixup` is provided,
 * otherwise do nothing.
 */
	.macro		_cond_extable, insn, fixup
	.ifnc		\fixup,
	_asm_extable	\insn, \fixup
	.endif
	.endm

#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l
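
/*
 * Usage sketch (labels and error path hypothetical): a fault taken on the
 * wrapped instruction redirects execution to the fixup label through the
 * exception table entry emitted above:
 *
 *	USER(9f, ldtr x2, [x1])		// user access; faults branch to 9f
 *	...
 * 9:	mov	x0, #-EFAULT		// fixup: report the fault
 *	ret
 */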

/*
 * Register aliases.
 */
lr	.req	x30		// link register

/*
 * Vector entry
 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm

/*
 * Select code when configured for BE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * Select code when configured for LE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif

/*
 * Define a macro that constructs a 64-bit value by concatenating two
 * 32-bit registers. Note that on big endian systems the order of the
 * registers is swapped.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm
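
	/*
	 * Worked example (registers hypothetical): with the low 32 bits in
	 * x0 and the high 32 bits in x1, "regs_to_64 x2, x0, x1" expands to
	 * "orr x2, x0, x1, lsl #32" on a little-endian kernel; a big-endian
	 * kernel swaps the two source arguments instead.
	 */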

/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC.
 */
	/*
	 * @dst: destination register (64 bit wide)
	 * @sym: name of the symbol
	 */
	.macro	adr_l, dst, sym
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.endm

	/*
	 * @dst: destination register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: optional 64-bit scratch register to be used if <dst> is a
	 *       32-bit wide register, in which case it cannot be used to hold
	 *       the address
	 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm

	/*
	 * @src: source register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: mandatory 64-bit scratch register to calculate the address
	 *       while <src> needs to be preserved.
	 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm
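
	/*
	 * Usage sketch (symbol name hypothetical): PC-relative access to a
	 * global within +/- 4 GB of the PC:
	 *
	 *	adr_l	x0, my_flag		// x0 := &my_flag
	 *	ldr_l	w1, my_flag, x2		// w1 := my_flag, x2 is scratch
	 *	str_l	w1, my_flag, x2		// my_flag := w1, x2 is scratch
	 */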

	/*
	 * @dst: destination register
	 */
#if defined(__KVM_NVHE_HYPERVISOR__) || defined(__KVM_VHE_HYPERVISOR__)
	.macro	get_this_cpu_offset, dst
	mrs	\dst, tpidr_el2
	.endm
#else
	.macro	get_this_cpu_offset, dst
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	mrs	\dst, tpidr_el1
alternative_else
	mrs	\dst, tpidr_el2
alternative_endif
	.endm

	.macro	set_this_cpu_offset, src
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	msr	tpidr_el1, \src
alternative_else
	msr	tpidr_el2, \src
alternative_endif
	.endm
#endif

	/*
	 * @dst: Result of per_cpu(sym, smp_processor_id()) (can be SP)
	 * @sym: The name of the per-cpu variable
	 * @tmp: scratch register
	 */
	.macro	adr_this_cpu, dst, sym, tmp
	adrp	\tmp, \sym
	add	\dst, \tmp, #:lo12:\sym
	get_this_cpu_offset \tmp
	add	\dst, \dst, \tmp
	.endm

	/*
	 * @dst: Result of READ_ONCE(per_cpu(sym, smp_processor_id()))
	 * @sym: The name of the per-cpu variable
	 * @tmp: scratch register
	 */
	.macro	ldr_this_cpu dst, sym, tmp
	adr_l	\dst, \sym
	get_this_cpu_offset \tmp
	ldr	\dst, [\dst, \tmp]
	.endm
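
	/*
	 * Usage sketch (variable name hypothetical): access this CPU's copy
	 * of a per-cpu variable:
	 *
	 *	ldr_this_cpu x0, my_percpu_var, x1	// x0 := this CPU's value
	 *	adr_this_cpu x2, my_percpu_var, x3	// x2 := its address
	 */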

/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm

/*
 * read_ctr - read CTR_EL0. If the system has mismatched register fields,
 * provide the system wide safe value from arm64_ftr_reg_ctrel0.sys_val
 */
	.macro	read_ctr, reg
#ifndef __KVM_NVHE_HYPERVISOR__
alternative_if_not ARM64_MISMATCHED_CACHE_TYPE
	mrs	\reg, ctr_el0		// read CTR
	nop
alternative_else
	ldr_l	\reg, arm64_ftr_reg_ctrel0 + ARM64_FTR_SYSVAL
alternative_endif
#else
alternative_if_not ARM64_KVM_PROTECTED_MODE
	ASM_BUG()
alternative_else_nop_endif
alternative_cb kvm_compute_final_ctr_el0
	movz	\reg, #0
	movk	\reg, #0, lsl #16
	movk	\reg, #0, lsl #32
	movk	\reg, #0, lsl #48
alternative_cb_end
#endif
	.endm

/*
 * raw_dcache_line_size - get the minimum D-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * dcache_line_size - get the safe D-cache line size across all CPUs
 */
	.macro	dcache_line_size, reg, tmp
	read_ctr	\tmp
	ubfm		\tmp, \tmp, #16, #19	// cache line size encoding
	mov		\reg, #4		// bytes per word
	lsl		\reg, \reg, \tmp	// actual cache line size
	.endm
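
/*
 * Worked example of the decode used above: CTR_EL0.DminLine (bits [19:16])
 * holds log2 of the line size in 4-byte words, so for the common
 * DminLine = 4 these macros compute 4 << 4 = 64, i.e. a 64-byte line.
 */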

/*
 * raw_icache_line_size - get the minimum I-cache line size on this CPU
 * from the CTR register.
 */
	.macro	raw_icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// cache line size encoding
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm

/*
 * icache_line_size - get the safe I-cache line size across all CPUs
 */
	.macro	icache_line_size, reg, tmp
	read_ctr	\tmp
	and		\tmp, \tmp, #0xf	// cache line size encoding
	mov		\reg, #4		// bytes per word
	lsl		\reg, \reg, \tmp	// actual cache line size
	.endm

/*
 * tcr_set_t0sz - update TCR.T0SZ so that we can load the ID map
 */
	.macro	tcr_set_t0sz, valreg, t0sz
	bfi	\valreg, \t0sz, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

/*
 * tcr_set_t1sz - update TCR.T1SZ
 */
	.macro	tcr_set_t1sz, valreg, t1sz
	bfi	\valreg, \t1sz, #TCR_T1SZ_OFFSET, #TCR_TxSZ_WIDTH
	.endm

/*
 * tcr_compute_pa_size - set TCR.(I)PS to the highest supported
 * ID_AA64MMFR0_EL1.PARange value
 *
 *	tcr:		register with the TCR_ELx value to be updated
 *	pos:		IPS or PS bitfield position
 *	tmp{0,1}:	temporary registers
 */
	.macro	tcr_compute_pa_size, tcr, pos, tmp0, tmp1
	mrs	\tmp0, ID_AA64MMFR0_EL1
	// Narrow PARange to fit the PS field in TCR_ELx
	ubfx	\tmp0, \tmp0, #ID_AA64MMFR0_PARANGE_SHIFT, #3
	mov	\tmp1, #ID_AA64MMFR0_PARANGE_MAX
	cmp	\tmp0, \tmp1
	csel	\tmp0, \tmp1, \tmp0, hi
	bfi	\tcr, \tmp0, \pos, #3
	.endm

	.macro __dcache_op_workaround_clean_cache, op, addr
alternative_if_not ARM64_WORKAROUND_CLEAN_CACHE
	dc	\op, \addr
alternative_else
	dc	civac, \addr
alternative_endif
	.endm

/*
 * Macro to perform a data cache maintenance for the interval
 * [start, end)
 *
 *	op:		operation passed to dc instruction
 *	domain:		domain used in dsb instruction
 *	start:		starting virtual address of the region
 *	end:		end virtual address of the region
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	start, end, tmp1, tmp2
 */
	.macro dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
	dcache_line_size \tmp1, \tmp2
	sub	\tmp2, \tmp1, #1
	bic	\start, \start, \tmp2
.Ldcache_op\@:
	.ifc	\op, cvau
	__dcache_op_workaround_clean_cache \op, \start
	.else
	.ifc	\op, cvac
	__dcache_op_workaround_clean_cache \op, \start
	.else
	.ifc	\op, cvap
	sys	3, c7, c12, 1, \start	// dc cvap
	.else
	.ifc	\op, cvadp
	sys	3, c7, c13, 1, \start	// dc cvadp
	.else
	dc	\op, \start
	.endif
	.endif
	.endif
	.endif
	add	\start, \start, \tmp1
	cmp	\start, \end
	b.lo	.Ldcache_op\@
	dsb	\domain

	_cond_extable .Ldcache_op\@, \fixup
	.endm
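
/*
 * Usage sketch (register choice hypothetical): clean and invalidate
 * [x0, x1) to the point of coherency, with no user-fault fixup:
 *
 *	dcache_by_line_op civac, sy, x0, x1, x2, x3
 *
 * Remember that x0 and both temporaries are corrupted by the loop.
 */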

/*
 * Macro to perform an instruction cache maintenance for the interval
 * [start, end)
 *
 *	start, end:	virtual addresses describing the region
 *	fixup:		optional label to branch to on user fault
 *	Corrupts:	tmp1, tmp2
 */
	.macro invalidate_icache_by_line start, end, tmp1, tmp2, fixup
	icache_line_size \tmp1, \tmp2
	sub	\tmp2, \tmp1, #1
	bic	\tmp2, \start, \tmp2
.Licache_op\@:
	ic	ivau, \tmp2			// invalidate I line PoU
	add	\tmp2, \tmp2, \tmp1
	cmp	\tmp2, \end
	b.lo	.Licache_op\@
	dsb	ish
	isb

	_cond_extable .Licache_op\@, \fixup
	.endm

/*
 * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present
 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1
	sbfx	\tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
	cmp	\tmpreg, #1			// Skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
9000:
	.endm

/*
 * reset_amuserenr_el0 - reset AMUSERENR_EL0 if AMUv1 present
 */
	.macro	reset_amuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64pfr0_el1	// Check ID_AA64PFR0_EL1
	ubfx	\tmpreg, \tmpreg, #ID_AA64PFR0_AMU_SHIFT, #4
	cbz	\tmpreg, .Lskip_\@		// Skip if no AMU present
	msr_s	SYS_AMUSERENR_EL0, xzr		// Disable AMU access from EL0
.Lskip_\@:
	.endm

/*
 * copy_page - copy src to dest using temp registers t1-t8
 */
	.macro copy_page dest:req src:req t1:req t2:req t3:req t4:req t5:req t6:req t7:req t8:req
9998:	ldp	\t1, \t2, [\src]
	ldp	\t3, \t4, [\src, #16]
	ldp	\t5, \t6, [\src, #32]
	ldp	\t7, \t8, [\src, #48]
	add	\src, \src, #64
	stnp	\t1, \t2, [\dest]
	stnp	\t3, \t4, [\dest, #16]
	stnp	\t5, \t6, [\dest, #32]
	stnp	\t7, \t8, [\dest, #48]
	add	\dest, \dest, #64
	tst	\src, #(PAGE_SIZE - 1)
	b.ne	9998b
	.endm
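
/*
 * Usage sketch (register choice hypothetical): copy the page at [x1] to
 * [x0], clobbering x2-x9 as temporaries:
 *
 *	copy_page x0, x1, x2, x3, x4, x5, x6, x7, x8, x9
 *
 * Both pointers end up advanced by PAGE_SIZE; the non-temporal stnp
 * stores avoid filling the D-cache with the destination data.
 */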

/*
 * Annotate a function as being unsuitable for kprobes.
 */
#ifdef CONFIG_KPROBES
#define NOKPROBE(x)				\
	.pushsection "_kprobe_blacklist", "aw";	\
	.quad	x;				\
	.popsection;
#else
#define NOKPROBE(x)
#endif

#if defined(CONFIG_KASAN_GENERIC) || defined(CONFIG_KASAN_SW_TAGS)
#define EXPORT_SYMBOL_NOKASAN(name)
#else
#define EXPORT_SYMBOL_NOKASAN(name)	EXPORT_SYMBOL(name)
#endif

	/*
	 * Emit a 64-bit absolute little endian symbol reference in a way that
	 * ensures that it will be resolved at build time, even when building a
	 * PIE binary. This requires cooperation from the linker script, which
	 * must emit the lo32/hi32 halves individually.
	 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm

	/*
	 * mov_q - move an immediate constant into a 64-bit register using
	 *         between 2 and 4 movz/movk instructions (depending on the
	 *         magnitude and sign of the operand)
	 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm
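
	/*
	 * Worked examples: 0xffffffffffff0000 has bits [63:31] all set, so it
	 * fits a sign-extended 32-bit immediate and "mov_q x0, ..." expands
	 * to just movz+movk (2 instructions); 0x123456789abc is zero above
	 * bit 46 and takes the three-instruction :abs_g2_s: form; an
	 * arbitrary 64-bit constant needs all four movz/movk instructions.
	 */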

	/*
	 * Return the current task_struct.
	 */
	.macro	get_current_task, rd
	mrs	\rd, sp_el0
	.endm

/*
 * Offset ttbr1 to allow for 48-bit kernel VAs set with 52-bit PTRS_PER_PGD.
 * orr is used as it can cover the immediate value (and is idempotent).
 * In future this may be nop'ed out when dealing with 52-bit kernel VAs.
 *	ttbr: Value of ttbr to set, modified.
 */
	.macro	offset_ttbr1, ttbr, tmp
#ifdef CONFIG_ARM64_VA_BITS_52
	mrs_s	\tmp, SYS_ID_AA64MMFR2_EL1
	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
	cbnz	\tmp, .Lskipoffs_\@
	orr	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
.Lskipoffs_\@:
#endif
	.endm

/*
 * Perform the reverse of offset_ttbr1.
 * bic is used as it can cover the immediate value and, in future, won't need
 * to be nop'ed out when dealing with 52-bit kernel VAs.
 */
	.macro	restore_ttbr1, ttbr
#ifdef CONFIG_ARM64_VA_BITS_52
	bic	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
#endif
	.endm

/*
 * Arrange a physical address in a TTBR register, taking care of 52-bit
 * addresses.
 *
 *	phys:	physical address, preserved
 *	ttbr:	returns the TTBR value
 */
	.macro	phys_to_ttbr, ttbr, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	orr	\ttbr, \phys, \phys, lsr #46
	and	\ttbr, \ttbr, #TTBR_BADDR_MASK_52
#else
	mov	\ttbr, \phys
#endif
	.endm

	.macro	phys_to_pte, pte, phys
#ifdef CONFIG_ARM64_PA_BITS_52
	/*
	 * We assume \phys is 64K aligned and this is guaranteed by only
	 * supporting this configuration with 64K pages.
	 */
	orr	\pte, \phys, \phys, lsr #36
	and	\pte, \pte, #PTE_ADDR_MASK
#else
	mov	\pte, \phys
#endif
	.endm

	.macro	pte_to_phys, phys, pte
#ifdef CONFIG_ARM64_PA_BITS_52
	ubfiz	\phys, \pte, #(48 - 16 - 12), #16
	bfxil	\phys, \pte, #16, #32
	lsl	\phys, \phys, #16
#else
	and	\phys, \pte, #PTE_ADDR_MASK
#endif
	.endm
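
	/*
	 * Worked example for the 52-bit PA case: with 64K pages, PA bits
	 * [51:48] live in PTE bits [15:12]. phys_to_pte copies them down
	 * with "lsr #36" (bit 51 -> bit 15), and pte_to_phys reassembles
	 * the address by moving PTE bits [15:12] back up to PA bits [51:48]
	 * while restoring the conventional address bits [47:16].
	 */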

/*
 * tcr_clear_errata_bits - Clear TCR bits that trigger an errata on this CPU.
 */
	.macro	tcr_clear_errata_bits, tcr, tmp1, tmp2
#ifdef CONFIG_FUJITSU_ERRATUM_010001
	mrs	\tmp1, midr_el1

	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001_MASK
	and	\tmp1, \tmp1, \tmp2
	mov_q	\tmp2, MIDR_FUJITSU_ERRATUM_010001
	cmp	\tmp1, \tmp2
	b.ne	10f

	mov_q	\tmp2, TCR_CLEAR_FUJITSU_ERRATUM_010001
	bic	\tcr, \tcr, \tmp2
10:
#endif /* CONFIG_FUJITSU_ERRATUM_010001 */
	.endm

/*
 * Errata workaround prior to disabling the MMU. Insert an ISB immediately
 * prior to executing the MSR that will change SCTLR_ELn[M] from 1 to 0.
 */
	.macro	pre_disable_mmu_workaround
#ifdef CONFIG_QCOM_FALKOR_ERRATUM_E1041
	isb
#endif
	.endm

	/*
	 * frame_push - Push @regcount callee saved registers to the stack,
	 *              starting at x19, as well as x29/x30, and set x29 to
	 *              the new value of sp. Add @extra bytes of stack space
	 *              for locals.
	 */
	.macro	frame_push, regcount:req, extra
	__frame	st, \regcount, \extra
	.endm

	/*
	 * frame_pop  - Pop the callee saved registers from the stack that were
	 *              pushed in the most recent call to frame_push, as well
	 *              as x29/x30 and any extra stack space that may have been
	 *              allocated.
	 */
	.macro	frame_pop
	__frame	ld
	.endm
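
	/*
	 * Usage sketch (function name hypothetical): a non-leaf routine that
	 * needs x19-x21 preserved plus 16 bytes of local stack space:
	 *
	 * SYM_FUNC_START(my_helper)
	 *	frame_push 3, 16	// x29/x30 + x19-x21 saved, locals below
	 *	// ... may call other functions and use x19-x21 ...
	 *	frame_pop		// restores everything pushed above
	 *	ret
	 * SYM_FUNC_END(my_helper)
	 */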

	.macro	__frame_regs, reg1, reg2, op, num
	.if .Lframe_regcount == \num
	\op\()r	\reg1, [sp, #(\num + 1) * 8]
	.elseif .Lframe_regcount > \num
	\op\()p	\reg1, \reg2, [sp, #(\num + 1) * 8]
	.endif
	.endm

	.macro	__frame, op, regcount, extra=0
	.ifc	\op, st
	.if (\regcount) < 0 || (\regcount) > 10
	.error	"regcount should be in the range [0 ... 10]"
	.endif
	.if ((\extra) % 16) != 0
	.error	"extra should be a multiple of 16 bytes"
	.endif
	.ifdef	.Lframe_regcount
	.if .Lframe_regcount != -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	.endif
	.set	.Lframe_regcount, \regcount
	.set	.Lframe_extra, \extra
	.set	.Lframe_local_offset, ((\regcount + 3) / 2) * 16
	stp	x29, x30, [sp, #-.Lframe_local_offset - .Lframe_extra]!
	mov	x29, sp
	.endif

	__frame_regs	x19, x20, \op, 1
	__frame_regs	x21, x22, \op, 3
	__frame_regs	x23, x24, \op, 5
	__frame_regs	x25, x26, \op, 7
	__frame_regs	x27, x28, \op, 9

	.ifc	\op, ld
	.if .Lframe_regcount == -1
	.error	"frame_push/frame_pop may not be nested"
	.endif
	ldp	x29, x30, [sp], #.Lframe_local_offset + .Lframe_extra
	.set	.Lframe_regcount, -1
	.endif
	.endm

.macro set_sctlr, sreg, reg
	msr	\sreg, \reg
	isb
	/*
	 * Invalidate the local I-cache so that any instructions fetched
	 * speculatively from the PoC are discarded, since they may have
	 * been dynamically patched at the PoU.
	 */
	ic	iallu
	dsb	nsh
	isb
.endm

.macro set_sctlr_el1, reg
	set_sctlr	sctlr_el1, \reg
.endm

.macro set_sctlr_el2, reg
	set_sctlr	sctlr_el2, \reg
.endm

	/*
	 * Check whether preempt/bh-disabled asm code should yield as soon as
	 * it is able. This is the case if we are currently running in task
	 * context, and either a softirq is pending, or the TIF_NEED_RESCHED
	 * flag is set and preemption is enabled (ie. we're preemptible).
	 */
	.macro	cond_yield, lbl:req, tmp:req, tmp2:req
	get_current_task \tmp
	ldr	\tmp, [\tmp, #TSK_TI_PREEMPT]
	/*
	 * If we are serving a softirq, there is no point in yielding: the
	 * softirq will not be preempted no matter what we do, so we should
	 * run to completion as quickly as we can.
	 */
	tbnz	\tmp, #SOFTIRQ_SHIFT, .Lnoyield_\@
#ifdef CONFIG_PREEMPTION
	sub	\tmp, \tmp, #PREEMPT_DISABLE_OFFSET
	cbz	\tmp, \lbl
#endif
	adr_l	\tmp, irq_stat + IRQ_CPUSTAT_SOFTIRQ_PENDING
	get_this_cpu_offset	\tmp2
	ldr	w\tmp, [\tmp, \tmp2]
	cbnz	w\tmp, \lbl	// yield on pending softirq in task context
.Lnoyield_\@:
	.endm
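
	/*
	 * Usage sketch (labels hypothetical), in the style of the crypto
	 * loops that run for a long time with preemption disabled:
	 *
	 * 0:	// ... process one block of data ...
	 *	cond_yield 1f, x8, x9	// stop early if we should reschedule
	 *	b	0b
	 * 1:	// save partial state, call cond_resched(), then resume
	 */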

/*
 * This macro emits a program property note section identifying
 * architecture features which require special handling, mainly for
 * use in assembly files included in the VDSO.
 */
#define NT_GNU_PROPERTY_TYPE_0		5
#define GNU_PROPERTY_AARCH64_FEATURE_1_AND	0xc0000000

#define GNU_PROPERTY_AARCH64_FEATURE_1_BTI	(1U << 0)
#define GNU_PROPERTY_AARCH64_FEATURE_1_PAC	(1U << 1)
#ifdef CONFIG_ARM64_BTI_KERNEL
#define GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT		\
		((GNU_PROPERTY_AARCH64_FEATURE_1_BTI |	\
		  GNU_PROPERTY_AARCH64_FEATURE_1_PAC))
#endif

#ifdef GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
.macro emit_aarch64_feature_1_and, feat=GNU_PROPERTY_AARCH64_FEATURE_1_DEFAULT
	.pushsection .note.gnu.property, "a"
	.align	3
	.long	2f - 1f
	.long	6f - 3f
	.long	NT_GNU_PROPERTY_TYPE_0
1:	.string	"GNU"
2:
	.align	3
3:	.long	GNU_PROPERTY_AARCH64_FEATURE_1_AND
	.long	5f - 4f
4:
	/*
	 * This is described with an array of char in the Linux API
	 * spec but the text and all other usage (including binutils,
	 * clang and GCC) treat this as a 32 bit value so no swizzling
	 * is required for big endian.
	 */
	.long	\feat
5:
	.align	3
6:
	.popsection
.endm

#else
.macro emit_aarch64_feature_1_and, feat=0
.endm

#endif
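
/*
 * Usage sketch: an assembly file that is also linked into the VDSO would
 * end with
 *
 *	emit_aarch64_feature_1_and
 *
 * so that the linker's AND of this property across all input objects
 * keeps the BTI/PAC feature bits set in the final link.
 */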

#endif	/* __ASM_ASSEMBLER_H */