linux/arch/parisc/include/asm/bitops.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _PARISC_BITOPS_H
#define _PARISC_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/types.h>
#include <asm/byteorder.h>
#include <asm/barrier.h>
#include <linux/atomic.h>

/* See http://marc.theaimsgroup.com/?t=108826637900003 for the discussion
 * of volatile and the __*_bit() (set/clear/change) variants:
 *      *_bit() are atomic and want the volatile qualifier.
 *      __*_bit() are "relaxed" and use neither the spinlock nor volatile.
 */

static __inline__ void set_bit(int nr, volatile unsigned long * addr)
{
        unsigned long mask = BIT_MASK(nr);
        unsigned long flags;

        addr += BIT_WORD(nr);
        _atomic_spin_lock_irqsave(addr, flags);
        *addr |= mask;
        _atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void clear_bit(int nr, volatile unsigned long * addr)
{
        unsigned long mask = BIT_MASK(nr);
        unsigned long flags;

        addr += BIT_WORD(nr);
        _atomic_spin_lock_irqsave(addr, flags);
        *addr &= ~mask;
        _atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ void change_bit(int nr, volatile unsigned long * addr)
{
        unsigned long mask = BIT_MASK(nr);
        unsigned long flags;

        addr += BIT_WORD(nr);
        _atomic_spin_lock_irqsave(addr, flags);
        *addr ^= mask;
        _atomic_spin_unlock_irqrestore(addr, flags);
}

static __inline__ int test_and_set_bit(int nr, volatile unsigned long * addr)
{
        unsigned long mask = BIT_MASK(nr);
        unsigned long old;
        unsigned long flags;
        int set;

        addr += BIT_WORD(nr);
        _atomic_spin_lock_irqsave(addr, flags);
        old = *addr;
        set = (old & mask) ? 1 : 0;
        if (!set)
                *addr = old | mask;
        _atomic_spin_unlock_irqrestore(addr, flags);

        return set;
}

static __inline__ int test_and_clear_bit(int nr, volatile unsigned long * addr)
{
        unsigned long mask = BIT_MASK(nr);
        unsigned long old;
        unsigned long flags;
        int set;

        addr += BIT_WORD(nr);
        _atomic_spin_lock_irqsave(addr, flags);
        old = *addr;
        set = (old & mask) ? 1 : 0;
        if (set)
                *addr = old & ~mask;
        _atomic_spin_unlock_irqrestore(addr, flags);

        return set;
}

static __inline__ int test_and_change_bit(int nr, volatile unsigned long * addr)
{
        unsigned long mask = BIT_MASK(nr);
        unsigned long oldbit;
        unsigned long flags;

        addr += BIT_WORD(nr);
        _atomic_spin_lock_irqsave(addr, flags);
        oldbit = *addr;
        *addr = oldbit ^ mask;
        _atomic_spin_unlock_irqrestore(addr, flags);

        return (oldbit & mask) ? 1 : 0;
}
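
/*
 * Editorial sketch (not part of the original header): the test_and_*
 * forms above atomically return the old bit value, which makes
 * test_and_set_bit() usable as a simple try-lock on a flag word.
 * The names my_flags, MY_BUSY_BIT, my_try_acquire and my_release are
 * hypothetical.
 */
#if 0	/* illustration only, never compiled */
static unsigned long my_flags;
#define MY_BUSY_BIT	0

static int my_try_acquire(void)
{
	/* Old value was 1: someone else already owns the resource. */
	if (test_and_set_bit(MY_BUSY_BIT, &my_flags))
		return 0;
	return 1;	/* we flipped 0 -> 1, so we own it */
}

static void my_release(void)
{
	clear_bit(MY_BUSY_BIT, &my_flags);
}
#endif
/*
 * Real locking code would normally prefer test_and_set_bit_lock() and
 * clear_bit_unlock() from <asm-generic/bitops/lock.h> (included below),
 * which carry explicit acquire/release semantics.
 */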

#include <asm-generic/bitops/non-atomic.h>
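
/*
 * Editorial note: the generic __set_bit()/__clear_bit()/__change_bit()
 * pulled in above are plain read-modify-write sequences with no
 * spinlock, so they are only safe when the word cannot be touched
 * concurrently. A minimal sketch of the distinction (hypothetical names):
 */
#if 0	/* illustration only, never compiled */
static unsigned long local_word;	/* private to one context */
static unsigned long shared_word;	/* visible to other CPUs/IRQs */

static void example(void)
{
	__set_bit(3, &local_word);	/* relaxed: caller guarantees exclusivity */
	set_bit(3, &shared_word);	/* atomic: serialized via _atomic_spin_lock_irqsave() */
}
#endif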

/**
 * __ffs - find the first set bit in a word; returns 0 to BITS_PER_LONG-1.
 * @word: The word to search
 *
 * The result of __ffs() is undefined if no bit is set.
 *
 * 32-bit fast __ffs by LaMont Jones "lamont At hp com".
 * 64-bit enhancement by Grant Grundler "grundler At parisc-linux org".
 * (with help from willy/jejb to get the semantics right)
 *
 * This algorithm avoids branches by making use of nullification.
 * One side effect of the "extr" instructions is that they set the
 * PSW[N] bit.  Whether PSW[N] (nullify next insn) gets set is determined
 * by the "condition" field (eg "<>" or "TR" below) in the extr* insn.
 * Only the 1st and one of either the 2nd or 3rd insn will get executed.
 * Each set of 3 insns executes in 2 cycles on PA8x00, vs 16 or so
 * cycles for each mispredicted branch.
 */

static __inline__ unsigned long __ffs(unsigned long x)
{
        unsigned long ret;

        __asm__(
#ifdef CONFIG_64BIT
                " ldi       63,%1\n"
                " extrd,u,*<>  %0,63,32,%%r0\n"
                " extrd,u,*TR  %0,31,32,%0\n"   /* move top 32-bits down */
                " addi    -32,%1,%1\n"
#else
                " ldi       31,%1\n"
#endif
                " extru,<>  %0,31,16,%%r0\n"
                " extru,TR  %0,15,16,%0\n"      /* xxxx0000 -> 0000xxxx */
                " addi    -16,%1,%1\n"
                " extru,<>  %0,31,8,%%r0\n"
                " extru,TR  %0,23,8,%0\n"       /* 0000xx00 -> 000000xx */
                " addi    -8,%1,%1\n"
                " extru,<>  %0,31,4,%%r0\n"
                " extru,TR  %0,27,4,%0\n"       /* 000000x0 -> 0000000x */
                " addi    -4,%1,%1\n"
                " extru,<>  %0,31,2,%%r0\n"
                " extru,TR  %0,29,2,%0\n"       /* 0000000y, 1100b -> 0011b */
                " addi    -2,%1,%1\n"
                " extru,=  %0,31,1,%%r0\n"      /* check last bit */
                " addi    -1,%1,%1\n"
                        : "+r" (x), "=r" (ret) );
        return ret;
}
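
/*
 * Editorial reference model of the asm above (hypothetical name, not
 * kernel code): each stage tests the low half of the remaining width.
 * If it holds a set bit, subtract the half-width from the running count
 * (the answer lies in the low half); otherwise shift the upper half
 * down.  This mirrors each extru/extrd pair and its nullification.
 */
#if 0	/* illustration only, never compiled */
static unsigned long __ffs_model(unsigned long x)
{
	unsigned long ret;

#ifdef CONFIG_64BIT
	ret = 63;
	if (x & 0xffffffffUL)
		ret -= 32;	/* first set bit is in the low 32 bits */
	else
		x >>= 32;	/* move top 32 bits down */
#else
	ret = 31;
#endif
	if (x & 0xffff) ret -= 16; else x >>= 16;
	if (x & 0x00ff) ret -= 8;  else x >>= 8;
	if (x & 0x000f) ret -= 4;  else x >>= 4;
	if (x & 0x0003) ret -= 2;  else x >>= 2;
	if (x & 0x0001) ret -= 1;
	return ret;	/* e.g. __ffs_model(0x40) == 6 */
}
#endif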

#include <asm-generic/bitops/ffz.h>
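
/*
 * Editorial note: the generic ffz(x) pulled in above expands to
 * __ffs(~(x)) - the first zero bit of x is the first set bit of its
 * complement - so ffz() reuses the fast __ffs() implementation here.
 */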

/*
 * ffs: find first set bit; returns 1 to BITS_PER_LONG, or 0 if no bit is set.
 * This is defined the same way as the libc and compiler builtin
 * ffs routines, and therefore differs in spirit from ffz() above (man ffs).
 */
static __inline__ int ffs(int x)
{
        return x ? (__ffs((unsigned long)x) + 1) : 0;
}
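
/*
 * Editorial examples of the convention above (not in the original
 * source): ffs(0) == 0, ffs(1) == 1, ffs(0x8000) == 16, whereas
 * __ffs(1) == 0 and __ffs(0) is undefined.
 */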

/*
 * fls: find last (most significant) bit set.
 * fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static __inline__ int fls(unsigned int x)
{
        int ret;
        if (!x)
                return 0;

        __asm__(
        "       ldi             1,%1\n"
        "       extru,<>        %0,15,16,%%r0\n"
        "       zdep,TR         %0,15,16,%0\n"          /* xxxx0000 */
        "       addi            16,%1,%1\n"
        "       extru,<>        %0,7,8,%%r0\n"
        "       zdep,TR         %0,23,24,%0\n"          /* xx000000 */
        "       addi            8,%1,%1\n"
        "       extru,<>        %0,3,4,%%r0\n"
        "       zdep,TR         %0,27,28,%0\n"          /* x0000000 */
        "       addi            4,%1,%1\n"
        "       extru,<>        %0,1,2,%%r0\n"
        "       zdep,TR         %0,29,30,%0\n"          /* y0000000 (y&3 = 0) */
        "       addi            2,%1,%1\n"
        "       extru,=         %0,0,1,%%r0\n"
        "       addi            1,%1,%1\n"              /* if y & 8, add 1 */
                : "+r" (x), "=r" (ret) );

        return ret;
}
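
/*
 * Editorial reference model of the fls() asm above (hypothetical name,
 * not kernel code): each stage tests the high part of the remaining
 * width; if it holds a set bit, add the half-width to the running
 * count, otherwise shift left to bring the interesting bits up.  This
 * mirrors each extru/zdep pair and its nullification.
 */
#if 0	/* illustration only, never compiled */
static int fls_model(unsigned int x)
{
	int ret = 1;

	if (!x)
		return 0;
	if (x & 0xffff0000u) ret += 16; else x <<= 16;
	if (x & 0xff000000u) ret += 8;  else x <<= 8;
	if (x & 0xf0000000u) ret += 4;  else x <<= 4;
	if (x & 0xc0000000u) ret += 2;  else x <<= 2;
	if (x & 0x80000000u) ret += 1;
	return ret;	/* e.g. fls_model(0x80000000u) == 32 */
}
#endif
/*
 * A common use of this shape of fls(): for non-zero x, fls(x) - 1 is
 * ilog2(x), the index of the most significant set bit.
 */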

#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _PARISC_BITOPS_H */