!C99Shell v. 2.0 [PHP 7 Update] [25.02.2019]!

Software: nginx/1.23.4, PHP/5.6.40-65+ubuntu20.04.1+deb.sury.org+1

uname -a: Linux foro-restaurado-2 5.15.0-1040-oracle #46-Ubuntu SMP Fri Jul 14 21:47:21 UTC 2023 aarch64

uid=33(www-data) gid=33(www-data) groups=33(www-data) 

Safe-mode: OFF (not secure)

/usr/src/linux-oracle-headers-5.15.0-1040/arch/arm64/include/asm/   drwxr-xr-x
Free 83.22 GB of 96.73 GB (86.04%)


Viewing file: barrier.h (4.76 KB)    -rw-r--r--
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

/* Emit <n> NOP instructions via an assembler .rept/.endr loop. */
#define __nops(n)    ".rept    " #n "\nnop\n.endr\n"
#define nops(n)        asm volatile(__nops(n))

/* Event and low-power hints: send-event, wait-for-event, wait-for-interrupt. */
#define sev()        asm volatile("sev" : : : "memory")
#define wfe()        asm volatile("wfe" : : : "memory")
#define wfi()        asm volatile("wfi" : : : "memory")

#define isb()        asm volatile("isb" : : : "memory")
#define dmb(opt)    asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)    asm volatile("dsb " #opt : : : "memory")
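
/*
 * Cheat-sheet for the <opt> argument (from the Arm architecture):
 *   sy                   full system, loads and stores (the default)
 *   ld / st              load-only / store-only variants
 *   ish / ishld / ishst  inner-shareable domain, used by __smp_*() below
 *   osh / oshld / oshst  outer-shareable domain, used by dma_*() below
 */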

/*
 * Profiling/trace synchronization barriers and the consumption of
 * speculative data barrier (CSDB), encoded in the hint space so that
 * CPUs without the corresponding feature execute them as NOPs.
 */
#define psb_csync()    asm volatile("hint #17" : : : "memory")
#define tsb_csync()    asm volatile("hint #18" : : : "memory")
#define csdb()        asm volatile("hint #20" : : : "memory")

/*
 * Ensure that a prior write to ICC_PMR_EL1 has taken effect; the
 * dsb(sy) is only needed on GICs where PMR updates are not
 * self-synchronizing (tracked by the gic_pmr_sync static key).
 */
#ifdef CONFIG_ARM64_PSEUDO_NMI
#define pmr_sync()                        \
    do {                            \
        extern struct static_key_false gic_pmr_sync;    \
                                \
        if (static_branch_unlikely(&gic_pmr_sync))    \
            dsb(sy);                \
    } while(0)
#else
#define pmr_sync()    do {} while (0)
#endif

/* Mandatory barriers: full-system DSBs, ordering against all observers. */
#define mb()        dsb(sy)
#define rmb()        dsb(ld)
#define wmb()        dsb(st)

#define dma_mb()    dmb(osh)
#define dma_rmb()    dmb(oshld)
#define dma_wmb()    dmb(oshst)
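
/*
 * Note: the dma_*() barriers use the outer-shareable domain so that
 * ordering is enforced against non-CPU observers such as DMA-capable
 * devices, not just against other CPUs.
 */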

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
                            unsigned long sz)
{
    unsigned long mask;

    asm volatile(
    "    cmp    %1, %2\n"
    "    sbc    %0, xzr, xzr\n"
    : "=r" (mask)
    : "r" (idx), "Ir" (sz)
    : "cc");

    csdb();
    return mask;
}
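
/*
 * How the mask is built: "cmp %1, %2" computes idx - sz, setting the
 * carry flag only when idx >= sz (no borrow); "sbc %0, xzr, xzr" then
 * evaluates 0 - 0 - 1 + carry, i.e. ~0UL when idx < sz and 0 otherwise.
 * The trailing csdb() prevents the CPU from speculating the mask value.
 *
 * Illustrative use, via the generic array_index_nospec() wrapper from
 * <linux/nospec.h> (tbl and idx are hypothetical):
 *
 *    idx = array_index_nospec(idx, ARRAY_SIZE(tbl));
 *    val = tbl[idx];    // idx is forced to 0 under misspeculation
 */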

/*
 * Ensure that reads of the counter are treated the same as memory reads
 * for the purposes of ordering by subsequent memory barriers.
 *
 * This insanity brought to you by speculative system register reads,
 * out-of-order memory accesses, sequence locks and Thomas Gleixner.
 *
 * https://lore.kernel.org/r/[email protected]/
 */
#define arch_counter_enforce_ordering(val) do {                \
    u64 tmp, _val = (val);                        \
                                    \
    asm volatile(                            \
    "    eor    %0, %1, %1\n"                    \
    "    add    %0, sp, %0\n"                    \
    "    ldr    xzr, [%0]"                    \
    : "=r" (tmp) : "r" (_val));                    \
} while (0)
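
/*
 * How this works: "eor %0, %1, %1" produces a zero that is
 * data-dependent on val; adding it to sp and loading from the result
 * (into xzr, discarding the value) converts that into an address
 * dependency, so the counter read is ordered exactly like a memory
 * read by any subsequent barrier.
 */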

/* SMP barriers: inner-shareable DMBs, sufficient for CPU-to-CPU ordering. */
#define __smp_mb()    dmb(ish)
#define __smp_rmb()    dmb(ishld)
#define __smp_wmb()    dmb(ishst)

#define __smp_store_release(p, v)                    \
do {                                    \
    typeof(p) __p = (p);                        \
    union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u =    \
        { .__val = (__force __unqual_scalar_typeof(*p)) (v) };    \
    compiletime_assert_atomic_type(*p);                \
    kasan_check_write(__p, sizeof(*p));                \
    switch (sizeof(*p)) {                        \
    case 1:                                \
        asm volatile ("stlrb %w1, %0"                \
                : "=Q" (*__p)                \
                : "r" (*(__u8 *)__u.__c)        \
                : "memory");                \
        break;                            \
    case 2:                                \
        asm volatile ("stlrh %w1, %0"                \
                : "=Q" (*__p)                \
                : "r" (*(__u16 *)__u.__c)        \
                : "memory");                \
        break;                            \
    case 4:                                \
        asm volatile ("stlr %w1, %0"                \
                : "=Q" (*__p)                \
                : "r" (*(__u32 *)__u.__c)        \
                : "memory");                \
        break;                            \
    case 8:                                \
        asm volatile ("stlr %1, %0"                \
                : "=Q" (*__p)                \
                : "r" (*(__u64 *)__u.__c)        \
                : "memory");                \
        break;                            \
    }                                \
} while (0)
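
/*
 * STLR{B,H} above is a store-release: no load or store that precedes
 * it in program order can be observed after it. The union bounces v
 * through a char array so that volatile/const qualifiers on *p are
 * shed before the value is fed to the asm.
 */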

#define __smp_load_acquire(p)                        \
({                                    \
    union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u;    \
    typeof(p) __p = (p);                        \
    compiletime_assert_atomic_type(*p);                \
    kasan_check_read(__p, sizeof(*p));                \
    switch (sizeof(*p)) {                        \
    case 1:                                \
        asm volatile ("ldarb %w0, %1"                \
            : "=r" (*(__u8 *)__u.__c)            \
            : "Q" (*__p) : "memory");            \
        break;                            \
    case 2:                                \
        asm volatile ("ldarh %w0, %1"                \
            : "=r" (*(__u16 *)__u.__c)            \
            : "Q" (*__p) : "memory");            \
        break;                            \
    case 4:                                \
        asm volatile ("ldar %w0, %1"                \
            : "=r" (*(__u32 *)__u.__c)            \
            : "Q" (*__p) : "memory");            \
        break;                            \
    case 8:                                \
        asm volatile ("ldar %0, %1"                \
            : "=r" (*(__u64 *)__u.__c)            \
            : "Q" (*__p) : "memory");            \
        break;                            \
    }                                \
    (typeof(*p))__u.__val;                        \
})
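
/*
 * LDAR{B,H} is the matching load-acquire: no load or store that
 * follows it in program order can be observed before it. Paired with
 * __smp_store_release() this yields the classic message-passing
 * pattern (illustrative only; data and flag are hypothetical):
 *
 *    // producer
 *    WRITE_ONCE(data, 42);
 *    smp_store_release(&flag, 1);
 *
 *    // consumer
 *    while (!smp_load_acquire(&flag))
 *        cpu_relax();
 *    r = READ_ONCE(data);    // guaranteed to observe 42
 */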

#define smp_cond_load_relaxed(ptr, cond_expr)                \
({                                    \
    typeof(ptr) __PTR = (ptr);                    \
    __unqual_scalar_typeof(*ptr) VAL;                \
    for (;;) {                            \
        VAL = READ_ONCE(*__PTR);                \
        if (cond_expr)                        \
            break;                        \
        __cmpwait_relaxed(__PTR, VAL);                \
    }                                \
    (typeof(*ptr))VAL;                        \
})

#define smp_cond_load_acquire(ptr, cond_expr)                \
({                                    \
    typeof(ptr) __PTR = (ptr);                    \
    __unqual_scalar_typeof(*ptr) VAL;                \
    for (;;) {                            \
        VAL = smp_load_acquire(__PTR);                \
        if (cond_expr)                        \
            break;                        \
        __cmpwait_relaxed(__PTR, VAL);                \
    }                                \
    (typeof(*ptr))VAL;                        \
})
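
/*
 * Both loops rely on the arm64-specific __cmpwait_relaxed() (defined
 * in <asm/cmpxchg.h>), which arms the exclusive monitor with an
 * exclusive load and then executes WFE, letting the CPU sleep until
 * *ptr is written rather than busy-polling. VAL names the value just
 * loaded. A typical (hypothetical) use, waiting for a lock word to
 * clear with acquire semantics:
 *
 *    smp_cond_load_acquire(&lock->locked, VAL == 0);
 */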

#include <asm-generic/barrier.h>

#endif    /* __ASSEMBLY__ */

#endif    /* __ASM_BARRIER_H */
