qemu

FORK: QEMU emulator
git clone https://git.neptards.moe/neptards/qemu.git
Log | Files | Refs | Submodules | LICENSE

memory_ldst_cached.h.inc (3902B)


      1 /*
      2  *  Memory access templates for MemoryRegionCache
      3  *
      4  *  Copyright (c) 2018 Red Hat, Inc.
      5  *
      6  * This library is free software; you can redistribute it and/or
      7  * modify it under the terms of the GNU Lesser General Public
      8  * License as published by the Free Software Foundation; either
      9  * version 2.1 of the License, or (at your option) any later version.
     10  *
     11  * This library is distributed in the hope that it will be useful,
     12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
     13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
     14  * Lesser General Public License for more details.
     15  *
     16  * You should have received a copy of the GNU Lesser General Public
     17  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
     18  */
     19 
/*
 * Symbol-name builders for the load half of this template.  This file is
 * included once per byte order with ENDIANNESS defined by the including
 * file (presumably to an empty token, _le, or _be — confirm against the
 * includer), so e.g. ADDRESS_SPACE_LD_CACHED(uw) expands to
 * address_space_lduw_le_cached and LD_P(uw) to lduw_le_p.
 */
#define ADDRESS_SPACE_LD_CACHED(size) \
    glue(glue(address_space_ld, size), glue(ENDIANNESS, _cached))
/* Out-of-line fallback used when the region is not directly mapped. */
#define ADDRESS_SPACE_LD_CACHED_SLOW(size) \
    glue(glue(address_space_ld, size), glue(ENDIANNESS, _cached_slow))
/* Host-pointer load primitive for the same size/endianness. */
#define LD_P(size) \
    glue(glue(ld, size), glue(ENDIANNESS, _p))
     26 
     27 static inline uint16_t ADDRESS_SPACE_LD_CACHED(uw)(MemoryRegionCache *cache,
     28     hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
     29 {
     30     assert(addr < cache->len && 2 <= cache->len - addr);
     31     fuzz_dma_read_cb(cache->xlat + addr, 2, cache->mrs.mr);
     32     if (likely(cache->ptr)) {
     33         return LD_P(uw)(cache->ptr + addr);
     34     } else {
     35         return ADDRESS_SPACE_LD_CACHED_SLOW(uw)(cache, addr, attrs, result);
     36     }
     37 }
     38 
     39 static inline uint32_t ADDRESS_SPACE_LD_CACHED(l)(MemoryRegionCache *cache,
     40     hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
     41 {
     42     assert(addr < cache->len && 4 <= cache->len - addr);
     43     fuzz_dma_read_cb(cache->xlat + addr, 4, cache->mrs.mr);
     44     if (likely(cache->ptr)) {
     45         return LD_P(l)(cache->ptr + addr);
     46     } else {
     47         return ADDRESS_SPACE_LD_CACHED_SLOW(l)(cache, addr, attrs, result);
     48     }
     49 }
     50 
     51 static inline uint64_t ADDRESS_SPACE_LD_CACHED(q)(MemoryRegionCache *cache,
     52     hwaddr addr, MemTxAttrs attrs, MemTxResult *result)
     53 {
     54     assert(addr < cache->len && 8 <= cache->len - addr);
     55     fuzz_dma_read_cb(cache->xlat + addr, 8, cache->mrs.mr);
     56     if (likely(cache->ptr)) {
     57         return LD_P(q)(cache->ptr + addr);
     58     } else {
     59         return ADDRESS_SPACE_LD_CACHED_SLOW(q)(cache, addr, attrs, result);
     60     }
     61 }
     62 
/* Load helpers are template-local; retire them before the store half
 * defines its own name builders below. */
#undef ADDRESS_SPACE_LD_CACHED
#undef ADDRESS_SPACE_LD_CACHED_SLOW
#undef LD_P
     66 
/*
 * Symbol-name builders for the store half, mirroring the load macros
 * above: ADDRESS_SPACE_ST_CACHED(w) expands to e.g.
 * address_space_stw_le_cached and ST_P(w) to stw_le_p, depending on how
 * the including file defined ENDIANNESS.
 */
#define ADDRESS_SPACE_ST_CACHED(size) \
    glue(glue(address_space_st, size), glue(ENDIANNESS, _cached))
/* Out-of-line fallback used when the region is not directly mapped. */
#define ADDRESS_SPACE_ST_CACHED_SLOW(size) \
    glue(glue(address_space_st, size), glue(ENDIANNESS, _cached_slow))
/* Host-pointer store primitive for the same size/endianness. */
#define ST_P(size) \
    glue(glue(st, size), glue(ENDIANNESS, _p))
     73 
     74 static inline void ADDRESS_SPACE_ST_CACHED(w)(MemoryRegionCache *cache,
     75     hwaddr addr, uint16_t val, MemTxAttrs attrs, MemTxResult *result)
     76 {
     77     assert(addr < cache->len && 2 <= cache->len - addr);
     78     if (likely(cache->ptr)) {
     79         ST_P(w)(cache->ptr + addr, val);
     80     } else {
     81         ADDRESS_SPACE_ST_CACHED_SLOW(w)(cache, addr, val, attrs, result);
     82     }
     83 }
     84 
     85 static inline void ADDRESS_SPACE_ST_CACHED(l)(MemoryRegionCache *cache,
     86     hwaddr addr, uint32_t val, MemTxAttrs attrs, MemTxResult *result)
     87 {
     88     assert(addr < cache->len && 4 <= cache->len - addr);
     89     if (likely(cache->ptr)) {
     90         ST_P(l)(cache->ptr + addr, val);
     91     } else {
     92         ADDRESS_SPACE_ST_CACHED_SLOW(l)(cache, addr, val, attrs, result);
     93     }
     94 }
     95 
     96 static inline void ADDRESS_SPACE_ST_CACHED(q)(MemoryRegionCache *cache,
     97     hwaddr addr, uint64_t val, MemTxAttrs attrs, MemTxResult *result)
     98 {
     99     assert(addr < cache->len && 8 <= cache->len - addr);
    100     if (likely(cache->ptr)) {
    101         ST_P(q)(cache->ptr + addr, val);
    102     } else {
    103         ADDRESS_SPACE_ST_CACHED_SLOW(q)(cache, addr, val, attrs, result);
    104     }
    105 }
    106 
/* Retire the store-half name builders; nothing below uses them. */
#undef ADDRESS_SPACE_ST_CACHED
#undef ADDRESS_SPACE_ST_CACHED_SLOW
#undef ST_P

/* Consume ENDIANNESS so the including file can redefine it before the
 * next inclusion of this template. */
#undef ENDIANNESS