|
| int | ksceKernelCpuId (void) |
| | Returns the CPU ID of the calling processor.
|
| |
| SceKernelIntrStatus | ksceKernelCpuSuspendIntr (void) |
| | Suspend interrupts on the calling processor.
|
| |
| SceKernelIntrStatus | ksceKernelCpuResumeIntr (SceKernelIntrStatus prev_state) |
| | Resume interrupts on the calling processor, restoring the state previously returned by ksceKernelCpuSuspendIntr.
|
| |
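
The suspend/resume pair above is typically wrapped around a short critical section that must not be interrupted on the calling core. A minimal sketch, assuming these prototypes come from the vitasdk kernel header <psp2kern/kernel/cpu.h>:

    #include <psp2kern/kernel/cpu.h>

    static int g_shared_counter;

    /* Bump a value shared with an interrupt handler on this core. */
    static void increment_shared(void)
    {
        /* Save the current interrupt state and suspend interrupts. */
        SceKernelIntrStatus state = ksceKernelCpuSuspendIntr();

        g_shared_counter++;              /* short critical section */

        /* Restore the interrupt state saved above. */
        ksceKernelCpuResumeIntr(state);
    }
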
| void | ksceKernelGetVmaccessRange (SceUIntPtr *pRangeStart, SceUIntPtr *pRangeEnd) |
| |
| void | ksceKernelCpuBranchPredictorInvalidateAll (void) |
| |
| void | ksceKernelCpuBranchPredictorInvalidateAllIS (void) |
| |
| SceUInt8 | ksceKernelCpuGetCONTEXTIDR (void) |
| |
| void | ksceKernelCpuUpdateSCTLR (void) |
| |
| SceBool | ksceKernelIsUncacheAddressInTmpFsGame (void *address) |
| |
| SceInt32 | ksceKernelAtomicAdd32AndGet64InHiLoRange (SceUInt64 *val, SceInt32 add_val, SceInt32 limit) |
| |
| SceInt32 | ksceKernelAtomicAdd32AndGet64InRange (SceUInt64 *val, SceInt32 add_val, SceInt32 limit) |
| |
| SceInt32 | ksceKernelAtomicAddAndGetPositive32InRange (SceInt32 *val, SceInt32 add_val, SceInt32 limit) |
| |
| SceInt32 | ksceKernelAtomicDecIfLowPositive32 (SceInt32 *addr) |
| |
| SceInt32 | ksceKernelAtomicGet32AndSet64 (SceUInt64 *result, SceInt32 src) |
| |
| SceInt32 | ksceKernelAtomicGet32AndSet64_2 (SceUInt64 *result, SceInt32 src) |
| |
| SceUInt32 | ksceKernelAtomicIncrementHighwaterCounter (SceUInt32 *counter) |
| |
| SceInt32 | ksceKernelAtomicLimit64 (SceUInt64 *result, SceInt32 limit) |
| |
| SceInt32 | ksceKernelAtomicSubIfGreater64 (SceUInt64 *a1, SceUInt32 a2) |
| |
| void | ksceKernelAtomicSet8 (SceInt8 *store, SceInt8 value) |
| |
| void | ksceKernelAtomicSet16 (SceInt16 *store, SceInt16 value) |
| |
| void | ksceKernelAtomicSet32 (SceInt32 *store, SceInt32 value) |
| |
| void | ksceKernelAtomicSet64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicCompareAndSet8 (SceInt8 *store, SceInt8 value, SceInt8 new_value) |
| |
| SceInt16 | ksceKernelAtomicCompareAndSet16 (SceInt16 *store, SceInt16 value, SceInt16 new_value) |
| |
| SceInt32 | ksceKernelAtomicCompareAndSet32 (SceInt32 *store, SceInt32 value, SceInt32 new_value) |
| |
| SceInt64 | ksceKernelAtomicCompareAndSet64 (SceInt64 *store, SceInt64 value, SceInt64 new_value) |
| |
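
The compare-and-set family can be used to build lock-free read-modify-write loops. The sketch below keeps a running maximum; it assumes the function swaps new_value into *store only when *store equals value and returns the value observed before the operation (the usual CAS convention), which the summary above does not spell out.

    #include <psp2kern/kernel/cpu.h>

    /* Raise *store to 'candidate' if candidate is larger (lock-free). */
    static void atomic_store_max(SceInt32 *store, SceInt32 candidate)
    {
        SceInt32 observed = *store;

        while (observed < candidate) {
            SceInt32 prev = ksceKernelAtomicCompareAndSet32(store, observed, candidate);
            if (prev == observed)
                break;           /* our value was written */
            observed = prev;     /* another core updated *store; retry */
        }
    }
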
| SceInt8 | ksceKernelAtomicAddAndGet8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicAddAndGet16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicAddAndGet32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicAddAndGet64 (SceInt64 *store, SceInt64 value) |
| |
| SceBool | ksceKernelAtomicAddUnless8 (SceInt8 *store, SceInt8 value, SceInt8 cmpv) |
| |
| SceBool | ksceKernelAtomicAddUnless16 (SceInt16 *store, SceInt16 value, SceInt16 cmpv) |
| |
| SceBool | ksceKernelAtomicAddUnless32 (SceInt32 *store, SceInt32 value, SceInt32 cmpv) |
| |
| SceBool | ksceKernelAtomicAddUnless64 (SceInt64 *store, SceInt64 value, SceInt64 cmpv) |
| |
| SceInt8 | ksceKernelAtomicSubAndGet8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicSubAndGet16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicSubAndGet32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicSubAndGet64 (SceInt64 *store, SceInt64 value) |
| |
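
The add/sub-and-get variants are convenient for reference counting. A hedged sketch, assuming the "AndGet" functions return the value after the operation has been applied:

    #include <psp2kern/kernel/cpu.h>

    typedef struct {
        SceInt32 refs;           /* starts at 1 when the object is created */
    } RefCounted;

    static void ref_acquire(RefCounted *obj)
    {
        ksceKernelAtomicAddAndGet32(&obj->refs, 1);
    }

    /* Returns 1 when the last reference was dropped and the object can be freed. */
    static int ref_release(RefCounted *obj)
    {
        return ksceKernelAtomicSubAndGet32(&obj->refs, 1) == 0;
    }
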
| SceInt8 | ksceKernelAtomicAndAndGet8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicAndAndGet16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicAndAndGet32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicAndAndGet64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicOrAndGet8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicOrAndGet16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicOrAndGet32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicOrAndGet64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicXorAndGet8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicXorAndGet16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicXorAndGet32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicXorAndGet64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicClearAndGet8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicClearAndGet16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicClearAndGet32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicClearAndGet64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicGetAndSet8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicGetAndSet16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicGetAndSet32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicGetAndSet64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicGetAndAdd8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicGetAndAdd16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicGetAndAdd32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicGetAndAdd64 (SceInt64 *store, SceInt64 value) |
| |
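
The "GetAnd" variants are fetch-style operations. The sketch below hands out ticket numbers and claims a one-shot flag; it assumes these functions return the value stored before the operation (fetch-and-add / test-and-set semantics), a convention not documented in the summary above.

    #include <psp2kern/kernel/cpu.h>

    /* Hand out monotonically increasing ticket numbers from a shared counter. */
    static SceInt32 take_ticket(SceInt32 *next_ticket)
    {
        return ksceKernelAtomicGetAndAdd32(next_ticket, 1);
    }

    /* Claim a one-shot flag: returns 1 only for the first caller to set it. */
    static int claim_once(SceInt32 *flag)
    {
        return ksceKernelAtomicGetAndSet32(flag, 1) == 0;
    }
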
| SceInt8 | ksceKernelAtomicGetAndSub8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicGetAndSub16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicGetAndSub32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicGetAndSub64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicGetAndAnd8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicGetAndAnd16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicGetAndAnd32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicGetAndAnd64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicGetAndOr8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicGetAndOr16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicGetAndOr32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicGetAndOr64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicGetAndXor8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicGetAndXor16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicGetAndXor32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicGetAndXor64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicGetAndClear8 (SceInt8 *store, SceInt8 value) |
| |
| SceInt16 | ksceKernelAtomicGetAndClear16 (SceInt16 *store, SceInt16 value) |
| |
| SceInt32 | ksceKernelAtomicGetAndClear32 (SceInt32 *store, SceInt32 value) |
| |
| SceInt64 | ksceKernelAtomicGetAndClear64 (SceInt64 *store, SceInt64 value) |
| |
| void | ksceKernelAtomicClearMask8 (SceInt8 *store, SceInt8 value) |
| |
| void | ksceKernelAtomicClearMask16 (SceInt16 *store, SceInt16 value) |
| |
| void | ksceKernelAtomicClearMask32 (SceInt32 *store, SceInt32 value) |
| |
| void | ksceKernelAtomicClearMask64 (SceInt64 *store, SceInt64 value) |
| |
| SceInt8 | ksceKernelAtomicDecIfPositive8 (SceInt8 *store) |
| |
| SceInt16 | ksceKernelAtomicDecIfPositive16 (SceInt16 *store) |
| |
| SceInt32 | ksceKernelAtomicDecIfPositive32 (SceInt32 *store) |
| |
| SceInt64 | ksceKernelAtomicDecIfPositive64 (SceInt64 *store) |
| |
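
The decrement-if-positive variants behave like a non-blocking counting resource. A sketch, assuming the function decrements only while the stored value is positive and returns the value it observed before the decrement (this return convention is an assumption):

    #include <psp2kern/kernel/cpu.h>

    /* Try to take one slot from a pool without blocking; returns 1 on success. */
    static int try_take_slot(SceInt32 *free_slots)
    {
        return ksceKernelAtomicDecIfPositive32(free_slots) > 0;
    }
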
| void | ksceKernelDcacheCleanInvalidateRange (void *start, unsigned int size) |
| | Clean and invalidate a range of the L1 dcache and the L2 cache.
|
| |
| void | ksceKernelDcacheCleanInvalidateRangeForL1WBWA (void *start, unsigned int size) |
| | Clean and invalidate a range of the L1 dcache.
|
| |
| void | ksceKernelDcacheCleanRange (void *start, unsigned int size) |
| | Clean a range of the L1 dcache and the L2 cache.
|
| |
| void | ksceKernelDcacheCleanRangeForL1WBWA (void *start, unsigned int size) |
| | Clean a range of the L1 dcache.
|
| |
| void | ksceKernelDcacheInvalidateRange (void *start, unsigned int size) |
| | Invalidate a range of the L1 dcache and the L2 cache.
|
| |
| void | ksceKernelDcacheInvalidateRangeForL1WBWA (void *start, unsigned int size) |
| | Invalidate a range of the L1 dcache.
|
| |
| void | ksceKernelIcacheInvalidateRange (void *start, unsigned int size) |
| | Invalidate a range of the L1 icache and the L2 cache.
|
| |
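
A common use of the range operations is publishing freshly written instructions: the new bytes are cleaned out of the data cache so they reach memory, and any stale lines are dropped from the instruction cache before the code is executed. A sketch, assuming the target buffer is already mapped writable and executable:

    #include <psp2kern/kernel/cpu.h>

    static void flush_patched_code(void *code, unsigned int size)
    {
        /* Write back the modified data-cache lines (L1 and L2) to memory. */
        ksceKernelDcacheCleanRange(code, size);

        /* Discard any stale instruction-cache lines covering the range. */
        ksceKernelIcacheInvalidateRange(code, size);
    }
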
| void | ksceKernelL1DcacheClean (void *start) |
| | Clean one line of the L1 dcache.
|
| |
| void | ksceKernelL1DcacheCleanRange (void *start, unsigned int size) |
| | Clean a range of the L1 dcache.
|
| |
| void | ksceKernelL1DcacheCleanAll (void) |
| | Clean the entire L1 dcache.
|
| |
| void | ksceKernelL1DcacheCleanInvalidate (void *start) |
| | Clean and invalidate one line of the L1 dcache.
|
| |
| void | ksceKernelL1DcacheCleanInvalidateRange (void *start, unsigned int size) |
| | Clean and invalidate a range of the L1 dcache.
|
| |
| void | ksceKernelL1DcacheCleanInvalidateAll (void) |
| | Clean and invalidate the entire L1 dcache.
|
| |
| void | ksceKernelL1DcacheInvalidate (void *start) |
| | Invalidate one line of the L1 dcache.
|
| |
| void | ksceKernelL1DcacheInvalidateRange (void *start, unsigned int size) |
| | Invalidate a range of the L1 dcache.
|
| |
| void | ksceKernelL1DcacheInvalidateAll (void) |
| | Invalidate the entire L1 dcache.
|
| |
| void | ksceKernelL1IcacheInvalidateRange (void *start, unsigned int size) |
| | Invalidate a range of the L1 icache.
|
| |
| void | ksceKernelL1IcacheInvalidateEntire (void) |
| | Invalidate the entire L1 icache.
|
| |
| void | ksceKernelL1IcacheInvalidateEntireAllCore (void) |
| | Invalidate the entire L1 icache on all cores.
|
| |
| | VITASDK_BUILD_ASSERT_EQ (8, SceCorelockContext) |
| |
| void | ksceKernelCorelockInitialize (SceCorelockContext *ctx) |
| |
| void | ksceKernelCorelockLock (SceCorelockContext *ctx, SceUInt32 core) |
| |
| void | ksceKernelCorelockUnlock (SceCorelockContext *ctx) |
| |
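
A corelock guards a shared resource between cores; the lock call takes the ID of the locking core. A sketch, assuming the core argument is the value returned by ksceKernelCpuId() and that the context only needs to be initialized once:

    #include <psp2kern/kernel/cpu.h>

    static SceCorelockContext g_corelock;
    static int g_shared_state;

    static void shared_state_init(void)
    {
        ksceKernelCorelockInitialize(&g_corelock);
    }

    static void shared_state_set(int new_state)
    {
        ksceKernelCorelockLock(&g_corelock, (SceUInt32)ksceKernelCpuId());
        g_shared_state = new_state;        /* short critical section */
        ksceKernelCorelockUnlock(&g_corelock);
    }
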
| int | ksceKernelMMUGetMemoryType (void *vaddr) |
| |
| int | ksceKernelMMUVAtoPA (void *va, SceUIntPtr *pa) |
| |
| int | ksceKernelMMUVAtoPAWithMode (int mode, void *va, SceUIntPtr *pa) |
| |
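
ksceKernelMMUVAtoPA resolves the physical address backing a virtual address. A sketch, assuming a return value of 0 means success and a negative value an error, as is conventional for these exports:

    #include <psp2kern/kernel/cpu.h>

    static int resolve_pa(void *va, SceUIntPtr *out_pa)
    {
        SceUIntPtr pa = 0;
        int res = ksceKernelMMUVAtoPA(va, &pa);
        if (res == 0)
            *out_pa = pa;                  /* translation succeeded */
        return res;
    }
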
| void | ksceKernelPleFlushRequest (void) |
| |
| void | ksceKernelRWSpinlockLowReadLock (SceKernelRWSpinlock *lock) |
| | Acquire a RWSpinlock for reading.
|
| |
| int | ksceKernelRWSpinlockLowTryReadLock (SceKernelRWSpinlock *lock) |
| | Attempt to acquire a RWSpinlock for reading.
|
| |
| void | ksceKernelRWSpinlockLowReadUnlock (SceKernelRWSpinlock *lock) |
| | Unlock a RWSpinlock previously acquired for reading.
|
| |
| SceKernelIntrStatus | ksceKernelRWSpinlockLowReadLockCpuSuspendIntr (SceKernelRWSpinlock *lock) |
| | Acquire a RWSpinlock for reading and suspend interrupts if necessary.
|
| |
| SceKernelIntrStatus | ksceKernelRWSpinlockLowTryReadLockCpuSuspendIntr (SceKernelRWSpinlock *lock) |
| | Attempt to acquire a RWSpinlock for reading and suspend interrupts if necessary.
|
| |
| void | ksceKernelRWSpinlockLowReadUnlockCpuResumeIntr (SceKernelRWSpinlock *lock, SceKernelIntrStatus intr_status) |
| | Release a RWSpinlock previously acquired for reading and resume interrupts if necessary.
|
| |
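
Readers that may run concurrently with each other, but not with writers, can take the read side together with interrupt suspension. A sketch, assuming a statically zero-initialized SceKernelRWSpinlock starts out unlocked:

    #include <psp2kern/kernel/cpu.h>

    static SceKernelRWSpinlock g_table_lock;   /* assumed unlocked when zeroed */
    static int g_table[16];

    static int read_table_entry(unsigned int index)
    {
        SceKernelIntrStatus intr =
            ksceKernelRWSpinlockLowReadLockCpuSuspendIntr(&g_table_lock);

        int value = g_table[index & 15];

        ksceKernelRWSpinlockLowReadUnlockCpuResumeIntr(&g_table_lock, intr);
        return value;
    }
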
| void | ksceKernelRWSpinlockLowWriteLock (SceKernelRWSpinlock *lock) |
| | Acquire a RWSpinlock for writing.
|
| |
| int | ksceKernelRWSpinlockLowTryWriteLock (SceKernelRWSpinlock *lock) |
| | Attempt to acquire a RWSpinlock for writing.
|
| |
| void | ksceKernelRWSpinlockLowWriteUnlock (SceKernelRWSpinlock *lock) |
| | Unlock a RWSpinlock previously acquired for writing.
|
| |
| SceKernelIntrStatus | ksceKernelRWSpinlockLowWriteLockCpuSuspendIntr (SceKernelRWSpinlock *lock) |
| | Acquire a RWSpinlock for writing and suspend interrupts if necessary.
|
| |
| SceKernelIntrStatus | ksceKernelRWSpinlockLowTryWriteLockCpuSuspendIntr (SceKernelRWSpinlock *lock) |
| | Attempt to acquire a RWSpinlock for writing and suspend interrupts if necessary.
|
| |
| void | ksceKernelRWSpinlockLowWriteUnlockCpuResumeIntr (SceKernelRWSpinlock *lock, SceKernelIntrStatus intr_status) |
| | Release a RWSpinlock acquired for writing and resume interrupts if necessary.
|
| |
| void | ksceKernelSpinlockLowLock (SceKernelSpinlock *lock) |
| | Acquire a spinlock.
|
| |
| int | ksceKernelSpinlockLowTrylock (SceKernelSpinlock *lock) |
| | Attempt to acquire a spinlock.
|
| |
| void | ksceKernelSpinlockLowUnlock (SceKernelSpinlock *lock) |
| | Release a previously acquired spinlock.
|
| |
| SceKernelIntrStatus | ksceKernelSpinlockLowLockCpuSuspendIntr (SceKernelSpinlock *lock) |
| | Acquire a spinlock and suspend interrupts if necessary.
|
| |
| SceKernelIntrStatus | ksceKernelSpinlockLowTrylockCpuSuspendIntr (SceKernelSpinlock *lock) |
| | Attempt to acquire a spinlock and suspend interrupts if necessary.
|
| |
| void | ksceKernelSpinlockLowUnlockCpuResumeIntr (SceKernelSpinlock *lock, SceKernelIntrStatus intr_status) |
| | Release a previously acquired spinlock and resume interrupts if necessary.
|
| |
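
The plain spinlock variants follow the same pattern; the CpuSuspendIntr/CpuResumeIntr forms additionally save and restore the interrupt state across the critical section. A sketch, assuming a statically zero-initialized SceKernelSpinlock starts out unlocked:

    #include <psp2kern/kernel/cpu.h>

    static SceKernelSpinlock g_cfg_lock;       /* assumed unlocked when zeroed */
    static int g_cfg_value;

    static void set_config(int value)
    {
        SceKernelIntrStatus intr = ksceKernelSpinlockLowLockCpuSuspendIntr(&g_cfg_lock);

        g_cfg_value = value;                   /* short critical section */

        ksceKernelSpinlockLowUnlockCpuResumeIntr(&g_cfg_lock, intr);
    }
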
Exports for Kernel.