diff --git a/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_cpu.hpp b/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_cpu.hpp
index d0dd41d49..03311a65d 100644
--- a/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_cpu.hpp
+++ b/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_cpu.hpp
@@ -16,7 +16,7 @@
 #pragma once
 #include
 #include
-#include
+#include
 
 namespace ams::kern::arch::arm64::cpu {
 
diff --git a/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_k_process_page_table.hpp b/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_k_process_page_table.hpp
index 0639a11c9..1376a865f 100644
--- a/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_k_process_page_table.hpp
+++ b/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_k_process_page_table.hpp
@@ -52,6 +52,10 @@ namespace ams::kern::arch::arm64 {
                 return this->page_table.SetMaxHeapSize(size);
             }
 
+            Result QueryInfo(KMemoryInfo *out_info, ams::svc::PageInfo *out_page_info, KProcessAddress addr) const {
+                return this->page_table.QueryInfo(out_info, out_page_info, addr);
+            }
+
             Result MapIo(KPhysicalAddress phys_addr, size_t size, KMemoryPermission perm) {
                 return this->page_table.MapIo(phys_addr, size, perm);
             }
@@ -84,7 +88,7 @@ namespace ams::kern::arch::arm64 {
                 return this->page_table.GetPhysicalAddress(out, address);
             }
 
-            bool CanContain(KProcessAddress addr, size_t size) const { return this->page_table.CanContain(addr, size); }
+            bool Contains(KProcessAddress addr, size_t size) const { return this->page_table.Contains(addr, size); }
 
             bool CanContain(KProcessAddress addr, size_t size, KMemoryState state) const { return this->page_table.CanContain(addr, size, state); }
 
             KProcessAddress GetAddressSpaceStart() const { return this->page_table.GetAddressSpaceStart(); }
diff --git a/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_userspace_memory_access.hpp b/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_userspace_memory_access.hpp
index 828b49f17..2fe110342 100644
--- a/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_userspace_memory_access.hpp
+++ b/libraries/libmesosphere/include/mesosphere/arch/arm64/kern_userspace_memory_access.hpp
@@ -18,13 +18,40 @@
 
 namespace ams::kern::arch::arm64 {
 
-    void UserspaceMemoryAccessFunctionAreaBegin();
+    void UserspaceAccessFunctionAreaBegin();
 
-    bool StoreDataCache(uintptr_t start, uintptr_t end);
-    bool FlushDataCache(uintptr_t start, uintptr_t end);
-    bool InvalidateDataCache(uintptr_t start, uintptr_t end);
-    bool InvalidateInstructionCache(uintptr_t start, uintptr_t end);
+    class UserspaceAccess {
+        public:
+            static bool CopyMemoryFromUser(void *dst, const void *src, size_t size);
+            static bool CopyMemoryFromUserAligned32Bit(void *dst, const void *src, size_t size);
+            static bool CopyMemoryFromUserAligned64Bit(void *dst, const void *src, size_t size);
+            static bool CopyMemoryFromUserSize32Bit(void *dst, const void *src);
+            static s32 CopyStringFromUser(void *dst, const void *src, size_t size);
 
-    void UserspaceMemoryAccessFunctionAreaEnd();
+            static bool CopyMemoryToUser(void *dst, const void *src, size_t size);
+            static bool CopyMemoryToUserAligned32Bit(void *dst, const void *src, size_t size);
+            static bool CopyMemoryToUserAligned64Bit(void *dst, const void *src, size_t size);
+            static bool CopyMemoryToUserSize32Bit(void *dst, const void *src);
+            static s32 CopyStringToUser(void *dst, const void *src, size_t size);
+
+            static bool ClearMemory(void *dst, size_t size);
+            static bool ClearMemoryAligned32Bit(void *dst, size_t size);
+            static bool ClearMemorySize32Bit(void *dst);
+
+            static bool StoreDataCache(uintptr_t start, uintptr_t end);
+            static bool FlushDataCache(uintptr_t start, uintptr_t end);
+            static bool InvalidateDataCache(uintptr_t start, uintptr_t end);
+            static bool InvalidateInstructionCache(uintptr_t start, uintptr_t end);
+
+            static bool ReadIoMemory32Bit(void *dst, const void *src, size_t size);
+            static bool ReadIoMemory16Bit(void *dst, const void *src, size_t size);
+            static bool ReadIoMemory8Bit(void *dst, const void *src, size_t size);
+            static bool WriteIoMemory32Bit(void *dst, const void *src, size_t size);
+            static bool WriteIoMemory16Bit(void *dst, const void *src, size_t size);
+            static bool WriteIoMemory8Bit(void *dst, const void *src, size_t size);
+    };
+
+
+    void UserspaceAccessFunctionAreaEnd();
 
 }
\ No newline at end of file
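A minimal usage sketch for the fault-tolerant primitives declared above: each UserspaceAccess routine returns false if the access faults, and callers convert that into a Result. The helper below (ReadUserU32) is illustrative only and not part of this commit.

    /* Sketch: read a 32-bit value from a user buffer, assuming the UserspaceAccess */
    /* declarations above. ReadUserU32 is a hypothetical helper, not kernel API.    */
    #include <mesosphere.hpp>

    namespace ams::kern {

        Result ReadUserU32(u32 *out, const void *user_src) {
            /* CopyMemoryFromUserSize32Bit performs one unprivileged 32-bit load and   */
            /* reports false when the access faults, so we translate that to a Result. */
            R_UNLESS(UserspaceAccess::CopyMemoryFromUserSize32Bit(out, user_src), svc::ResultInvalidPointer());
            return ResultSuccess();
        }

    }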
diff --git a/libraries/libmesosphere/include/mesosphere/kern_k_auto_object.hpp b/libraries/libmesosphere/include/mesosphere/kern_k_auto_object.hpp
index 7a87ea9d6..460dbe0f3 100644
--- a/libraries/libmesosphere/include/mesosphere/kern_k_auto_object.hpp
+++ b/libraries/libmesosphere/include/mesosphere/kern_k_auto_object.hpp
@@ -221,6 +221,9 @@ namespace ams::kern {
             constexpr ALWAYS_INLINE void Reset(T *o) {
                 KScopedAutoObject(o).Swap(*this);
             }
+
+            constexpr ALWAYS_INLINE bool IsNull() const { return this->obj == nullptr; }
+            constexpr ALWAYS_INLINE bool IsNotNull() const { return this->obj != nullptr; }
     };
 
diff --git a/libraries/libmesosphere/include/mesosphere/kern_k_handle_table.hpp b/libraries/libmesosphere/include/mesosphere/kern_k_handle_table.hpp
index 9f07f34b7..6f8ceeebc 100644
--- a/libraries/libmesosphere/include/mesosphere/kern_k_handle_table.hpp
+++ b/libraries/libmesosphere/include/mesosphere/kern_k_handle_table.hpp
@@ -26,6 +26,9 @@ namespace ams::kern {
         return util::BitPack32{handle};
     }
 
+    class KProcess;
+    class KThread;
+
     class KHandleTable {
         NON_COPYABLE(KHandleTable);
         NON_MOVEABLE(KHandleTable);
@@ -125,6 +128,19 @@
             template<typename T>
             ALWAYS_INLINE KScopedAutoObject<T> GetObject(ams::svc::Handle handle) const {
                 MESOSPHERE_ASSERT_THIS();
+
+                /* Handle pseudo-handles. */
+                if constexpr (std::is_same<T, KProcess>::value) {
+                    if (handle == ams::svc::PseudoHandle::CurrentProcess) {
+                        return GetCurrentProcessPointer();
+                    }
+                } else if constexpr (std::is_same<T, KThread>::value) {
+                    if (handle == ams::svc::PseudoHandle::CurrentThread) {
+                        return GetCurrentThreadPointer();
+                    }
+                }
+
+                /* Lock and look up in table. */
                 KScopedDisableDispatch dd;
                 KScopedSpinLock lk(this->lock);
 
@@ -139,6 +155,21 @@
             ALWAYS_INLINE KScopedAutoObject<T> GetObjectForIpc(ams::svc::Handle handle) const {
                 static_assert(!std::is_base_of<KInterruptEvent, T>::value);
 
+                /* Handle pseudo-handles. */
+                if constexpr (std::is_same<T, KProcess>::value) {
+                    if (handle == ams::svc::PseudoHandle::CurrentProcess) {
+                        return GetCurrentProcessPointer();
+                    }
+                } else if constexpr (std::is_same<T, KThread>::value) {
+                    if (handle == ams::svc::PseudoHandle::CurrentThread) {
+                        return GetCurrentThreadPointer();
+                    }
+                }
+
+                /* Lock and look up in table. */
+                KScopedDisableDispatch dd;
+                KScopedSpinLock lk(this->lock);
+
                 KAutoObject *obj = this->GetObjectImpl(handle);
                 if (obj->DynamicCast<KInterruptEvent *>() != nullptr) {
                     return nullptr;
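With the pseudo-handle support above, ams::svc::PseudoHandle::CurrentProcess and CurrentThread resolve directly to the current process/thread pointers and never touch the handle table or its spin lock. A minimal, hypothetical call site using only members added in this diff (GetObject<KProcess>, IsNotNull, GetName):

    /* Sketch: resolving the current process via its pseudo-handle. */
    KScopedAutoObject process = GetCurrentProcess().GetHandleTable().GetObject<KProcess>(ams::svc::PseudoHandle::CurrentProcess);
    if (process.IsNotNull()) {
        /* ... e.g. log process->GetName(), or query process->GetPageTable() ... */
    }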
diff --git a/libraries/libmesosphere/include/mesosphere/kern_k_page_table_base.hpp b/libraries/libmesosphere/include/mesosphere/kern_k_page_table_base.hpp
index 2b0de139b..aa121f6a0 100644
--- a/libraries/libmesosphere/include/mesosphere/kern_k_page_table_base.hpp
+++ b/libraries/libmesosphere/include/mesosphere/kern_k_page_table_base.hpp
@@ -172,11 +172,11 @@ namespace ams::kern {
             constexpr bool IsKernel() const { return this->is_kernel; }
             constexpr bool IsAslrEnabled() const { return this->enable_aslr; }
 
-            constexpr bool CanContain(KProcessAddress addr) const {
+            constexpr bool Contains(KProcessAddress addr) const {
                 return this->address_space_start <= addr && addr <= this->address_space_end - 1;
             }
 
-            constexpr bool CanContain(KProcessAddress addr, size_t size) const {
+            constexpr bool Contains(KProcessAddress addr, size_t size) const {
                 return this->address_space_start <= addr && addr < addr + size && addr + size - 1 <= this->address_space_end - 1;
             }
 
@@ -249,6 +249,7 @@
             Result SetProcessMemoryPermission(KProcessAddress addr, size_t size, ams::svc::MemoryPermission perm);
             Result SetHeapSize(KProcessAddress *out, size_t size);
             Result SetMaxHeapSize(size_t size);
+            Result QueryInfo(KMemoryInfo *out_info, ams::svc::PageInfo *out_page_info, KProcessAddress addr) const;
             Result MapIo(KPhysicalAddress phys_addr, size_t size, KMemoryPermission perm);
             Result MapStatic(KPhysicalAddress phys_addr, size_t size, KMemoryPermission perm);
             Result MapRegion(KMemoryRegionType region_type, KMemoryPermission perm);
diff --git a/libraries/libmesosphere/include/mesosphere/kern_k_process.hpp b/libraries/libmesosphere/include/mesosphere/kern_k_process.hpp
index 7c35d8963..1b173d86f 100644
--- a/libraries/libmesosphere/include/mesosphere/kern_k_process.hpp
+++ b/libraries/libmesosphere/include/mesosphere/kern_k_process.hpp
@@ -123,6 +123,8 @@ namespace ams::kern {
             Result Initialize(const ams::svc::CreateProcessParameter &params, const KPageGroup &pg, const u32 *caps, s32 num_caps, KResourceLimit *res_limit, KMemoryManager::Pool pool);
 
+            constexpr const char *GetName() const { return this->name; }
+
             constexpr u64 GetProcessId() const { return this->process_id; }
 
             constexpr u64 GetCoreMask() const { return this->capabilities.GetCoreMask(); }
diff --git a/libraries/libmesosphere/include/mesosphere/kern_select_userspace_memory_access.hpp b/libraries/libmesosphere/include/mesosphere/kern_select_userspace_memory_access.hpp
new file mode 100644
index 000000000..121eb456f
--- /dev/null
+++ b/libraries/libmesosphere/include/mesosphere/kern_select_userspace_memory_access.hpp
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2018-2020 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+#pragma once
+#include <mesosphere/kern_common.hpp>
+
+#ifdef ATMOSPHERE_ARCH_ARM64
+    #include <mesosphere/arch/arm64/kern_userspace_memory_access.hpp>
+
+    namespace ams::kern {
+
+        using ams::kern::arch::arm64::UserspaceAccess;
+
+    }
+
+#else
+    #error "Unknown architecture for CPU"
+#endif
+
diff --git a/libraries/libmesosphere/include/mesosphere/svc/kern_svc_k_user_pointer.hpp b/libraries/libmesosphere/include/mesosphere/svc/kern_svc_k_user_pointer.hpp
index 1c0968b8d..35f290f37 100644
--- a/libraries/libmesosphere/include/mesosphere/svc/kern_svc_k_user_pointer.hpp
+++ b/libraries/libmesosphere/include/mesosphere/svc/kern_svc_k_user_pointer.hpp
@@ -16,18 +16,187 @@
 #pragma once
 #include
 #include
+#include
 
 namespace ams::kern::svc {
 
-    /* TODO: Actually implement this type. */
-    template<typename T>
-    struct KUserPointer : impl::KUserPointerTag {
+    namespace impl {
+
+        /* TODO: C++20
+        template<typename T>
+        concept Pointer = std::is_pointer<T>::value;
+
+        template<typename T>
+        concept NonConstPointer = Pointer<T> && !std::is_const<typename std::remove_pointer<T>::type>::value;
+
+        template<typename T>
+        concept ConstPointer = Pointer<T> && std::is_const<typename std::remove_pointer<T>::type>::value;
+
+        template<typename T, size_t N>
+        concept AlignedNPointer = Pointer<T> && alignof(typename std::remove_pointer<T>::type) >= N && util::IsAligned(sizeof(typename std::remove_pointer<T>::type), N);
+
+        template<typename T>
+        concept Aligned8Pointer = AlignedNPointer<T, sizeof(u8)>;
+
+        template<typename T>
+        concept Aligned16Pointer = AlignedNPointer<T, sizeof(u16)> && Aligned8<T>;
+
+        template<typename T>
+        concept Aligned32Pointer = AlignedNPointer<T, sizeof(u32)> && Aligned16<T>;
+
+        template<typename T>
+        concept Aligned64Pointer = AlignedNPointer<T, sizeof(u64)> && Aligned32<T>;
+        */
+
+        template<typename T>
+        constexpr inline bool IsPointer = std::is_pointer<T>::value;
+
+        template<typename T>
+        constexpr inline bool IsConstPointer = IsPointer<T> && std::is_const<typename std::remove_pointer<T>::type>::value;
+
+        template<typename T>
+        constexpr inline bool IsNonConstPointer = IsPointer<T> && !std::is_const<typename std::remove_pointer<T>::type>::value;
+
+        template<typename T, size_t N>
+        constexpr inline bool IsAlignedNPointer = IsPointer<T> && alignof(typename std::remove_pointer<T>::type) >= N && util::IsAligned(sizeof(typename std::remove_pointer<T>::type), N);
+
+        template<typename _T, typename = void> /* requires Aligned8Pointer<_T> */
+        class KUserPointerImplTraits {
+            static_assert(IsAlignedNPointer<_T, sizeof(u8)>);
+            public:
+                using T = typename std::remove_const<typename std::remove_pointer<_T>::type>::type;
+            public:
+                static Result CopyFromUserspace(void *dst, const void *src, size_t size) {
+                    R_UNLESS(UserspaceAccess::CopyMemoryFromUser(dst, src, size), svc::ResultInvalidPointer());
+                    return ResultSuccess();
+                }
+
+                static Result CopyToUserspace(void *dst, const void *src, size_t size) {
+                    R_UNLESS(UserspaceAccess::CopyMemoryToUser(dst, src, size), svc::ResultInvalidPointer());
+                    return ResultSuccess();
+                }
+        };
+
+        template<typename _T> /* requires Aligned32Pointer<_T> */
+        class KUserPointerImplTraits<_T, typename std::enable_if<IsAlignedNPointer<_T, sizeof(u32)> && !IsAlignedNPointer<_T, sizeof(u64)>>::type> {
+            static_assert(IsAlignedNPointer<_T, sizeof(u32)>);
+            public:
+                using T = typename std::remove_const<typename std::remove_pointer<_T>::type>::type;
+            public:
+                static Result CopyFromUserspace(void *dst, const void *src, size_t size) {
+                    R_UNLESS(UserspaceAccess::CopyMemoryFromUserAligned32Bit(dst, src, size), svc::ResultInvalidPointer());
+                    return ResultSuccess();
+                }
+
+                static Result CopyToUserspace(void *dst, const void *src, size_t size) {
+                    R_UNLESS(UserspaceAccess::CopyMemoryToUserAligned32Bit(dst, src, size), svc::ResultInvalidPointer());
+                    return ResultSuccess();
+                }
+        };
+
+        template<typename _T> /* requires Aligned64Pointer<_T> */
+        class KUserPointerImplTraits<_T, typename std::enable_if<IsAlignedNPointer<_T, sizeof(u64)>>::type> {
+            static_assert(IsAlignedNPointer<_T, sizeof(u64)>);
+            public:
+                using T = typename std::remove_const<typename std::remove_pointer<_T>::type>::type;
+            public:
+                static Result CopyFromUserspace(void *dst, const void *src, size_t size) {
+                    R_UNLESS(UserspaceAccess::CopyMemoryFromUserAligned64Bit(dst, src, size), svc::ResultInvalidPointer());
+                    return ResultSuccess();
+                }
+
+                static Result CopyToUserspace(void *dst, const void *src, size_t size) {
+                    R_UNLESS(UserspaceAccess::CopyMemoryToUserAligned64Bit(dst, src, size), svc::ResultInvalidPointer());
+                    return ResultSuccess();
+                }
+        };
+
+        template<typename _T> /* requires Aligned8Pointer<_T> */
+        class KUserPointerImpl : impl::KUserPointerTag {
+            private:
+                using Traits = KUserPointerImplTraits<_T>;
+            protected:
+                using T = typename std::remove_const<typename std::remove_pointer<_T>::type>::type;
+            private:
+                _T *ptr;
+            private:
+                Result CopyToImpl(void *p, size_t size) const {
+                    return Traits::CopyFromUserspace(p, this->ptr, size);
+                }
+
+                Result CopyFromImpl(const void *p, size_t size) const {
+                    return Traits::CopyToUserspace(this->ptr, p, size);
+                }
+            protected:
+                Result CopyTo(T *p) const { return this->CopyToImpl(p, sizeof(*p)); }
+                Result CopyFrom(const T *p) const { return this->CopyFromImpl(p, sizeof(*p)); }
+
+                Result CopyArrayElementTo(T *p, size_t index) const { return Traits::CopyFromUserspace(p, this->ptr + index, sizeof(*p)); }
+                Result CopyArrayElementFrom(const T *p, size_t index) const { return Traits::CopyToUserspace(this->ptr + index, p, sizeof(*p)); }
+
+                Result CopyArrayTo(T *arr, size_t count) const { return this->CopyToImpl(arr, sizeof(*arr) * count); }
+                Result CopyArrayFrom(const T *arr, size_t count) const { return this->CopyFromImpl(arr, sizeof(*arr) * count); }
+
+                constexpr bool IsNull() const { return this->ptr == nullptr; }
+        };
+
+        template<>
+        class KUserPointerImpl<const char *> : impl::KUserPointerTag {
+            private:
+                using Traits = KUserPointerImplTraits<const char *>;
+            protected:
+                using T = char;
+            private:
+                const char *ptr;
+            protected:
+                Result CopyStringTo(char *dst, size_t size) const {
+                    static_assert(sizeof(char) == 1);
+                    R_UNLESS(UserspaceAccess::CopyStringFromUser(dst, this->ptr, size) > 0, svc::ResultInvalidPointer());
+                    return ResultSuccess();
+                }
+
+                Result CopyArrayElementTo(char *dst, size_t index) const {
+                    return Traits::CopyFromUserspace(dst, this->ptr + index, sizeof(*dst));
+                }
+
+                constexpr bool IsNull() const { return this->ptr == nullptr; }
+        };
+
+    }
+
+    template<typename T, typename = void>
+    class KUserPointer;
+
+    template<typename T> /* requires impl::ConstPointer<T> */
+    struct KUserPointer<T, typename std::enable_if<impl::IsConstPointer<T>>::type> : public impl::KUserPointerImpl<T> {
         public:
-            static_assert(std::is_pointer<T>::value);
-            static constexpr bool IsInput = std::is_const<typename std::remove_pointer<T>::type>::value;
-        private:
-            T pointer;
+            static constexpr bool IsInput = true;
+        public:
+            using impl::KUserPointerImpl<T>::CopyTo;
+            using impl::KUserPointerImpl<T>::CopyArrayElementTo;
+            using impl::KUserPointerImpl<T>::CopyArrayTo;
+            using impl::KUserPointerImpl<T>::IsNull;
     };
 
+    template<typename T> /* requires impl::NonConstPointer<T> */
+    struct KUserPointer<T, typename std::enable_if<impl::IsNonConstPointer<T>>::type> : public impl::KUserPointerImpl<T> {
+        public:
+            static constexpr bool IsInput = false;
+        public:
+            using impl::KUserPointerImpl<T>::CopyFrom;
+            using impl::KUserPointerImpl<T>::CopyArrayElementFrom;
+            using impl::KUserPointerImpl<T>::CopyArrayFrom;
+            using impl::KUserPointerImpl<T>::IsNull;
+    };
+
+    template<>
+    struct KUserPointer<const char *> : public impl::KUserPointerImpl<const char *> {
+        public:
+            static constexpr bool IsInput = true;
+        public:
+            using impl::KUserPointerImpl<const char *>::CopyStringTo;
+            using impl::KUserPointerImpl<const char *>::CopyArrayElementTo;
+            using impl::KUserPointerImpl<const char *>::IsNull;
+    };
 
 }
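The traits above pick a copy routine from the pointee's alignment and size; the sketch below shows the selection at compile time and a hypothetical handler writing an output structure through KUserPointer. OutStruct and WriteResultToUser are illustrative, not part of this commit.

    /* Sketch, assuming the definitions in kern_svc_k_user_pointer.hpp above. */
    #include <mesosphere.hpp>

    namespace ams::kern::svc {

        struct OutStruct { u64 value; u64 flags; };

        /* 8-byte aligned, size a multiple of 8 -> the Aligned64 traits (CopyMemoryToUserAligned64Bit). */
        static_assert(impl::IsAlignedNPointer<OutStruct *, sizeof(u64)>);

        /* 4-byte aligned but not 8 -> the Aligned32 traits. */
        static_assert(impl::IsAlignedNPointer<u32 *, sizeof(u32)> && !impl::IsAlignedNPointer<u32 *, sizeof(u64)>);

        Result WriteResultToUser(KUserPointer<OutStruct *> out, const OutStruct &value) {
            /* CopyFrom copies sizeof(OutStruct) bytes to the user address, or returns */
            /* svc::ResultInvalidPointer() if the unprivileged store faults.           */
            return out.CopyFrom(std::addressof(value));
        }

    }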
diff --git a/libraries/libmesosphere/source/arch/arm64/kern_cpu.cpp b/libraries/libmesosphere/source/arch/arm64/kern_cpu.cpp
index 482e37915..733ba1744 100644
--- a/libraries/libmesosphere/source/arch/arm64/kern_cpu.cpp
+++ b/libraries/libmesosphere/source/arch/arm64/kern_cpu.cpp
@@ -294,7 +294,7 @@ namespace ams::kern::arch::arm64::cpu {
     ALWAYS_INLINE Result InvalidateDataCacheRange(uintptr_t start, uintptr_t end) {
         MESOSPHERE_ASSERT(util::IsAligned(start, DataCacheLineSize));
         MESOSPHERE_ASSERT(util::IsAligned(end, DataCacheLineSize));
-        R_UNLESS(arm64::InvalidateDataCache(start, end), svc::ResultInvalidCurrentMemory());
+        R_UNLESS(UserspaceAccess::InvalidateDataCache(start, end), svc::ResultInvalidCurrentMemory());
         DataSynchronizationBarrier();
         return ResultSuccess();
     }
@@ -302,7 +302,7 @@
     ALWAYS_INLINE Result StoreDataCacheRange(uintptr_t start, uintptr_t end) {
         MESOSPHERE_ASSERT(util::IsAligned(start, DataCacheLineSize));
         MESOSPHERE_ASSERT(util::IsAligned(end, DataCacheLineSize));
-        R_UNLESS(arm64::StoreDataCache(start, end), svc::ResultInvalidCurrentMemory());
+        R_UNLESS(UserspaceAccess::StoreDataCache(start, end), svc::ResultInvalidCurrentMemory());
         DataSynchronizationBarrier();
         return ResultSuccess();
     }
@@ -310,7 +310,7 @@
     ALWAYS_INLINE Result FlushDataCacheRange(uintptr_t start, uintptr_t end) {
         MESOSPHERE_ASSERT(util::IsAligned(start, DataCacheLineSize));
         MESOSPHERE_ASSERT(util::IsAligned(end, DataCacheLineSize));
-        R_UNLESS(arm64::FlushDataCache(start, end), svc::ResultInvalidCurrentMemory());
+        R_UNLESS(UserspaceAccess::FlushDataCache(start, end), svc::ResultInvalidCurrentMemory());
         DataSynchronizationBarrier();
         return ResultSuccess();
     }
@@ -318,7 +318,7 @@
     ALWAYS_INLINE Result InvalidateInstructionCacheRange(uintptr_t start, uintptr_t end) {
         MESOSPHERE_ASSERT(util::IsAligned(start, InstructionCacheLineSize));
         MESOSPHERE_ASSERT(util::IsAligned(end, InstructionCacheLineSize));
-        R_UNLESS(arm64::InvalidateInstructionCache(start, end), svc::ResultInvalidCurrentMemory());
+        R_UNLESS(UserspaceAccess::InvalidateInstructionCache(start, end), svc::ResultInvalidCurrentMemory());
         EnsureInstructionConsistency();
         return ResultSuccess();
     }
diff --git a/libraries/libmesosphere/source/arch/arm64/kern_userspace_memory_access_asm.s b/libraries/libmesosphere/source/arch/arm64/kern_userspace_memory_access_asm.s
index 99c474d27..6dd53045f 100644
--- a/libraries/libmesosphere/source/arch/arm64/kern_userspace_memory_access_asm.s
+++ b/libraries/libmesosphere/source/arch/arm64/kern_userspace_memory_access_asm.s
@@ -14,20 +14,58 @@
  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
  */
 
-/* ams::kern::arch::arm64::UserspaceMemoryAccessFunctionAreaBegin() */
-.section .text._ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv, "ax", %progbits
-.global _ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv
-.type _ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv, %function
-_ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv:
+/* ams::kern::arch::arm64::UserspaceAccessFunctionAreaBegin() */
+.section .text._ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv
+.type _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv, %function
+_ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv:
     /* NOTE: This is not a real function, and only exists as a label for safety. */
 
-/* ================ All Userspace Memory Functions after this line. ================ */
+/* ================ All Userspace Access Functions after this line. ================ */
 
-/* ams::kern::arch::arm64::StoreDataCache(uintptr_t start, uintptr_t end) */
-.section .text._ZN3ams4kern4arch5arm6414StoreDataCacheEmm, "ax", %progbits
-.global _ZN3ams4kern4arch5arm6414StoreDataCacheEmm
-.type _ZN3ams4kern4arch5arm6414StoreDataCacheEmm, %function
-_ZN3ams4kern4arch5arm6414StoreDataCacheEmm:
+/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryToUserAligned64Bit(void *dst, const void *src, size_t size) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm:
+    /* Check if there are 0x40 bytes to copy */
+    cmp     x2, #0x3F
+    b.ls    1f
+    ldp     x4, x5, [x1, #0x00]
+    ldp     x6, x7, [x1, #0x10]
+    ldp     x8, x9, [x1, #0x20]
+    ldp     x10, x11, [x1, #0x30]
+    sttr    x4, [x0, #0x00]
+    sttr    x5, [x0, #0x08]
+    sttr    x6, [x0, #0x10]
+    sttr    x7, [x0, #0x18]
+    sttr    x8, [x0, #0x20]
+    sttr    x9, [x0, #0x28]
+    sttr    x10, [x0, #0x30]
+    sttr    x11, [x0, #0x38]
+    add     x0, x0, #0x40
+    add     x1, x1, #0x40
+    sub     x2, x2, #0x40
+    b       _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm
+
+1:  /* We have less than 0x40 bytes to copy. */
+    cmp     x2, #0x0
+    b.eq    2f
+    ldr     x4, [x1], #0x8
+    sttr    x4, [x0]
+    add     x0, x0, #0x8
+    sub     x2, x2, #0x8
+    b       1b
+
+2:  /* We're done. */
+    mov     x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::StoreDataCache(uintptr_t start, uintptr_t end) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm:
     /* Check if we have any work to do. */
     cmp     x1, x0
     b.eq    2f
@@ -42,11 +80,11 @@
     mov     x0, #1
     ret
 
-/* ams::kern::arch::arm64::FlushDataCache(uintptr_t start, uintptr_t end) */
-.section .text._ZN3ams4kern4arch5arm6414FlushDataCacheEmm, "ax", %progbits
-.global _ZN3ams4kern4arch5arm6414FlushDataCacheEmm
-.type _ZN3ams4kern4arch5arm6414FlushDataCacheEmm, %function
-_ZN3ams4kern4arch5arm6414FlushDataCacheEmm:
+/* ams::kern::arch::arm64::UserspaceAccess::FlushDataCache(uintptr_t start, uintptr_t end) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm:
    /* Check if we have any work to do. */
     cmp     x1, x0
     b.eq    2f
@@ -61,11 +99,11 @@
     mov     x0, #1
     ret
 
-/* ams::kern::arch::arm64::InvalidateDataCache(uintptr_t start, uintptr_t end) */
-.section .text._ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm, "ax", %progbits
-.global _ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm
-.type _ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm, %function
-_ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm:
+/* ams::kern::arch::arm64::UserspaceAccess::InvalidateDataCache(uintptr_t start, uintptr_t end) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm:
     /* Check if we have any work to do. */
     cmp     x1, x0
     b.eq    2f
@@ -80,11 +118,11 @@
     mov     x0, #1
     ret
 
-/* ams::kern::arch::arm64::InvalidateInstructionCache(uintptr_t start, uintptr_t end) */
-.section .text._ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm, "ax", %progbits
-.global _ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm
-.type _ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm, %function
-_ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm:
+/* ams::kern::arch::arm64::UserspaceAccess::InvalidateInstructionCache(uintptr_t start, uintptr_t end) */
+.section .text._ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm
+.type _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm:
     /* Check if we have any work to do. */
     cmp     x1, x0
     b.eq    2f
@@ -99,11 +137,11 @@
     mov     x0, #1
     ret
 
-/* ================ All Userspace Memory Functions before this line. ================ */
+/* ================ All Userspace Access Functions before this line. ================ */
 
-/* ams::kern::arch::arm64::UserspaceMemoryAccessFunctionAreaEnd() */
-.section .text._ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv, "ax", %progbits
-.global _ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv
-.type _ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv, %function
-_ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv:
+/* ams::kern::arch::arm64::UserspaceAccessFunctionAreaEnd() */
+.section .text._ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv, "ax", %progbits
+.global _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv
+.type _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv, %function
+_ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv:
     /* NOTE: This is not a real function, and only exists as a label for safety. */
\ No newline at end of file
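For readers who do not speak AArch64: a C-level model of the CopyMemoryToUserAligned64Bit routine above. The real code uses unprivileged sttr stores, so a bad user address raises an EL1 data abort whose handler (see the exception-handler hunk at the end of this diff) makes the routine report failure; plain C++ cannot express that recovery, so this sketch only models the copy order and the success return value.

    /* Sketch only: mirrors the 0x40-byte block loop and the 8-byte tail loop of the assembly. */
    #include <cstddef>
    #include <cstring>

    static bool CopyMemoryToUserAligned64BitModel(void *dst, const void *src, size_t size) {
        unsigned char *d       = static_cast<unsigned char *>(dst);
        const unsigned char *s = static_cast<const unsigned char *>(src);

        /* While at least 0x40 bytes remain, copy a 0x40-byte block (ldp + sttr pairs in the real code). */
        while (size > 0x3F) {
            std::memcpy(d, s, 0x40);
            d += 0x40; s += 0x40; size -= 0x40;
        }

        /* Copy the rest eight bytes at a time; size is a multiple of 8 by the caller's contract. */
        while (size != 0) {
            std::memcpy(d, s, 8);
            d += 8; s += 8; size -= 8;
        }

        /* The assembly returns 1 in x0 on success; a faulting store never reaches this point. */
        return true;
    }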
diff --git a/libraries/libmesosphere/source/kern_k_page_table_base.cpp b/libraries/libmesosphere/source/kern_k_page_table_base.cpp
index e6973b6ce..24aeb1c44 100644
--- a/libraries/libmesosphere/source/kern_k_page_table_base.cpp
+++ b/libraries/libmesosphere/source/kern_k_page_table_base.cpp
@@ -765,6 +765,29 @@ namespace ams::kern {
         return ResultSuccess();
     }
 
+    Result KPageTableBase::QueryInfo(KMemoryInfo *out_info, ams::svc::PageInfo *out_page_info, KProcessAddress addr) const {
+        /* If the address is invalid, create a fake block. */
+        if (!this->Contains(addr, 1)) {
+            *out_info = {
+                .address          = GetInteger(this->address_space_end),
+                .size             = 0 - GetInteger(this->address_space_end),
+                .state            = static_cast<KMemoryState>(ams::svc::MemoryState_Inaccessible),
+                .perm             = KMemoryPermission_None,
+                .attribute        = KMemoryAttribute_None,
+                .original_perm    = KMemoryPermission_None,
+                .ipc_lock_count   = 0,
+                .device_use_count = 0,
+            };
+            out_page_info->flags = 0;
+
+            return ResultSuccess();
+        }
+
+        /* Otherwise, lock the table and query. */
+        KScopedLightLock lk(this->general_lock);
+        return this->QueryInfoImpl(out_info, out_page_info, addr);
+    }
+
     Result KPageTableBase::MapIo(KPhysicalAddress phys_addr, size_t size, KMemoryPermission perm) {
         MESOSPHERE_ASSERT(util::IsAligned(GetInteger(phys_addr), PageSize));
         MESOSPHERE_ASSERT(util::IsAligned(size, PageSize));
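The fake Inaccessible block above is what allows a caller to enumerate an entire address space by repeated queries: once the query address passes address_space_end, the reported block's size wraps around to the top of the address range, so the "next address" computation wraps to zero. A hypothetical kernel-side walk, using only types and fields shown in this diff (WalkAddressSpace itself is illustrative):

    /* Sketch: enumerate the mappings of a process page table via QueryInfo. */
    void WalkAddressSpace(KProcessPageTable &page_table) {
        uintptr_t address = 0;
        while (true) {
            KMemoryInfo info;
            ams::svc::PageInfo page_info;
            if (R_FAILED(page_table.QueryInfo(std::addressof(info), std::addressof(page_info), address))) {
                break;
            }

            /* ... inspect info.state / info.perm for [info.address, info.address + info.size) ... */

            /* For the fake end block, address + size wraps to zero, which terminates the walk. */
            const uintptr_t next = info.address + info.size;
            if (next <= address) {
                break;
            }
            address = next;
        }
    }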
diff --git a/libraries/libmesosphere/source/svc/kern_svc_memory.cpp b/libraries/libmesosphere/source/svc/kern_svc_memory.cpp
index 703f66563..a3770ddf5 100644
--- a/libraries/libmesosphere/source/svc/kern_svc_memory.cpp
+++ b/libraries/libmesosphere/source/svc/kern_svc_memory.cpp
@@ -43,10 +43,6 @@ namespace ams::kern::svc {
         MESOSPHERE_PANIC("Stubbed SvcUnmapMemory64 was called.");
     }
 
-    Result QueryMemory64(KUserPointer<ams::svc::lp64::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Address address) {
-        MESOSPHERE_PANIC("Stubbed SvcQueryMemory64 was called.");
-    }
-
     /* ============================= 64From32 ABI ============================= */
 
     Result SetMemoryPermission64From32(ams::svc::Address address, ams::svc::Size size, ams::svc::MemoryPermission perm) {
@@ -65,8 +61,4 @@
         MESOSPHERE_PANIC("Stubbed SvcUnmapMemory64From32 was called.");
     }
 
-    Result QueryMemory64From32(KUserPointer<ams::svc::ilp32::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Address address) {
-        MESOSPHERE_PANIC("Stubbed SvcQueryMemory64From32 was called.");
-    }
-
 }
diff --git a/libraries/libmesosphere/source/svc/kern_svc_process_memory.cpp b/libraries/libmesosphere/source/svc/kern_svc_process_memory.cpp
index 718735775..2e6348493 100644
--- a/libraries/libmesosphere/source/svc/kern_svc_process_memory.cpp
+++ b/libraries/libmesosphere/source/svc/kern_svc_process_memory.cpp
@@ -39,10 +39,6 @@ namespace ams::kern::svc {
         MESOSPHERE_PANIC("Stubbed SvcUnmapProcessMemory64 was called.");
     }
 
-    Result QueryProcessMemory64(KUserPointer<ams::svc::lp64::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uint64_t address) {
-        MESOSPHERE_PANIC("Stubbed SvcQueryProcessMemory64 was called.");
-    }
-
     Result MapProcessCodeMemory64(ams::svc::Handle process_handle, uint64_t dst_address, uint64_t src_address, uint64_t size) {
         MESOSPHERE_PANIC("Stubbed SvcMapProcessCodeMemory64 was called.");
     }
@@ -65,10 +61,6 @@
         MESOSPHERE_PANIC("Stubbed SvcUnmapProcessMemory64From32 was called.");
     }
 
-    Result QueryProcessMemory64From32(KUserPointer<ams::svc::ilp32::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uint64_t address) {
-        MESOSPHERE_PANIC("Stubbed SvcQueryProcessMemory64From32 was called.");
-    }
-
     Result MapProcessCodeMemory64From32(ams::svc::Handle process_handle, uint64_t dst_address, uint64_t src_address, uint64_t size) {
         MESOSPHERE_PANIC("Stubbed SvcMapProcessCodeMemory64From32 was called.");
     }
diff --git a/libraries/libmesosphere/source/svc/kern_svc_query_memory.cpp b/libraries/libmesosphere/source/svc/kern_svc_query_memory.cpp
new file mode 100644
index 000000000..cf1480ef4
--- /dev/null
+++ b/libraries/libmesosphere/source/svc/kern_svc_query_memory.cpp
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2018-2020 Atmosphère-NX
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
+ * more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ */
+#include <mesosphere.hpp>
+
+namespace ams::kern::svc {
+
+    /* ============================= Common ============================= */
+
+    namespace {
+
+        Result QueryProcessMemory(ams::svc::MemoryInfo *out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uintptr_t address) {
+            MESOSPHERE_LOG("%s: QueryProcessMemory(0x%08x, 0x%zx) was called\n", GetCurrentProcess().GetName(), process_handle, address);
+
+            /* Get the process. */
+            KScopedAutoObject process = GetCurrentProcess().GetHandleTable().GetObject<KProcess>(process_handle);
+            R_UNLESS(process.IsNotNull(), svc::ResultInvalidHandle());
+
+            /* Query the mapping's info. */
+            KMemoryInfo info;
+            R_TRY(process->GetPageTable().QueryInfo(std::addressof(info), out_page_info, address));
+
+            /* Write output. */
+            *out_memory_info = info.GetSvcMemoryInfo();
+            return ResultSuccess();
+        }
+
+        Result QueryMemory(ams::svc::MemoryInfo *out_memory_info, ams::svc::PageInfo *out_page_info, uintptr_t address) {
+            /* Query memory is just QueryProcessMemory on the current process. */
+            return QueryProcessMemory(out_memory_info, out_page_info, ams::svc::PseudoHandle::CurrentProcess, address);
+        }
+
+    }
+
+    /* ============================= 64 ABI ============================= */
+
+    Result QueryMemory64(KUserPointer<ams::svc::lp64::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Address address) {
+        /* Get an ams::svc::MemoryInfo for the region. */
+        ams::svc::MemoryInfo info = {};
+        R_TRY(QueryMemory(std::addressof(info), out_page_info, address));
+
+        /* Try to copy to userspace. In the 64-bit case, ams::svc::lp64::MemoryInfo is the same as ams::svc::MemoryInfo. */
+        static_assert(sizeof(ams::svc::MemoryInfo) == sizeof(ams::svc::lp64::MemoryInfo));
+        R_TRY(out_memory_info.CopyFrom(std::addressof(info)));
+
+        return ResultSuccess();
+    }
+
+    Result QueryProcessMemory64(KUserPointer<ams::svc::lp64::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uint64_t address) {
+        MESOSPHERE_PANIC("Stubbed SvcQueryProcessMemory64 was called.");
+    }
+
+    /* ============================= 64From32 ABI ============================= */
+
+    Result QueryMemory64From32(KUserPointer<ams::svc::ilp32::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Address address) {
+        MESOSPHERE_PANIC("Stubbed SvcQueryMemory64From32 was called.");
+    }
+
+    Result QueryProcessMemory64From32(KUserPointer<ams::svc::ilp32::MemoryInfo *> out_memory_info, ams::svc::PageInfo *out_page_info, ams::svc::Handle process_handle, uint64_t address) {
+        MESOSPHERE_PANIC("Stubbed SvcQueryProcessMemory64From32 was called.");
+    }
+
+}
diff --git a/libraries/libvapours/include/vapours/svc/svc_common.hpp b/libraries/libvapours/include/vapours/svc/svc_common.hpp
index 659bfc261..c32d3fdd8 100644
--- a/libraries/libvapours/include/vapours/svc/svc_common.hpp
+++ b/libraries/libvapours/include/vapours/svc/svc_common.hpp
@@ -30,7 +30,7 @@ namespace ams::svc {
 
     static constexpr size_t MaxWaitSynchronizationHandleCount = 0x40;
 
-    enum class PseudoHandle : Handle {
+    enum PseudoHandle : Handle {
        CurrentThread  = 0xFFFF8000,
        CurrentProcess = 0xFFFF8001,
     };
diff --git a/mesosphere/kernel/source/arch/arm64/kern_exception_handlers_asm.s b/mesosphere/kernel/source/arch/arm64/kern_exception_handlers_asm.s
index ccfce293e..1d9ba4f40 100644
--- a/mesosphere/kernel/source/arch/arm64/kern_exception_handlers_asm.s
+++ b/mesosphere/kernel/source/arch/arm64/kern_exception_handlers_asm.s
@@ -324,10 +324,10 @@ _ZN3ams4kern4arch5arm6430EL1SynchronousExceptionHandlerEv:
 
     /* Data abort. Check if it was from trying to access userspace memory. */
     mrs     x1, elr_el1
-    adr     x0, _ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv
+    adr     x0, _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv
     cmp     x1, x0
     b.lo    3f
-    adr     x0, _ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv
+    adr     x0, _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv
     cmp     x1, x0
     b.hs    3f
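The two adr/cmp pairs above implement a range check on the faulting PC: a data abort whose elr_el1 lies inside [UserspaceAccessFunctionAreaBegin, UserspaceAccessFunctionAreaEnd) is treated as a failed userspace access rather than a fatal kernel fault (the recovery path itself is outside this hunk). A C++ rendering of just that check; the function name is illustrative, not part of this commit.

    /* Sketch of the range check performed by the EL1 synchronous exception handler. */
    #include <mesosphere.hpp>

    namespace ams::kern::arch::arm64 {

        bool IsFaultFromUserspaceAccessArea(uintptr_t faulting_pc /* elr_el1 */) {
            const uintptr_t area_begin = reinterpret_cast<uintptr_t>(&UserspaceAccessFunctionAreaBegin);
            const uintptr_t area_end   = reinterpret_cast<uintptr_t>(&UserspaceAccessFunctionAreaEnd);
            return area_begin <= faulting_pc && faulting_pc < area_end;
        }

    }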