using ARMeilleure.CodeGen;
using ARMeilleure.CodeGen.Unwinding;
using ARMeilleure.Memory;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.InteropServices;

namespace ARMeilleure.Translation.Cache
{
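    /// <summary>
    /// Backing store for JIT-compiled code: reserves a large virtual memory region,
    /// hands out aligned offsets inside it for compiled functions, and tracks the
    /// unwind information associated with each mapped function.
    /// </summary>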
    static class JitCache
    {
        private const int PageSize = 4 * 1024;
        private const int PageMask = PageSize - 1;

        private const int CodeAlignment = 4; // Bytes.
        private const int CacheSize = 2047 * 1024 * 1024;

        private static ReservedRegion _jitRegion;

        private static CacheMemoryAllocator _cacheAllocator;

        private static readonly List<CacheEntry> _cacheEntries = new List<CacheEntry>();

        private static readonly object _lock = new object();
        private static bool _initialized;
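
        /// <summary>Base address of the reserved JIT code region.</summary>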
        public static IntPtr Base => _jitRegion.Pointer;
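
        /// <summary>
        /// Reserves the JIT code region and creates the cache allocator. Thread-safe and
        /// idempotent; on Windows it also installs the function table handler used for unwinding.
        /// </summary>
        /// <param name="allocator">Memory allocator used to reserve the backing region.</param>
        /// <example>
        /// Illustrative sketch only; <c>jitAllocator</c> stands for whatever
        /// <see cref="IJitMemoryAllocator"/> implementation the host supplies (not defined in this file):
        /// <code>
        /// JitCache.Initialize(jitAllocator);
        /// </code>
        /// </example>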
        public static void Initialize(IJitMemoryAllocator allocator)
        {
            if (_initialized) return;

            lock (_lock)
            {
                if (_initialized) return;

                _jitRegion = new ReservedRegion(allocator, CacheSize);

                _cacheAllocator = new CacheMemoryAllocator(CacheSize);

                if (OperatingSystem.IsWindows())
                {
                    JitUnwindWindows.InstallFunctionTableHandler(_jitRegion.Pointer, CacheSize, _jitRegion.Pointer + Allocate(PageSize));
                }

                _initialized = true;
            }
        }
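
        /// <summary>
        /// Copies a compiled function into the cache, reprotects its pages as executable,
        /// records its unwind info, and returns a pointer to the mapped code.
        /// </summary>
        /// <example>
        /// Illustrative sketch only; <c>func</c> is assumed to be a <see cref="CompiledFunction"/>
        /// produced elsewhere by the translator:
        /// <code>
        /// IntPtr funcPtr = JitCache.Map(func);
        /// // ... later, once the translation is discarded:
        /// JitCache.Unmap(funcPtr);
        /// </code>
        /// </example>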
        public static IntPtr Map(CompiledFunction func)
        {
            byte[] code = func.Code;

            lock (_lock)
            {
                Debug.Assert(_initialized);

                int funcOffset = Allocate(code.Length);

                IntPtr funcPtr = _jitRegion.Pointer + funcOffset;

                ReprotectAsWritable(funcOffset, code.Length);

                Marshal.Copy(code, 0, funcPtr, code.Length);

                ReprotectAsExecutable(funcOffset, code.Length);

                Add(funcOffset, code.Length, func.UnwindInfo);

                return funcPtr;
            }
        }
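
        /// <summary>
        /// Releases the cache space backing a previously mapped function and removes its entry.
        /// The pointer must have been returned by <see cref="Map"/>.
        /// </summary>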
        public static void Unmap(IntPtr pointer)
        {
            lock (_lock)
            {
                Debug.Assert(_initialized);

                int funcOffset = (int)(pointer.ToInt64() - _jitRegion.Pointer.ToInt64());

                bool result = TryFind(funcOffset, out CacheEntry entry);
                Debug.Assert(result);

                _cacheAllocator.Free(funcOffset, AlignCodeSize(entry.Size));

                Remove(funcOffset);
            }
        }
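
        /// <summary>Remaps the pages covering [offset, offset + size) as read/write/execute so code can be copied in.</summary>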
        private static void ReprotectAsWritable(int offset, int size)
        {
            int endOffs = offset + size;

            int regionStart = offset & ~PageMask;
            int regionEnd = (endOffs + PageMask) & ~PageMask;

            _jitRegion.Block.MapAsRwx((ulong)regionStart, (ulong)(regionEnd - regionStart));
        }
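
        /// <summary>Remaps the pages covering [offset, offset + size) as read/execute once the code has been written.</summary>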
        private static void ReprotectAsExecutable(int offset, int size)
        {
            int endOffs = offset + size;

            int regionStart = offset & ~PageMask;
            int regionEnd = (endOffs + PageMask) & ~PageMask;

            _jitRegion.Block.MapAsRx((ulong)regionStart, (ulong)(regionEnd - regionStart));
        }
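
        /// <summary>
        /// Reserves an aligned chunk of the cache for <paramref name="codeSize"/> bytes, expanding the
        /// backing region if needed. Throws <see cref="OutOfMemoryException"/> when the cache is exhausted.
        /// </summary>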
        private static int Allocate(int codeSize)
        {
            codeSize = AlignCodeSize(codeSize);

            int allocOffset = _cacheAllocator.Allocate(codeSize);

            if (allocOffset < 0)
            {
                throw new OutOfMemoryException("JIT Cache exhausted.");
            }

            _jitRegion.ExpandIfNeeded((ulong)allocOffset + (ulong)codeSize);

            return allocOffset;
        }
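
        /// <summary>Rounds <paramref name="codeSize"/> up to the next multiple of <see cref="CodeAlignment"/>, checking for overflow.</summary>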
        private static int AlignCodeSize(int codeSize)
        {
            return checked(codeSize + (CodeAlignment - 1)) & ~(CodeAlignment - 1);
        }
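
        /// <summary>Inserts a cache entry at the position that keeps <see cref="_cacheEntries"/> sorted.</summary>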
        private static void Add(int offset, int size, UnwindInfo unwindInfo)
        {
            CacheEntry entry = new CacheEntry(offset, size, unwindInfo);

            int index = _cacheEntries.BinarySearch(entry);

            if (index < 0)
            {
                index = ~index;
            }

            _cacheEntries.Insert(index, entry);
        }
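
        /// <summary>Removes the cache entry at, or immediately preceding, the given offset, if one exists.</summary>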
        private static void Remove(int offset)
        {
            int index = _cacheEntries.BinarySearch(new CacheEntry(offset, 0, default));

            if (index < 0)
            {
                index = ~index - 1;
            }

            if (index >= 0)
            {
                _cacheEntries.RemoveAt(index);
            }
        }
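
        /// <summary>
        /// Looks up the cache entry at, or immediately preceding, the given offset.
        /// Returns false when no entry exists at or before that offset.
        /// </summary>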
        public static bool TryFind(int offset, out CacheEntry entry)
        {
            lock (_lock)
            {
                int index = _cacheEntries.BinarySearch(new CacheEntry(offset, 0, default));

                if (index < 0)
                {
                    index = ~index - 1;
                }

                if (index >= 0)
                {
                    entry = _cacheEntries[index];
                    return true;
                }
            }

            entry = default;
            return false;
        }
    }
}