diff --git a/src/CLR/Core/CLR_RT_HeapBlock_Delegate.cpp b/src/CLR/Core/CLR_RT_HeapBlock_Delegate.cpp index f5b2d5629f..0fd44b5cae 100644 --- a/src/CLR/Core/CLR_RT_HeapBlock_Delegate.cpp +++ b/src/CLR/Core/CLR_RT_HeapBlock_Delegate.cpp @@ -56,6 +56,7 @@ HRESULT CLR_RT_HeapBlock_Delegate::CreateInstance( #endif dlg->m_object.SetObjectReference(nullptr); + dlg->m_genericTypeSpec.Clear(); #if defined(NANOCLR_APPDOMAINS) dlg->m_appDomain = g_CLR_RT_ExecutionEngine.GetCurrentAppDomain(); diff --git a/src/CLR/Core/Execution.cpp b/src/CLR/Core/Execution.cpp index a52d135f2c..55a14a0bdd 100644 --- a/src/CLR/Core/Execution.cpp +++ b/src/CLR/Core/Execution.cpp @@ -678,6 +678,7 @@ HRESULT CLR_RT_ExecutionEngine::Execute(wchar_t *entryPointArgs, int maxContextS NANOCLR_CHECK_HRESULT(WaitForDebugger()); // m_cctorThread is nullptr before call and inialized by the SpawnStaticConstructor + // This will execute both non-generic and generic type static constructors SpawnStaticConstructor(m_cctorThread); while (true) @@ -809,6 +810,28 @@ void CLR_RT_ExecutionEngine::StaticConstructorTerminationCallback(void *arg) (void)arg; NATIVE_PROFILE_CLR_CORE(); + + // If the completed .cctor was for a generic type, mark it as executed + CLR_RT_HeapBlock_Delegate *dlg = g_CLR_RT_ExecutionEngine.m_cctorThread->m_dlg; + if (dlg != nullptr && dlg->m_genericTypeSpec.data != 0) + { + // This was a generic type .cctor - compute hash and mark as executed + CLR_RT_TypeSpec_Instance genericTypeInstance{}; + if (genericTypeInstance.InitializeFromIndex(dlg->m_genericTypeSpec)) + { + CLR_UINT32 hash = g_CLR_RT_TypeSystem.ComputeHashForClosedGenericType(genericTypeInstance); + CLR_RT_GenericCctorExecutionRecord *record = + g_CLR_RT_TypeSystem.FindOrCreateGenericCctorRecord(hash, nullptr); + + if (record != nullptr) + { + // Clear scheduled flag and set executed flag + record->m_flags &= ~CLR_RT_GenericCctorExecutionRecord::c_Scheduled; + record->m_flags |= CLR_RT_GenericCctorExecutionRecord::c_Executed; + } 
+ } + } + g_CLR_RT_ExecutionEngine.SpawnStaticConstructor(g_CLR_RT_ExecutionEngine.m_cctorThread); } @@ -965,10 +988,136 @@ bool CLR_RT_ExecutionEngine::SpawnStaticConstructorHelper( } } + // Set flag to indicate regular static constructors have been processed assembly->flags |= CLR_RT_Assembly::StaticConstructorsExecuted; return false; } +bool CLR_RT_ExecutionEngine::SpawnGenericTypeStaticConstructorsHelper( + CLR_RT_Assembly *assembly, + const CLR_RT_TypeSpec_Index &startTypeSpecIndex) +{ + NATIVE_PROFILE_CLR_CORE(); + + _ASSERTE(m_cctorThread != nullptr); + _ASSERTE(m_cctorThread->CanThreadBeReused()); + _ASSERTE(assembly != nullptr); + + // Crawl TypeSpecs in this assembly to find closed generic instantiations that need .cctor execution + int numTypeSpec = assembly->tablesSize[TBL_TypeSpec]; + + // Start from the specified TypeSpec index (to resume iteration after a .cctor completes) + CLR_UINT32 startIndex = startTypeSpecIndex.TypeSpec(); + CLR_RT_TypeSpec_CrossReference *tsCross = assembly->crossReferenceTypeSpec + startIndex; + + for (int iTs = startIndex; iTs < numTypeSpec; iTs++, tsCross++) + { + // Build a TypeSpec_Instance to check if this is a closed generic instantiation + CLR_RT_TypeSpec_Instance genericTypeInstance{}; + CLR_RT_TypeSpec_Index tsIndex; + tsIndex.Set(assembly->assemblyIndex, iTs); + + if (!genericTypeInstance.InitializeFromIndex(tsIndex)) + { + continue; + } + + // Only for closed generic instantiations (have genericTypeDef) + if (!genericTypeInstance.IsClosedGenericType()) + { + continue; + } + + // Get the generic type definition + CLR_RT_TypeDef_Index typeDef = genericTypeInstance.genericTypeDef; + + // Check if the generic type definition has a static constructor + CLR_RT_Assembly *ownerAsm = g_CLR_RT_TypeSystem.m_assemblies[typeDef.Assembly() - 1]; + if (!ownerAsm->HasStaticConstructor(typeDef)) + { + continue; + } + + // Find the static constructor method for this generic type definition + const CLR_RECORD_TYPEDEF *ownerTd = 
ownerAsm->GetTypeDef(typeDef.Type()); + const CLR_RECORD_METHODDEF *md = ownerAsm->GetMethodDef(ownerTd->firstMethod); + + // Calculate total method count for this type + int methodCount = ownerTd->virtualMethodCount + ownerTd->instanceMethodCount + ownerTd->staticMethodCount; + + CLR_RT_MethodDef_Index cctorIndex; + bool foundCctor = false; + + for (int i = 0; i < methodCount; i++, md++) + { + if (md->flags & CLR_RECORD_METHODDEF::MD_StaticConstructor) + { + cctorIndex.Set(ownerAsm->assemblyIndex, ownerTd->firstMethod + i); + foundCctor = true; + break; + } + } + + if (!foundCctor) + { + continue; + } + + // Compute hash for the closed generic type to check if .cctor already scheduled/executed + CLR_UINT32 hash = g_CLR_RT_TypeSystem.ComputeHashForClosedGenericType(genericTypeInstance); + + // Find or create the .cctor execution record for this closed type + bool recordCreated = false; + CLR_RT_GenericCctorExecutionRecord *record = + g_CLR_RT_TypeSystem.FindOrCreateGenericCctorRecord(hash, &recordCreated); + + if (record == nullptr) + { + // Out of memory - skip this .cctor + continue; + } + + // Check if .cctor already scheduled or executed + if (record->m_flags & + (CLR_RT_GenericCctorExecutionRecord::c_Scheduled | CLR_RT_GenericCctorExecutionRecord::c_Executed)) + { + // Already handled - skip to next TypeSpec + continue; + } + + // Mark as scheduled to prevent duplicate scheduling + record->m_flags |= CLR_RT_GenericCctorExecutionRecord::c_Scheduled; + + // Create delegate for the generic type .cctor + CLR_RT_HeapBlock_Delegate *dlg; + CLR_RT_HeapBlock refDlg; + + refDlg.SetObjectReference(nullptr); + CLR_RT_ProtectFromGC gc(refDlg); + + if (SUCCEEDED(CLR_RT_HeapBlock_Delegate::CreateInstance(refDlg, cctorIndex, nullptr))) + { + dlg = refDlg.DereferenceDelegate(); + + // Store the current closed generic TypeSpec index for correct resumption + dlg->m_genericTypeSpec = tsIndex; + + if (SUCCEEDED(m_cctorThread->PushThreadProcDelegate(dlg))) + { + 
m_cctorThread->m_terminationCallback = StaticConstructorTerminationCallback; + return true; + } + } + + // If we failed to schedule, clear the scheduled flag + record->m_flags &= ~CLR_RT_GenericCctorExecutionRecord::c_Scheduled; + } + + // No more generic type .cctors for this assembly - set flag + assembly->flags |= CLR_RT_Assembly::StaticGenericConstructorsExecuted; + return false; +} + void CLR_RT_ExecutionEngine::SpawnStaticConstructor(CLR_RT_Thread *&pCctorThread) { NATIVE_PROFILE_CLR_CORE(); @@ -988,37 +1137,91 @@ void CLR_RT_ExecutionEngine::SpawnStaticConstructor(CLR_RT_Thread *&pCctorThread _ASSERTE(NANOCLR_INDEX_IS_VALID(index)); _SIDE_ASSERTE(inst.InitializeFromIndex(index)); - // This is ok if index is no longer valid. SpawnStaticConstructorHelper will call FindNextStaticConstructor - // which will fail - index.data++; + // Check if this is a generic type .cctor (has m_genericTypeSpec.data != 0 in the delegate) + if (dlg->m_genericTypeSpec.data != 0) + { + // Extract the TypeSpec index from the delegate and increment to next TypeSpec + CLR_RT_TypeSpec_Index tsIndex = dlg->m_genericTypeSpec; + CLR_RT_Assembly *assembly = g_CLR_RT_TypeSystem.m_assemblies[tsIndex.Assembly() - 1]; - if (SpawnStaticConstructorHelper(inst.assembly, index)) - return; + // Increment to next TypeSpec (same pattern as regular .cctor) + tsIndex.data++; + + if (SpawnGenericTypeStaticConstructorsHelper(assembly, tsIndex)) + { + return; + } + } + else + { + // Regular static constructor - increment to next method index + index.data++; + + if (SpawnStaticConstructorHelper(inst.assembly, index)) + { + return; + } + } } // first, find the AppDomainAssembly to run. (what about appdomains!!!) NANOCLR_FOREACH_ASSEMBLY(g_CLR_RT_TypeSystem) { - // Find an AppDomainAssembly that does not have it's static constructor bit set... 
+ // Check if regular static constructors need to be executed if ((pASSM->flags & CLR_RT_Assembly::StaticConstructorsExecuted) == 0) { CLR_RT_MethodDef_Index index; index.Set(pASSM->assemblyIndex, 0); - bool fDepedenciesRun = true; + bool dependenciesSatisfied = true; - // Check that all dependent assemblies have had static constructors run. + // Check that all dependent assemblies have had regular static constructors run CLR_RT_AssemblyRef_CrossReference *ar = pASSM->crossReferenceAssemblyRef; for (int i = 0; i < pASSM->tablesSize[TBL_AssemblyRef]; i++, ar++) { if ((ar->target->flags & CLR_RT_Assembly::StaticConstructorsExecuted) == 0) { - fDepedenciesRun = true; + dependenciesSatisfied = false; break; } } - if (fDepedenciesRun && SpawnStaticConstructorHelper(pASSM, index)) - return; + if (dependenciesSatisfied) + { + // Run regular static constructors for this assembly + if (SpawnStaticConstructorHelper(pASSM, index)) + { + return; + } + } + } + + // Check if generic type static constructors need to be executed + if ((pASSM->flags & CLR_RT_Assembly::StaticGenericConstructorsExecuted) == 0) + { + bool dependenciesSatisfied = true; + + // Check that all dependent assemblies have had regular static constructors run + CLR_RT_AssemblyRef_CrossReference *ar = pASSM->crossReferenceAssemblyRef; + for (int i = 0; i < pASSM->tablesSize[TBL_AssemblyRef]; i++, ar++) + { + if ((ar->target->flags & CLR_RT_Assembly::StaticConstructorsExecuted) == 0) + { + dependenciesSatisfied = false; + break; + } + } + + if (dependenciesSatisfied) + { + // Run generic type static constructors for this assembly (starting from index 0) + CLR_RT_TypeSpec_Index startIndex; + startIndex.Set(pASSM->assemblyIndex, 0); + + if (SpawnGenericTypeStaticConstructorsHelper(pASSM, startIndex)) + { + return; + } + } } } NANOCLR_FOREACH_ASSEMBLY_END(); diff --git a/src/CLR/Core/Thread.cpp b/src/CLR/Core/Thread.cpp index c752ec5ac2..1461930ef9 100644 --- a/src/CLR/Core/Thread.cpp +++ b/src/CLR/Core/Thread.cpp 
@@ -163,6 +163,18 @@ HRESULT CLR_RT_Thread::PushThreadProcDelegate(CLR_RT_HeapBlock_Delegate *pDelega NANOCLR_SET_AND_LEAVE(CLR_E_WRONG_TYPE); } + // Set generic context if the delegate has a TypeSpec stored (for generic type static constructors) + // Note: We temporarily point inst.genericType at a stack-local copy of the delegate's TypeSpec here, + // but will copy it to stable storage in the stack frame after Push() below + CLR_RT_TypeSpec_Index delegateTypeSpec; + delegateTypeSpec.Clear(); + + if (pDelegate->m_genericTypeSpec.data != 0) + { + delegateTypeSpec = pDelegate->m_genericTypeSpec; + inst.genericType = &delegateTypeSpec; + } + #if defined(NANOCLR_APPDOMAINS) if (!pDelegate->m_appDomain->IsLoaded()) @@ -181,6 +193,15 @@ HRESULT CLR_RT_Thread::PushThreadProcDelegate(CLR_RT_HeapBlock_Delegate *pDelega NANOCLR_CHECK_HRESULT(CLR_RT_StackFrame::Push(this, inst, inst.target->argumentsCount)); + // If we have a generic type context, copy it into the stack frame's stable storage + // and repoint m_call.genericType there, since the local delegateTypeSpec dies when this function returns + if (delegateTypeSpec.data != 0) + { + CLR_RT_StackFrame *stackTop = this->CurrentFrame(); + stackTop->m_genericTypeSpecStorage = delegateTypeSpec; + stackTop->m_call.genericType = &stackTop->m_genericTypeSpecStorage; + } + if ((inst.target->flags & CLR_RECORD_METHODDEF::MD_Static) == 0) { CLR_RT_StackFrame *stackTop = this->CurrentFrame(); @@ -821,8 +842,8 @@ HRESULT CLR_RT_Thread::ProcessException_Phase1() us.GetPhase() < UnwindStack::p_1_SearchingForHandler_2_SentUsersChance && stack->m_IP) { // We have a debugger attached and we need to send some messages before we start searching. - // These messages should only get sent when the search reaches managed code. Stack::Push sets m_IP to nullptr - // for native code, so therefore we need IP to be non-nullptr + // These messages should only get sent when the search reaches managed code. 
Stack::Push sets m_IP to + // nullptr for native code, so therefore we need IP to be non-nullptr us.m_handlerStack = stack; diff --git a/src/CLR/Core/TypeSystem.cpp b/src/CLR/Core/TypeSystem.cpp index b1e7d1ea3e..f1948f1da0 100644 --- a/src/CLR/Core/TypeSystem.cpp +++ b/src/CLR/Core/TypeSystem.cpp @@ -5895,6 +5895,30 @@ bool CLR_RT_Assembly::FindNextStaticConstructor(CLR_RT_MethodDef_Index &index) return false; } +bool CLR_RT_Assembly::HasStaticConstructor(const CLR_RT_TypeDef_Index &typeDef) const +{ + NATIVE_PROFILE_CLR_CORE(); + + CLR_RT_Assembly *ownerAsm = g_CLR_RT_TypeSystem.m_assemblies[typeDef.Assembly() - 1]; + const CLR_RECORD_TYPEDEF *ownerTd = ownerAsm->GetTypeDef(typeDef.Type()); + + // Calculate total method count for this type + int methodCount = ownerTd->virtualMethodCount + ownerTd->instanceMethodCount + ownerTd->staticMethodCount; + + // Iterate through all methods of the generic type definition + const CLR_RECORD_METHODDEF *md = ownerAsm->GetMethodDef(ownerTd->firstMethod); + for (int i = 0; i < methodCount; i++, md++) + { + // Check if this is a static constructor + if (md->flags & CLR_RECORD_METHODDEF::MD_StaticConstructor) + { + return true; + } + } + + return false; +} + //////////////////////////////////////////////////////////////////////////////////////////////////// HRESULT CLR_RT_Assembly::ResolveComputeHashes() @@ -6034,6 +6058,11 @@ void CLR_RT_TypeSystem::TypeSystem_Initialize() g_CLR_RT_TypeSystem.m_genericStaticFields = nullptr; g_CLR_RT_TypeSystem.m_genericStaticFieldsCount = 0; g_CLR_RT_TypeSystem.m_genericStaticFieldsMaxCount = 0; + + // Initialize generic .cctor execution registry + g_CLR_RT_TypeSystem.m_genericCctorRegistry = nullptr; + g_CLR_RT_TypeSystem.m_genericCctorRegistryCount = 0; + g_CLR_RT_TypeSystem.m_genericCctorRegistryMaxCount = 0; } void CLR_RT_TypeSystem::TypeSystem_Cleanup() @@ -6067,6 +6096,15 @@ void CLR_RT_TypeSystem::TypeSystem_Cleanup() m_genericStaticFieldsCount = 0; m_genericStaticFieldsMaxCount = 0; } + 
+ // Clean up generic .cctor execution registry + if (m_genericCctorRegistry != nullptr) + { + platform_free(m_genericCctorRegistry); + m_genericCctorRegistry = nullptr; + m_genericCctorRegistryCount = 0; + m_genericCctorRegistryMaxCount = 0; + } } //--// @@ -7746,6 +7784,73 @@ CLR_UINT32 CLR_RT_TypeSystem::ComputeHashForClosedGenericType(CLR_RT_TypeSpec_In //--// +CLR_RT_GenericCctorExecutionRecord *CLR_RT_TypeSystem::FindOrCreateGenericCctorRecord(CLR_UINT32 hash, bool *created) +{ + if (created) + { + *created = false; + } + + // Look for existing entry + for (CLR_UINT32 i = 0; i < g_CLR_RT_TypeSystem.m_genericCctorRegistryCount; i++) + { + if (g_CLR_RT_TypeSystem.m_genericCctorRegistry[i].m_hash == hash) + { + return &g_CLR_RT_TypeSystem.m_genericCctorRegistry[i]; + } + } + + // Need to create a new entry - check if we need to expand the array + if (g_CLR_RT_TypeSystem.m_genericCctorRegistryCount >= g_CLR_RT_TypeSystem.m_genericCctorRegistryMaxCount) + { + CLR_UINT32 newMax = g_CLR_RT_TypeSystem.m_genericCctorRegistryMaxCount * 2; + + if (newMax == 0) + { + newMax = 4; // Initial minimum size + } + + CLR_RT_GenericCctorExecutionRecord *newArray = + (CLR_RT_GenericCctorExecutionRecord *)platform_malloc(sizeof(CLR_RT_GenericCctorExecutionRecord) * newMax); + + if (newArray == nullptr) + { + return nullptr; // Out of memory + } + + // Copy existing records + if (g_CLR_RT_TypeSystem.m_genericCctorRegistry != nullptr && + g_CLR_RT_TypeSystem.m_genericCctorRegistryCount > 0) + { + memcpy( + newArray, + g_CLR_RT_TypeSystem.m_genericCctorRegistry, + sizeof(CLR_RT_GenericCctorExecutionRecord) * g_CLR_RT_TypeSystem.m_genericCctorRegistryCount); + + platform_free(g_CLR_RT_TypeSystem.m_genericCctorRegistry); + } + + g_CLR_RT_TypeSystem.m_genericCctorRegistry = newArray; + g_CLR_RT_TypeSystem.m_genericCctorRegistryMaxCount = newMax; + } + + // Create new record + CLR_RT_GenericCctorExecutionRecord *record = + 
&g_CLR_RT_TypeSystem.m_genericCctorRegistry[g_CLR_RT_TypeSystem.m_genericCctorRegistryCount++]; + + record->m_hash = hash; + record->m_flags = 0; + + if (created) + { + *created = true; + } + + return record; +} + +//--// + void CLR_RT_AttributeEnumerator::Initialize(CLR_RT_Assembly *assm) { NATIVE_PROFILE_CLR_CORE(); diff --git a/src/CLR/Include/nanoCLR_Runtime.h b/src/CLR/Include/nanoCLR_Runtime.h index efac6be268..b22d291ea1 100644 --- a/src/CLR/Include/nanoCLR_Runtime.h +++ b/src/CLR/Include/nanoCLR_Runtime.h @@ -1339,6 +1339,7 @@ struct CLR_RT_Assembly : public CLR_RT_HeapBlock_Node // EVENT HEAP - NO RELOCAT static const CLR_UINT32 Deployed = 0x00000008; static const CLR_UINT32 PreparingForExecution = 0x00000010; static const CLR_UINT32 StaticConstructorsExecuted = 0x00000020; + static const CLR_UINT32 StaticGenericConstructorsExecuted = 0x00000040; // this flag should be set when the m_header was malloc'ed static const CLR_UINT32 FreeOnDestroy = 0x00000100; @@ -1486,6 +1487,7 @@ struct CLR_RT_Assembly : public CLR_RT_HeapBlock_Node // EVENT HEAP - NO RELOCAT CLR_UINT32 &assmIndex); bool FindNextStaticConstructor(CLR_RT_MethodDef_Index &index); + bool HasStaticConstructor(const CLR_RT_TypeDef_Index &typeDef) const; bool FindMethodBoundaries(CLR_INDEX i, CLR_OFFSET &start, CLR_OFFSET &end); @@ -1988,6 +1990,20 @@ struct CLR_RT_GenericStaticFieldRecord //--// +struct CLR_RT_GenericCctorExecutionRecord +{ + // Unique hash identifier for the closed generic type + CLR_UINT32 m_hash; + + // Execution state flags + static const CLR_UINT8 c_Scheduled = 0x01; + static const CLR_UINT8 c_Executed = 0x02; + + CLR_UINT8 m_flags; +}; + +//--// + struct CLR_RT_TypeSystem // EVENT HEAP - NO RELOCATION - { struct CompatibilityLookup @@ -2019,6 +2035,11 @@ struct CLR_RT_TypeSystem // EVENT HEAP - NO RELOCATION - CLR_UINT32 m_genericStaticFieldsCount; CLR_UINT32 m_genericStaticFieldsMaxCount; + // Global registry for generic .cctor execution tracking + 
CLR_RT_GenericCctorExecutionRecord *m_genericCctorRegistry; + CLR_UINT32 m_genericCctorRegistryCount; + CLR_UINT32 m_genericCctorRegistryMaxCount; + //--// void TypeSystem_Initialize(); @@ -2114,6 +2135,9 @@ struct CLR_RT_TypeSystem // EVENT HEAP - NO RELOCATION - // Helper to compute hash for a closed generic type static CLR_UINT32 ComputeHashForClosedGenericType(CLR_RT_TypeSpec_Instance &typeInstance); + // Helper to find or create a generic .cctor execution record by hash + static CLR_RT_GenericCctorExecutionRecord *FindOrCreateGenericCctorRecord(CLR_UINT32 hash, bool *created); + //--// PROHIBIT_COPY_CONSTRUCTORS(CLR_RT_TypeSystem); @@ -2575,6 +2599,11 @@ struct CLR_RT_StackFrame : public CLR_RT_HeapBlock_Node // EVENT HEAP - NO RELOC CLR_RT_MethodDef_Instance m_call; + // Stable storage for generic type context (not GC-relocated) + // Used when m_call.genericType needs to point to a value that would otherwise + // be inside a GC-managed object (e.g., delegate's m_genericTypeSpec) + CLR_RT_TypeSpec_Index m_genericTypeSpecStorage; + CLR_RT_MethodHandler m_nativeMethod; CLR_PMETADATA m_IPstart; // ANY HEAP - DO RELOCATION - @@ -4231,6 +4260,9 @@ struct CLR_RT_ExecutionEngine const CLR_RT_MethodDef_Index &index); #else bool SpawnStaticConstructorHelper(CLR_RT_Assembly *assembly, const CLR_RT_MethodDef_Index &index); + bool SpawnGenericTypeStaticConstructorsHelper( + CLR_RT_Assembly *assembly, + const CLR_RT_TypeSpec_Index &startTypeSpecIndex); #endif static void FinalizerTerminationCallback(void *arg); static void StaticConstructorTerminationCallback(void *arg); diff --git a/src/CLR/Include/nanoCLR_Runtime__HeapBlock.h b/src/CLR/Include/nanoCLR_Runtime__HeapBlock.h index dc926e9f43..e799d0884b 100644 --- a/src/CLR/Include/nanoCLR_Runtime__HeapBlock.h +++ b/src/CLR/Include/nanoCLR_Runtime__HeapBlock.h @@ -1910,6 +1910,9 @@ struct CLR_RT_HeapBlock_Delegate : public CLR_RT_HeapBlock_Node // OBJECT HEAP - CLR_RT_AppDomain *m_appDomain; #endif + // Optional TypeSpec 
index for generic type static constructors (data == 0 means not set) + CLR_RT_TypeSpec_Index m_genericTypeSpec; + //--// const CLR_RT_MethodDef_Index &DelegateFtn() const