From 5a7189dcaaddc4b474f50a7449e080e9a3fb2e46 Mon Sep 17 00:00:00 2001 From: shupiaoyang Date: Tue, 2 Dec 2025 11:57:24 +0800 Subject: [PATCH] Add support for goexperiment.RevertCopyHashKeys in runtime maps --- .../exp_revertcopyhashkeys_off.go | 8 ++++ .../goexperiment/exp_revertcopyhashkeys_on.go | 8 ++++ src/internal/goexperiment/flags.go | 3 ++ .../runtime/maps/runtime_fast32_swiss.go | 41 +++++++++++++++---- .../runtime/maps/runtime_fast64_swiss.go | 41 +++++++++++++++---- .../runtime/maps/runtime_faststr_swiss.go | 40 ++++++++++++++---- 6 files changed, 118 insertions(+), 23 deletions(-) create mode 100644 src/internal/goexperiment/exp_revertcopyhashkeys_off.go create mode 100644 src/internal/goexperiment/exp_revertcopyhashkeys_on.go diff --git a/src/internal/goexperiment/exp_revertcopyhashkeys_off.go b/src/internal/goexperiment/exp_revertcopyhashkeys_off.go new file mode 100644 index 000000000..5cf48fb0d --- /dev/null +++ b/src/internal/goexperiment/exp_revertcopyhashkeys_off.go @@ -0,0 +1,8 @@ +// Code generated by mkconsts.go. DO NOT EDIT. + +//go:build !goexperiment.revertcopyhashkeys + +package goexperiment + +const RevertCopyHashKeys = false +const RevertCopyHashKeysInt = 0 diff --git a/src/internal/goexperiment/exp_revertcopyhashkeys_on.go b/src/internal/goexperiment/exp_revertcopyhashkeys_on.go new file mode 100644 index 000000000..0e5d79f33 --- /dev/null +++ b/src/internal/goexperiment/exp_revertcopyhashkeys_on.go @@ -0,0 +1,8 @@ +// Code generated by mkconsts.go. DO NOT EDIT. + +//go:build goexperiment.revertcopyhashkeys + +package goexperiment + +const RevertCopyHashKeys = true +const RevertCopyHashKeysInt = 1 diff --git a/src/internal/goexperiment/flags.go b/src/internal/goexperiment/flags.go index ac85fc800..953003d36 100644 --- a/src/internal/goexperiment/flags.go +++ b/src/internal/goexperiment/flags.go @@ -131,4 +131,7 @@ type Flags struct { // Kunpeng malloc prefetch optimization. 
PrefetchMalloc bool + + // RevertCopyHashKeys reverts the pre-hash copy of map keys in the swiss-map fast paths, hashing the caller's key directly. + RevertCopyHashKeys bool } diff --git a/src/internal/runtime/maps/runtime_fast32_swiss.go b/src/internal/runtime/maps/runtime_fast32_swiss.go index 46023cc9b..9ccd4ba1d 100644 --- a/src/internal/runtime/maps/runtime_fast32_swiss.go +++ b/src/internal/runtime/maps/runtime_fast32_swiss.go @@ -8,6 +8,7 @@ package maps import ( "internal/abi" + "internal/goexperiment" "internal/race" "internal/runtime/sys" "unsafe" ) @@ -48,8 +49,14 @@ func runtime_mapaccess1_fast32(typ *abi.SwissMapType, m *Map, key uint32) unsafe return unsafe.Pointer(&zeroVal[0]) } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint32 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. idx := m.directoryIndex(hash) @@ -117,8 +124,14 @@ func runtime_mapaccess2_fast32(typ *abi.SwissMapType, m *Map, key uint32) (unsaf return unsafe.Pointer(&zeroVal[0]), false } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint32 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. idx := m.directoryIndex(hash) @@ -205,8 +218,14 @@ func runtime_mapassign_fast32(typ *abi.SwissMapType, m *Map, key uint32) unsafe.
fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint32 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write. @@ -339,8 +358,14 @@ func runtime_mapassign_fast32ptr(typ *abi.SwissMapType, m *Map, key unsafe.Point fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k unsafe.Pointer + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write. diff --git a/src/internal/runtime/maps/runtime_fast64_swiss.go b/src/internal/runtime/maps/runtime_fast64_swiss.go index 6bc6b2f0b..9cd5d1ca6 100644 --- a/src/internal/runtime/maps/runtime_fast64_swiss.go +++ b/src/internal/runtime/maps/runtime_fast64_swiss.go @@ -8,6 +8,7 @@ package maps import ( "internal/abi" + "internal/goexperiment" "internal/race" "internal/runtime/sys" "unsafe" @@ -48,8 +49,14 @@ func runtime_mapaccess1_fast64(typ *abi.SwissMapType, m *Map, key uint64) unsafe return unsafe.Pointer(&zeroVal[0]) } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint64 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. 
idx := m.directoryIndex(hash) @@ -117,8 +124,14 @@ func runtime_mapaccess2_fast64(typ *abi.SwissMapType, m *Map, key uint64) (unsaf return unsafe.Pointer(&zeroVal[0]), false } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint64 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. idx := m.directoryIndex(hash) @@ -205,8 +218,14 @@ func runtime_mapassign_fast64(typ *abi.SwissMapType, m *Map, key uint64) unsafe. fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint64 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write. @@ -377,8 +396,14 @@ func runtime_mapassign_fast64ptr(typ *abi.SwissMapType, m *Map, key unsafe.Point fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k unsafe.Pointer + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write. 
diff --git a/src/internal/runtime/maps/runtime_faststr_swiss.go b/src/internal/runtime/maps/runtime_faststr_swiss.go index 077c05ae8..f4c4de6c8 100644 --- a/src/internal/runtime/maps/runtime_faststr_swiss.go +++ b/src/internal/runtime/maps/runtime_faststr_swiss.go @@ -9,6 +9,7 @@ package maps import ( "internal/abi" "internal/goarch" + "internal/goexperiment" "internal/race" "internal/runtime/sys" "unsafe" @@ -56,7 +57,14 @@ func (m *Map) getWithoutKeySmallFastStr(typ *abi.SwissMapType, key string) unsaf dohash: // This path will cost 1 hash and 1+ε comparisons. - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + var k string + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } h2 := uint8(h2(hash)) ctrls = *g.ctrls() slotKey = g.key(typ, 0) @@ -124,8 +132,14 @@ func runtime_mapaccess1_faststr(typ *abi.SwissMapType, m *Map, key string) unsaf return elem } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k string + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. idx := m.directoryIndex(hash) @@ -183,8 +197,14 @@ func runtime_mapaccess2_faststr(typ *abi.SwissMapType, m *Map, key string) (unsa return elem, true } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k string + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. 
idx := m.directoryIndex(hash) @@ -273,8 +293,14 @@ func runtime_mapassign_faststr(typ *abi.SwissMapType, m *Map, key string) unsafe fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k string + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write. -- Gitee