diff --git a/src/internal/goexperiment/exp_revertcopyhashkeys_off.go b/src/internal/goexperiment/exp_revertcopyhashkeys_off.go new file mode 100644 index 0000000000000000000000000000000000000000..5cf48fb0dae2ab17ca06cc21c17b1b92f1ac0cf6 --- /dev/null +++ b/src/internal/goexperiment/exp_revertcopyhashkeys_off.go @@ -0,0 +1,8 @@ +// Code generated by mkconsts.go. DO NOT EDIT. + +//go:build !goexperiment.revertcopyhashkeys + +package goexperiment + +const RevertCopyHashKeys = false +const RevertCopyHashKeysInt = 0 diff --git a/src/internal/goexperiment/exp_revertcopyhashkeys_on.go b/src/internal/goexperiment/exp_revertcopyhashkeys_on.go new file mode 100644 index 0000000000000000000000000000000000000000..0e5d79f33b868ac28cce74e67ed3c9d6ee3e6414 --- /dev/null +++ b/src/internal/goexperiment/exp_revertcopyhashkeys_on.go @@ -0,0 +1,8 @@ +// Code generated by mkconsts.go. DO NOT EDIT. + +//go:build goexperiment.revertcopyhashkeys + +package goexperiment + +const RevertCopyHashKeys = true +const RevertCopyHashKeysInt = 1 diff --git a/src/internal/goexperiment/flags.go b/src/internal/goexperiment/flags.go index ac85fc800092a40a2090e8f719e5c012b54a682f..953003d364a1e1ae62ead49adcb1ac8157b63c12 100644 --- a/src/internal/goexperiment/flags.go +++ b/src/internal/goexperiment/flags.go @@ -131,4 +131,7 @@ type Flags struct { // Kunpeng malloc prefetch optimization. PrefetchMalloc bool + + // RevertCopyHashKeys reverts hashing a copy of the key in the swiss-map fast paths, hashing the caller's key argument directly instead (the pre-copy behavior).
+ RevertCopyHashKeys bool } diff --git a/src/internal/runtime/maps/runtime_fast32_swiss.go b/src/internal/runtime/maps/runtime_fast32_swiss.go index 46023cc9b70729f01b8239c2766d9e4d8c6b8bc3..9ccd4ba1d4141c3f39b6fe5b62bb6b68e14785f2 100644 --- a/src/internal/runtime/maps/runtime_fast32_swiss.go +++ b/src/internal/runtime/maps/runtime_fast32_swiss.go @@ -8,6 +8,7 @@ package maps import ( "internal/abi" + "internal/goexperiment" "internal/race" "internal/runtime/sys" "unsafe" @@ -48,8 +49,14 @@ func runtime_mapaccess1_fast32(typ *abi.SwissMapType, m *Map, key uint32) unsafe return unsafe.Pointer(&zeroVal[0]) } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint32 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. idx := m.directoryIndex(hash) @@ -117,8 +124,14 @@ func runtime_mapaccess2_fast32(typ *abi.SwissMapType, m *Map, key uint32) (unsaf return unsafe.Pointer(&zeroVal[0]), false } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint32 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. idx := m.directoryIndex(hash) @@ -205,8 +218,14 @@ func runtime_mapassign_fast32(typ *abi.SwissMapType, m *Map, key uint32) unsafe. fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint32 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write. 
@@ -339,8 +358,14 @@ func runtime_mapassign_fast32ptr(typ *abi.SwissMapType, m *Map, key unsafe.Point fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k unsafe.Pointer + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write. diff --git a/src/internal/runtime/maps/runtime_fast64_swiss.go b/src/internal/runtime/maps/runtime_fast64_swiss.go index 6bc6b2f0b1f5cab30e5973ea7bb25219531b4ac8..9cd5d1ca653c355c2d73174905c038e53bb0b24e 100644 --- a/src/internal/runtime/maps/runtime_fast64_swiss.go +++ b/src/internal/runtime/maps/runtime_fast64_swiss.go @@ -8,6 +8,7 @@ package maps import ( "internal/abi" + "internal/goexperiment" "internal/race" "internal/runtime/sys" "unsafe" @@ -48,8 +49,14 @@ func runtime_mapaccess1_fast64(typ *abi.SwissMapType, m *Map, key uint64) unsafe return unsafe.Pointer(&zeroVal[0]) } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint64 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. idx := m.directoryIndex(hash) @@ -117,8 +124,14 @@ func runtime_mapaccess2_fast64(typ *abi.SwissMapType, m *Map, key uint64) (unsaf return unsafe.Pointer(&zeroVal[0]), false } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint64 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. 
idx := m.directoryIndex(hash) @@ -205,8 +218,14 @@ func runtime_mapassign_fast64(typ *abi.SwissMapType, m *Map, key uint64) unsafe. fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k uint64 + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write. @@ -377,8 +396,14 @@ func runtime_mapassign_fast64ptr(typ *abi.SwissMapType, m *Map, key unsafe.Point fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k unsafe.Pointer + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write. diff --git a/src/internal/runtime/maps/runtime_faststr_swiss.go b/src/internal/runtime/maps/runtime_faststr_swiss.go index 077c05ae8b94b765526cc22bd1d45cf56f0e3f4d..f4c4de6c8d439d3b1803c34472a1276845febc16 100644 --- a/src/internal/runtime/maps/runtime_faststr_swiss.go +++ b/src/internal/runtime/maps/runtime_faststr_swiss.go @@ -9,6 +9,7 @@ package maps import ( "internal/abi" "internal/goarch" + "internal/goexperiment" "internal/race" "internal/runtime/sys" "unsafe" @@ -56,7 +57,14 @@ func (m *Map) getWithoutKeySmallFastStr(typ *abi.SwissMapType, key string) unsaf dohash: // This path will cost 1 hash and 1+ε comparisons. 
- hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + var k string + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } h2 := uint8(h2(hash)) ctrls = *g.ctrls() slotKey = g.key(typ, 0) @@ -124,8 +132,14 @@ func runtime_mapaccess1_faststr(typ *abi.SwissMapType, m *Map, key string) unsaf return elem } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k string + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. idx := m.directoryIndex(hash) @@ -183,8 +197,14 @@ func runtime_mapaccess2_faststr(typ *abi.SwissMapType, m *Map, key string) (unsa return elem, true } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k string + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Select table. idx := m.directoryIndex(hash) @@ -273,8 +293,14 @@ func runtime_mapassign_faststr(typ *abi.SwissMapType, m *Map, key string) unsafe fatal("concurrent map writes") } - k := key - hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + var k string + var hash uintptr + if goexperiment.RevertCopyHashKeys { + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&key)), m.seed) + } else { + k = key + hash = typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed) + } // Set writing after calling Hasher, since Hasher may panic, in which // case we have not actually done a write.