
eulaceura/SPEC.java-1.8.0-openjdk

0018-8130832-Extend-the-WhiteBox-API-to-provide-informati.patch 94.37 KB
Date: Wed, 7 Jun 2023 18:07:31 +0800
Subject: [PATCH 18/59] 8130832: Extend the WhiteBox API to provide information about the availability of compiler intrinsics
Bug url: https://bugs.openjdk.org/browse/JDK-8130832
---
hotspot/src/share/vm/c1/c1_Compiler.cpp | 160 +++++++
hotspot/src/share/vm/c1/c1_Compiler.hpp | 13 +
hotspot/src/share/vm/c1/c1_GraphBuilder.cpp | 420 ++++++------------
hotspot/src/share/vm/c1/c1_GraphBuilder.hpp | 12 +-
hotspot/src/share/vm/classfile/vmSymbols.cpp | 326 ++++++++++++++
hotspot/src/share/vm/classfile/vmSymbols.hpp | 20 +
.../share/vm/compiler/abstractCompiler.hpp | 53 +++
hotspot/src/share/vm/opto/c2compiler.cpp | 347 +++++++++++++++
hotspot/src/share/vm/opto/c2compiler.hpp | 20 +
hotspot/src/share/vm/opto/library_call.cpp | 326 +-------------
hotspot/src/share/vm/prims/whitebox.cpp | 20 +
.../intrinsics/IntrinsicAvailableTest.java | 126 ++++++
.../mathexact/sanity/IntrinsicBase.java | 30 +-
.../mathexact/sanity/MathIntrinsic.java | 81 ++++
14 files changed, 1349 insertions(+), 605 deletions(-)
create mode 100644 hotspot/test/compiler/intrinsics/IntrinsicAvailableTest.java
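
Before the per-file hunks, a brief caller-side sketch may help orient the reader. It is not part of the patch: it only uses the AbstractCompiler::is_intrinsic_available() hook introduced below, while the CompileBroker::compiler() accessor and the CompLevel_full_optimization constant are assumptions about the surrounding JDK 8 code base.

    // Hedged sketch (not in the patch): ask the top-tier compiler whether it provides an
    // intrinsic for 'mh', a methodHandle whose intrinsic_id() is not vmIntrinsics::_none.
    AbstractCompiler* comp = CompileBroker::compiler(CompLevel_full_optimization); // assumed accessor
    bool available = (comp != NULL) && comp->is_intrinsic_available(mh, NULL);
    // C1 ignores the compilation-context argument (second parameter); C2 uses it to honor
    // per-method DisableIntrinsic settings given via -XX:CompileCommand.
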
diff --git a/hotspot/src/share/vm/c1/c1_Compiler.cpp b/hotspot/src/share/vm/c1/c1_Compiler.cpp
index 8fd7bec66..599986278 100644
--- a/hotspot/src/share/vm/c1/c1_Compiler.cpp
+++ b/hotspot/src/share/vm/c1/c1_Compiler.cpp
@@ -93,6 +93,162 @@ BufferBlob* Compiler::init_buffer_blob() {
return buffer_blob;
}
+bool Compiler::is_intrinsic_supported(methodHandle method) {
+ vmIntrinsics::ID id = method->intrinsic_id();
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+
+ if (method->is_synchronized()) {
+ // C1 does not support intrinsification of synchronized methods.
+ return false;
+ }
+
+ switch (id) {
+ case vmIntrinsics::_compareAndSwapLong:
+ if (!VM_Version::supports_cx8()) return false;
+ break;
+ case vmIntrinsics::_getAndAddInt:
+ if (!VM_Version::supports_atomic_getadd4()) return false;
+ break;
+ case vmIntrinsics::_getAndAddLong:
+ if (!VM_Version::supports_atomic_getadd8()) return false;
+ break;
+ case vmIntrinsics::_getAndSetInt:
+ if (!VM_Version::supports_atomic_getset4()) return false;
+ break;
+ case vmIntrinsics::_getAndSetLong:
+ if (!VM_Version::supports_atomic_getset8()) return false;
+ break;
+ case vmIntrinsics::_getAndSetObject:
+#ifdef _LP64
+ if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) return false;
+ if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) return false;
+#else
+ if (!VM_Version::supports_atomic_getset4()) return false;
+#endif
+ break;
+ case vmIntrinsics::_arraycopy:
+ case vmIntrinsics::_currentTimeMillis:
+ case vmIntrinsics::_nanoTime:
+ case vmIntrinsics::_Reference_get:
+ // Use the intrinsic version of Reference.get() so that the value in
+ // the referent field can be registered by the G1 pre-barrier code.
+ // Also to prevent commoning reads from this field across safepoint
+ // since GC can change its value.
+ case vmIntrinsics::_loadFence:
+ case vmIntrinsics::_storeFence:
+ case vmIntrinsics::_fullFence:
+ case vmIntrinsics::_floatToRawIntBits:
+ case vmIntrinsics::_intBitsToFloat:
+ case vmIntrinsics::_doubleToRawLongBits:
+ case vmIntrinsics::_longBitsToDouble:
+ case vmIntrinsics::_getClass:
+ case vmIntrinsics::_isInstance:
+ case vmIntrinsics::_currentThread:
+ case vmIntrinsics::_dabs:
+ case vmIntrinsics::_dsqrt:
+ case vmIntrinsics::_dsin:
+ case vmIntrinsics::_dcos:
+ case vmIntrinsics::_dtan:
+ case vmIntrinsics::_dlog:
+ case vmIntrinsics::_dlog10:
+ case vmIntrinsics::_dexp:
+ case vmIntrinsics::_dpow:
+ case vmIntrinsics::_getObject:
+ case vmIntrinsics::_getBoolean:
+ case vmIntrinsics::_getByte:
+ case vmIntrinsics::_getShort:
+ case vmIntrinsics::_getChar:
+ case vmIntrinsics::_getInt:
+ case vmIntrinsics::_getLong:
+ case vmIntrinsics::_getFloat:
+ case vmIntrinsics::_getDouble:
+ case vmIntrinsics::_putObject:
+ case vmIntrinsics::_putBoolean:
+ case vmIntrinsics::_putByte:
+ case vmIntrinsics::_putShort:
+ case vmIntrinsics::_putChar:
+ case vmIntrinsics::_putInt:
+ case vmIntrinsics::_putLong:
+ case vmIntrinsics::_putFloat:
+ case vmIntrinsics::_putDouble:
+ case vmIntrinsics::_getObjectVolatile:
+ case vmIntrinsics::_getBooleanVolatile:
+ case vmIntrinsics::_getByteVolatile:
+ case vmIntrinsics::_getShortVolatile:
+ case vmIntrinsics::_getCharVolatile:
+ case vmIntrinsics::_getIntVolatile:
+ case vmIntrinsics::_getLongVolatile:
+ case vmIntrinsics::_getFloatVolatile:
+ case vmIntrinsics::_getDoubleVolatile:
+ case vmIntrinsics::_putObjectVolatile:
+ case vmIntrinsics::_putBooleanVolatile:
+ case vmIntrinsics::_putByteVolatile:
+ case vmIntrinsics::_putShortVolatile:
+ case vmIntrinsics::_putCharVolatile:
+ case vmIntrinsics::_putIntVolatile:
+ case vmIntrinsics::_putLongVolatile:
+ case vmIntrinsics::_putFloatVolatile:
+ case vmIntrinsics::_putDoubleVolatile:
+ case vmIntrinsics::_getByte_raw:
+ case vmIntrinsics::_getShort_raw:
+ case vmIntrinsics::_getChar_raw:
+ case vmIntrinsics::_getInt_raw:
+ case vmIntrinsics::_getLong_raw:
+ case vmIntrinsics::_getFloat_raw:
+ case vmIntrinsics::_getDouble_raw:
+ case vmIntrinsics::_putByte_raw:
+ case vmIntrinsics::_putShort_raw:
+ case vmIntrinsics::_putChar_raw:
+ case vmIntrinsics::_putInt_raw:
+ case vmIntrinsics::_putLong_raw:
+ case vmIntrinsics::_putFloat_raw:
+ case vmIntrinsics::_putDouble_raw:
+ case vmIntrinsics::_prefetchRead:
+ case vmIntrinsics::_prefetchWrite:
+ case vmIntrinsics::_prefetchReadStatic:
+ case vmIntrinsics::_prefetchWriteStatic:
+ case vmIntrinsics::_putOrderedObject:
+ case vmIntrinsics::_putOrderedInt:
+ case vmIntrinsics::_putOrderedLong:
+ case vmIntrinsics::_checkIndex:
+ case vmIntrinsics::_updateCRC32:
+ case vmIntrinsics::_updateBytesCRC32:
+ case vmIntrinsics::_updateByteBufferCRC32:
+ case vmIntrinsics::_dgemm_dgemm:
+ case vmIntrinsics::_dgemv_dgemv:
+ case vmIntrinsics::_compareAndSwapInt:
+ case vmIntrinsics::_compareAndSwapObject:
+#ifdef JFR_HAVE_INTRINSICS
+ case vmIntrinsics::_getClassId:
+ case vmIntrinsics::_getEventWriter:
+ case vmIntrinsics::_counterTime:
+#endif
+ break;
+ default:
+ return false; // Intrinsics not on the previous list are not available.
+ }
+
+ return true;
+}
+
+bool Compiler::is_intrinsic_disabled_by_flag(methodHandle method) {
+ vmIntrinsics::ID id = method->intrinsic_id();
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+
+ if (vmIntrinsics::is_disabled_by_flags(id)) {
+ return true;
+ }
+
+ if (!InlineNatives && id != vmIntrinsics::_Reference_get) {
+ return true;
+ }
+
+ if (!InlineClassNatives && id == vmIntrinsics::_getClass) {
+ return true;
+ }
+
+ return false;
+}
void Compiler::compile_method(ciEnv* env, ciMethod* method, int entry_bci) {
BufferBlob* buffer_blob = CompilerThread::current()->get_buffer_blob();
@@ -111,3 +267,7 @@ void Compiler::compile_method(ciEnv* env, ciMethod* method, int entry_bci) {
void Compiler::print_timers() {
Compilation::print_timers();
}
+
+bool Compiler::is_intrinsic_available(methodHandle method, methodHandle /* compilation_context */) {
+ return is_intrinsic_supported(method) && !is_intrinsic_disabled_by_flag(method);
+}
diff --git a/hotspot/src/share/vm/c1/c1_Compiler.hpp b/hotspot/src/share/vm/c1/c1_Compiler.hpp
index 8c645e658..8383a64a3 100644
--- a/hotspot/src/share/vm/c1/c1_Compiler.hpp
+++ b/hotspot/src/share/vm/c1/c1_Compiler.hpp
@@ -56,6 +56,19 @@ class Compiler: public AbstractCompiler {
// Print compilation timers and statistics
virtual void print_timers();
+
+ // Check the availability of an intrinsic for 'method' given a compilation context.
+ // The compilation context is needed to support per-method usage of the
+ // DisableIntrinsic flag. However, as C1 ignores the DisableIntrinsic flag, it
+ // ignores the compilation context.
+ virtual bool is_intrinsic_available(methodHandle method, methodHandle compilation_context);
+
+ // Check if the C1 compiler supports an intrinsic for 'method'.
+ virtual bool is_intrinsic_supported(methodHandle method);
+
+ // Processing of command-line flags specific to the C1 compiler.
+ virtual bool is_intrinsic_disabled_by_flag(methodHandle method);
+
};
#endif // SHARE_VM_C1_C1_COMPILER_HPP
diff --git a/hotspot/src/share/vm/c1/c1_GraphBuilder.cpp b/hotspot/src/share/vm/c1/c1_GraphBuilder.cpp
index 5ae34702d..eb8ffe5e5 100644
--- a/hotspot/src/share/vm/c1/c1_GraphBuilder.cpp
+++ b/hotspot/src/share/vm/c1/c1_GraphBuilder.cpp
@@ -3457,238 +3457,81 @@ const char* GraphBuilder::should_not_inline(ciMethod* callee) const {
}
-bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
- if (callee->is_synchronized()) {
- // We don't currently support any synchronized intrinsics
- return false;
- }
-
- // callee seems like a good candidate
- // determine id
+void GraphBuilder::build_graph_for_intrinsic(ciMethod* callee) {
vmIntrinsics::ID id = callee->intrinsic_id();
- if (!InlineNatives && id != vmIntrinsics::_Reference_get) {
- // InlineNatives does not control Reference.get
- INLINE_BAILOUT("intrinsic method inlining disabled");
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+
+ // Some intrinsics need special IR nodes.
+ switch(id) {
+ case vmIntrinsics::_getObject : append_unsafe_get_obj(callee, T_OBJECT, false); return;
+ case vmIntrinsics::_getBoolean : append_unsafe_get_obj(callee, T_BOOLEAN, false); return;
+ case vmIntrinsics::_getByte : append_unsafe_get_obj(callee, T_BYTE, false); return;
+ case vmIntrinsics::_getShort : append_unsafe_get_obj(callee, T_SHORT, false); return;
+ case vmIntrinsics::_getChar : append_unsafe_get_obj(callee, T_CHAR, false); return;
+ case vmIntrinsics::_getInt : append_unsafe_get_obj(callee, T_INT, false); return;
+ case vmIntrinsics::_getLong : append_unsafe_get_obj(callee, T_LONG, false); return;
+ case vmIntrinsics::_getFloat : append_unsafe_get_obj(callee, T_FLOAT, false); return;
+ case vmIntrinsics::_getDouble : append_unsafe_get_obj(callee, T_DOUBLE, false); return;
+ case vmIntrinsics::_putObject : append_unsafe_put_obj(callee, T_OBJECT, false); return;
+ case vmIntrinsics::_putBoolean : append_unsafe_put_obj(callee, T_BOOLEAN, false); return;
+ case vmIntrinsics::_putByte : append_unsafe_put_obj(callee, T_BYTE, false); return;
+ case vmIntrinsics::_putShort : append_unsafe_put_obj(callee, T_SHORT, false); return;
+ case vmIntrinsics::_putChar : append_unsafe_put_obj(callee, T_CHAR, false); return;
+ case vmIntrinsics::_putInt : append_unsafe_put_obj(callee, T_INT, false); return;
+ case vmIntrinsics::_putLong : append_unsafe_put_obj(callee, T_LONG, false); return;
+ case vmIntrinsics::_putFloat : append_unsafe_put_obj(callee, T_FLOAT, false); return;
+ case vmIntrinsics::_putDouble : append_unsafe_put_obj(callee, T_DOUBLE, false); return;
+ case vmIntrinsics::_getObjectVolatile : append_unsafe_get_obj(callee, T_OBJECT, true); return;
+ case vmIntrinsics::_getBooleanVolatile : append_unsafe_get_obj(callee, T_BOOLEAN, true); return;
+ case vmIntrinsics::_getByteVolatile : append_unsafe_get_obj(callee, T_BYTE, true); return;
+ case vmIntrinsics::_getShortVolatile : append_unsafe_get_obj(callee, T_SHORT, true); return;
+ case vmIntrinsics::_getCharVolatile : append_unsafe_get_obj(callee, T_CHAR, true); return;
+ case vmIntrinsics::_getIntVolatile : append_unsafe_get_obj(callee, T_INT, true); return;
+ case vmIntrinsics::_getLongVolatile : append_unsafe_get_obj(callee, T_LONG, true); return;
+ case vmIntrinsics::_getFloatVolatile : append_unsafe_get_obj(callee, T_FLOAT, true); return;
+ case vmIntrinsics::_getDoubleVolatile : append_unsafe_get_obj(callee, T_DOUBLE, true); return;
+ case vmIntrinsics::_putObjectVolatile : append_unsafe_put_obj(callee, T_OBJECT, true); return;
+ case vmIntrinsics::_putBooleanVolatile : append_unsafe_put_obj(callee, T_BOOLEAN, true); return;
+ case vmIntrinsics::_putByteVolatile : append_unsafe_put_obj(callee, T_BYTE, true); return;
+ case vmIntrinsics::_putShortVolatile : append_unsafe_put_obj(callee, T_SHORT, true); return;
+ case vmIntrinsics::_putCharVolatile : append_unsafe_put_obj(callee, T_CHAR, true); return;
+ case vmIntrinsics::_putIntVolatile : append_unsafe_put_obj(callee, T_INT, true); return;
+ case vmIntrinsics::_putLongVolatile : append_unsafe_put_obj(callee, T_LONG, true); return;
+ case vmIntrinsics::_putFloatVolatile : append_unsafe_put_obj(callee, T_FLOAT, true); return;
+ case vmIntrinsics::_putDoubleVolatile : append_unsafe_put_obj(callee, T_DOUBLE, true); return;
+ case vmIntrinsics::_getByte_raw : append_unsafe_get_raw(callee, T_BYTE ); return;
+ case vmIntrinsics::_getShort_raw : append_unsafe_get_raw(callee, T_SHORT ); return;
+ case vmIntrinsics::_getChar_raw : append_unsafe_get_raw(callee, T_CHAR ); return;
+ case vmIntrinsics::_getInt_raw : append_unsafe_get_raw(callee, T_INT ); return;
+ case vmIntrinsics::_getLong_raw : append_unsafe_get_raw(callee, T_LONG ); return;
+ case vmIntrinsics::_getFloat_raw : append_unsafe_get_raw(callee, T_FLOAT ); return;
+ case vmIntrinsics::_getDouble_raw : append_unsafe_get_raw(callee, T_DOUBLE); return;
+ case vmIntrinsics::_putByte_raw : append_unsafe_put_raw(callee, T_BYTE ); return;
+ case vmIntrinsics::_putShort_raw : append_unsafe_put_raw(callee, T_SHORT ); return;
+ case vmIntrinsics::_putChar_raw : append_unsafe_put_raw(callee, T_CHAR ); return;
+ case vmIntrinsics::_putInt_raw : append_unsafe_put_raw(callee, T_INT ); return;
+ case vmIntrinsics::_putLong_raw : append_unsafe_put_raw(callee, T_LONG ); return;
+ case vmIntrinsics::_putFloat_raw : append_unsafe_put_raw(callee, T_FLOAT ); return;
+ case vmIntrinsics::_putDouble_raw : append_unsafe_put_raw(callee, T_DOUBLE); return;
+ case vmIntrinsics::_prefetchRead : append_unsafe_prefetch(callee, false, false); return;
+ case vmIntrinsics::_prefetchWrite : append_unsafe_prefetch(callee, false, true ); return;
+ case vmIntrinsics::_prefetchReadStatic : append_unsafe_prefetch(callee, true, false); return;
+ case vmIntrinsics::_prefetchWriteStatic: append_unsafe_prefetch(callee, true, true ); return;
+ case vmIntrinsics::_putOrderedObject : append_unsafe_put_obj(callee, T_OBJECT, true); return;
+ case vmIntrinsics::_putOrderedInt : append_unsafe_put_obj(callee, T_INT, true); return;
+ case vmIntrinsics::_putOrderedLong : append_unsafe_put_obj(callee, T_LONG, true); return;
+ case vmIntrinsics::_compareAndSwapLong:
+ case vmIntrinsics::_compareAndSwapInt:
+ case vmIntrinsics::_compareAndSwapObject: append_unsafe_CAS(callee); return;
+ case vmIntrinsics::_getAndAddInt:
+ case vmIntrinsics::_getAndAddLong : append_unsafe_get_and_set_obj(callee, true); return;
+ case vmIntrinsics::_getAndSetInt :
+ case vmIntrinsics::_getAndSetLong :
+ case vmIntrinsics::_getAndSetObject : append_unsafe_get_and_set_obj(callee, false); return;
+ default:
+ break;
}
- bool preserves_state = false;
- bool cantrap = true;
- switch (id) {
- case vmIntrinsics::_arraycopy:
- if (!InlineArrayCopy) return false;
- break;
-
-#ifdef JFR_HAVE_INTRINSICS
-#if defined(_LP64) || !defined(TRACE_ID_CLASS_SHIFT)
- case vmIntrinsics::_getClassId:
- preserves_state = false;
- cantrap = false;
- break;
-#endif
-
- case vmIntrinsics::_getEventWriter:
- preserves_state = false;
- cantrap = true;
- break;
-
- case vmIntrinsics::_counterTime:
- preserves_state = true;
- cantrap = false;
- break;
-#endif
- case vmIntrinsics::_currentTimeMillis:
- case vmIntrinsics::_nanoTime:
- preserves_state = true;
- cantrap = false;
- break;
-
- case vmIntrinsics::_floatToRawIntBits :
- case vmIntrinsics::_intBitsToFloat :
- case vmIntrinsics::_doubleToRawLongBits :
- case vmIntrinsics::_longBitsToDouble :
- if (!InlineMathNatives) return false;
- preserves_state = true;
- cantrap = false;
- break;
-
- case vmIntrinsics::_getClass :
- case vmIntrinsics::_isInstance :
- case vmIntrinsics::_isPrimitive :
- if (!InlineClassNatives) return false;
- preserves_state = true;
- break;
-
- case vmIntrinsics::_currentThread :
- if (!InlineThreadNatives) return false;
- preserves_state = true;
- cantrap = false;
- break;
-
- case vmIntrinsics::_dabs : // fall through
- case vmIntrinsics::_dsqrt : // fall through
- case vmIntrinsics::_dsin : // fall through
- case vmIntrinsics::_dcos : // fall through
- case vmIntrinsics::_dtan : // fall through
- case vmIntrinsics::_dlog : // fall through
- case vmIntrinsics::_dlog10 : // fall through
- case vmIntrinsics::_dexp : // fall through
- case vmIntrinsics::_dpow : // fall through
- if (!InlineMathNatives) return false;
- cantrap = false;
- preserves_state = true;
- break;
-
- // Use special nodes for Unsafe instructions so we can more easily
- // perform an address-mode optimization on the raw variants
- case vmIntrinsics::_getObject : return append_unsafe_get_obj(callee, T_OBJECT, false);
- case vmIntrinsics::_getBoolean: return append_unsafe_get_obj(callee, T_BOOLEAN, false);
- case vmIntrinsics::_getByte : return append_unsafe_get_obj(callee, T_BYTE, false);
- case vmIntrinsics::_getShort : return append_unsafe_get_obj(callee, T_SHORT, false);
- case vmIntrinsics::_getChar : return append_unsafe_get_obj(callee, T_CHAR, false);
- case vmIntrinsics::_getInt : return append_unsafe_get_obj(callee, T_INT, false);
- case vmIntrinsics::_getLong : return append_unsafe_get_obj(callee, T_LONG, false);
- case vmIntrinsics::_getFloat : return append_unsafe_get_obj(callee, T_FLOAT, false);
- case vmIntrinsics::_getDouble : return append_unsafe_get_obj(callee, T_DOUBLE, false);
-
- case vmIntrinsics::_putObject : return append_unsafe_put_obj(callee, T_OBJECT, false);
- case vmIntrinsics::_putBoolean: return append_unsafe_put_obj(callee, T_BOOLEAN, false);
- case vmIntrinsics::_putByte : return append_unsafe_put_obj(callee, T_BYTE, false);
- case vmIntrinsics::_putShort : return append_unsafe_put_obj(callee, T_SHORT, false);
- case vmIntrinsics::_putChar : return append_unsafe_put_obj(callee, T_CHAR, false);
- case vmIntrinsics::_putInt : return append_unsafe_put_obj(callee, T_INT, false);
- case vmIntrinsics::_putLong : return append_unsafe_put_obj(callee, T_LONG, false);
- case vmIntrinsics::_putFloat : return append_unsafe_put_obj(callee, T_FLOAT, false);
- case vmIntrinsics::_putDouble : return append_unsafe_put_obj(callee, T_DOUBLE, false);
-
- case vmIntrinsics::_getObjectVolatile : return append_unsafe_get_obj(callee, T_OBJECT, true);
- case vmIntrinsics::_getBooleanVolatile: return append_unsafe_get_obj(callee, T_BOOLEAN, true);
- case vmIntrinsics::_getByteVolatile : return append_unsafe_get_obj(callee, T_BYTE, true);
- case vmIntrinsics::_getShortVolatile : return append_unsafe_get_obj(callee, T_SHORT, true);
- case vmIntrinsics::_getCharVolatile : return append_unsafe_get_obj(callee, T_CHAR, true);
- case vmIntrinsics::_getIntVolatile : return append_unsafe_get_obj(callee, T_INT, true);
- case vmIntrinsics::_getLongVolatile : return append_unsafe_get_obj(callee, T_LONG, true);
- case vmIntrinsics::_getFloatVolatile : return append_unsafe_get_obj(callee, T_FLOAT, true);
- case vmIntrinsics::_getDoubleVolatile : return append_unsafe_get_obj(callee, T_DOUBLE, true);
-
- case vmIntrinsics::_putObjectVolatile : return append_unsafe_put_obj(callee, T_OBJECT, true);
- case vmIntrinsics::_putBooleanVolatile: return append_unsafe_put_obj(callee, T_BOOLEAN, true);
- case vmIntrinsics::_putByteVolatile : return append_unsafe_put_obj(callee, T_BYTE, true);
- case vmIntrinsics::_putShortVolatile : return append_unsafe_put_obj(callee, T_SHORT, true);
- case vmIntrinsics::_putCharVolatile : return append_unsafe_put_obj(callee, T_CHAR, true);
- case vmIntrinsics::_putIntVolatile : return append_unsafe_put_obj(callee, T_INT, true);
- case vmIntrinsics::_putLongVolatile : return append_unsafe_put_obj(callee, T_LONG, true);
- case vmIntrinsics::_putFloatVolatile : return append_unsafe_put_obj(callee, T_FLOAT, true);
- case vmIntrinsics::_putDoubleVolatile : return append_unsafe_put_obj(callee, T_DOUBLE, true);
-
- case vmIntrinsics::_getByte_raw : return append_unsafe_get_raw(callee, T_BYTE);
- case vmIntrinsics::_getShort_raw : return append_unsafe_get_raw(callee, T_SHORT);
- case vmIntrinsics::_getChar_raw : return append_unsafe_get_raw(callee, T_CHAR);
- case vmIntrinsics::_getInt_raw : return append_unsafe_get_raw(callee, T_INT);
- case vmIntrinsics::_getLong_raw : return append_unsafe_get_raw(callee, T_LONG);
- case vmIntrinsics::_getFloat_raw : return append_unsafe_get_raw(callee, T_FLOAT);
- case vmIntrinsics::_getDouble_raw : return append_unsafe_get_raw(callee, T_DOUBLE);
-
- case vmIntrinsics::_putByte_raw : return append_unsafe_put_raw(callee, T_BYTE);
- case vmIntrinsics::_putShort_raw : return append_unsafe_put_raw(callee, T_SHORT);
- case vmIntrinsics::_putChar_raw : return append_unsafe_put_raw(callee, T_CHAR);
- case vmIntrinsics::_putInt_raw : return append_unsafe_put_raw(callee, T_INT);
- case vmIntrinsics::_putLong_raw : return append_unsafe_put_raw(callee, T_LONG);
- case vmIntrinsics::_putFloat_raw : return append_unsafe_put_raw(callee, T_FLOAT);
- case vmIntrinsics::_putDouble_raw : return append_unsafe_put_raw(callee, T_DOUBLE);
-
- case vmIntrinsics::_prefetchRead : return append_unsafe_prefetch(callee, false, false);
- case vmIntrinsics::_prefetchWrite : return append_unsafe_prefetch(callee, false, true);
- case vmIntrinsics::_prefetchReadStatic : return append_unsafe_prefetch(callee, true, false);
- case vmIntrinsics::_prefetchWriteStatic : return append_unsafe_prefetch(callee, true, true);
-
- case vmIntrinsics::_checkIndex :
- if (!InlineNIOCheckIndex) return false;
- preserves_state = true;
- break;
- case vmIntrinsics::_putOrderedObject : return append_unsafe_put_obj(callee, T_OBJECT, true);
- case vmIntrinsics::_putOrderedInt : return append_unsafe_put_obj(callee, T_INT, true);
- case vmIntrinsics::_putOrderedLong : return append_unsafe_put_obj(callee, T_LONG, true);
-
- case vmIntrinsics::_compareAndSwapLong:
- if (!VM_Version::supports_cx8()) return false;
- // fall through
- case vmIntrinsics::_compareAndSwapInt:
- case vmIntrinsics::_compareAndSwapObject:
- append_unsafe_CAS(callee);
- return true;
-
- case vmIntrinsics::_getAndAddInt:
- if (!VM_Version::supports_atomic_getadd4()) {
- return false;
- }
- return append_unsafe_get_and_set_obj(callee, true);
- case vmIntrinsics::_getAndAddLong:
- if (!VM_Version::supports_atomic_getadd8()) {
- return false;
- }
- return append_unsafe_get_and_set_obj(callee, true);
- case vmIntrinsics::_getAndSetInt:
- if (!VM_Version::supports_atomic_getset4()) {
- return false;
- }
- return append_unsafe_get_and_set_obj(callee, false);
- case vmIntrinsics::_getAndSetLong:
- if (!VM_Version::supports_atomic_getset8()) {
- return false;
- }
- return append_unsafe_get_and_set_obj(callee, false);
- case vmIntrinsics::_getAndSetObject:
-#ifdef _LP64
- if (!UseCompressedOops && !VM_Version::supports_atomic_getset8()) {
- return false;
- }
- if (UseCompressedOops && !VM_Version::supports_atomic_getset4()) {
- return false;
- }
-#else
- if (!VM_Version::supports_atomic_getset4()) {
- return false;
- }
-#endif
- return append_unsafe_get_and_set_obj(callee, false);
-
- case vmIntrinsics::_Reference_get:
- // Use the intrinsic version of Reference.get() so that the value in
- // the referent field can be registered by the G1 pre-barrier code.
- // Also to prevent commoning reads from this field across safepoint
- // since GC can change its value.
- preserves_state = true;
- break;
-
- case vmIntrinsics::_updateCRC32:
- case vmIntrinsics::_updateBytesCRC32:
- case vmIntrinsics::_updateByteBufferCRC32:
- if (!UseCRC32Intrinsics) return false;
- cantrap = false;
- preserves_state = true;
- break;
-
- case vmIntrinsics::_loadFence :
- case vmIntrinsics::_storeFence:
- case vmIntrinsics::_fullFence :
- break;
-
- case vmIntrinsics::_dgemm_dgemm:
- if (!UseF2jBLASIntrinsics || (StubRoutines::dgemmDgemm() == NULL)) {
- return false;
- }
- cantrap = false;
- preserves_state = true;
- break;
-
- case vmIntrinsics::_dgemv_dgemv:
- if (!UseF2jBLASIntrinsics || (StubRoutines::dgemvDgemv() == NULL)) return false;
- cantrap = false;
- preserves_state = true;
- break;
-
- default : return false; // do not inline
- }
// create intrinsic node
const bool has_receiver = !callee->is_static();
ValueType* result_type = as_ValueType(callee->return_type());
@@ -3713,8 +3556,10 @@ bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
}
}
- Intrinsic* result = new Intrinsic(result_type, id, args, has_receiver, state_before,
- preserves_state, cantrap);
+ Intrinsic* result = new Intrinsic(result_type, callee->intrinsic_id(),
+ args, has_receiver, state_before,
+ vmIntrinsics::preserves_state(id),
+ vmIntrinsics::can_trap(id));
// append instruction & push result
Value value = append_split(result);
if (result_type != voidType) push(result_type, value);
@@ -3722,8 +3567,22 @@ bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
if (callee != method() && profile_return() && result_type->is_object_kind()) {
profile_return_type(result, callee);
}
+}
- // done
+bool GraphBuilder::try_inline_intrinsics(ciMethod* callee) {
+  // For calling is_intrinsic_available we do not need to transition to
+  // the '_thread_in_vm' state because is_intrinsic_available()
+  // does not access critical VM-internal data.
+ if (!_compilation->compiler()->is_intrinsic_available(callee->get_Method(), NULL)) {
+ if (!InlineNatives) {
+      // Return false and also set a message that the inlining of
+ // intrinsics has been disabled in general.
+ INLINE_BAILOUT("intrinsic method inlining disabled");
+ } else {
+ return false;
+ }
+ }
+ build_graph_for_intrinsic(callee);
return true;
}
@@ -4323,63 +4182,51 @@ void GraphBuilder::pop_scope_for_jsr() {
_scope_data = scope_data()->parent();
}
-bool GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
- if (InlineUnsafeOps) {
- Values* args = state()->pop_arguments(callee->arg_size());
- null_check(args->at(0));
- Instruction* offset = args->at(2);
+void GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
+ Values* args = state()->pop_arguments(callee->arg_size());
+ null_check(args->at(0));
+ Instruction* offset = args->at(2);
#ifndef _LP64
- offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
+ offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
#endif
- Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
- push(op->type(), op);
- compilation()->set_has_unsafe_access(true);
- }
- return InlineUnsafeOps;
+ Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
+ push(op->type(), op);
+ compilation()->set_has_unsafe_access(true);
}
-bool GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
- if (InlineUnsafeOps) {
- Values* args = state()->pop_arguments(callee->arg_size());
- null_check(args->at(0));
- Instruction* offset = args->at(2);
+void GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
+ Values* args = state()->pop_arguments(callee->arg_size());
+ null_check(args->at(0));
+ Instruction* offset = args->at(2);
#ifndef _LP64
- offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
+ offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
#endif
- Value val = args->at(3);
- if (t == T_BOOLEAN) {
- Value mask = append(new Constant(new IntConstant(1)));
- val = append(new LogicOp(Bytecodes::_iand, val, mask));
- }
- Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, val, is_volatile));
- compilation()->set_has_unsafe_access(true);
- kill_all();
+ Value val = args->at(3);
+ if (t == T_BOOLEAN) {
+ Value mask = append(new Constant(new IntConstant(1)));
+ val = append(new LogicOp(Bytecodes::_iand, val, mask));
}
- return InlineUnsafeOps;
+ Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, val, is_volatile));
+ compilation()->set_has_unsafe_access(true);
+ kill_all();
}
-bool GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
- if (InlineUnsafeOps) {
- Values* args = state()->pop_arguments(callee->arg_size());
- null_check(args->at(0));
- Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
- push(op->type(), op);
- compilation()->set_has_unsafe_access(true);
- }
- return InlineUnsafeOps;
+void GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
+ Values* args = state()->pop_arguments(callee->arg_size());
+ null_check(args->at(0));
+ Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
+ push(op->type(), op);
+ compilation()->set_has_unsafe_access(true);
}
-bool GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
- if (InlineUnsafeOps) {
- Values* args = state()->pop_arguments(callee->arg_size());
- null_check(args->at(0));
- Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
- compilation()->set_has_unsafe_access(true);
- }
- return InlineUnsafeOps;
+void GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
+ Values* args = state()->pop_arguments(callee->arg_size());
+ null_check(args->at(0));
+ Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
+ compilation()->set_has_unsafe_access(true);
}
@@ -4495,21 +4342,18 @@ void GraphBuilder::print_inlining(ciMethod* callee, const char* msg, bool succes
}
}
-bool GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
- if (InlineUnsafeOps) {
- Values* args = state()->pop_arguments(callee->arg_size());
- BasicType t = callee->return_type()->basic_type();
- null_check(args->at(0));
- Instruction* offset = args->at(2);
+void GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
+ Values* args = state()->pop_arguments(callee->arg_size());
+ BasicType t = callee->return_type()->basic_type();
+ null_check(args->at(0));
+ Instruction* offset = args->at(2);
#ifndef _LP64
- offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
+ offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
#endif
- Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
- compilation()->set_has_unsafe_access(true);
- kill_all();
- push(op->type(), op);
- }
- return InlineUnsafeOps;
+ Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
+ compilation()->set_has_unsafe_access(true);
+ kill_all();
+ push(op->type(), op);
}
#ifndef PRODUCT
diff --git a/hotspot/src/share/vm/c1/c1_GraphBuilder.hpp b/hotspot/src/share/vm/c1/c1_GraphBuilder.hpp
index 2caf6142e..4896985c8 100644
--- a/hotspot/src/share/vm/c1/c1_GraphBuilder.hpp
+++ b/hotspot/src/share/vm/c1/c1_GraphBuilder.hpp
@@ -339,6 +339,8 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
void inline_sync_entry(Value lock, BlockBegin* sync_handler);
void fill_sync_handler(Value lock, BlockBegin* sync_handler, bool default_handler = false);
+ void build_graph_for_intrinsic(ciMethod* callee);
+
// inliners
bool try_inline( ciMethod* callee, bool holder_known, Bytecodes::Code bc = Bytecodes::_illegal, Value receiver = NULL);
bool try_inline_intrinsics(ciMethod* callee);
@@ -364,13 +366,13 @@ class GraphBuilder VALUE_OBJ_CLASS_SPEC {
void pop_scope();
void pop_scope_for_jsr();
- bool append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile);
- bool append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile);
- bool append_unsafe_get_raw(ciMethod* callee, BasicType t);
- bool append_unsafe_put_raw(ciMethod* callee, BasicType t);
+ void append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile);
+ void append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile);
+ void append_unsafe_get_raw(ciMethod* callee, BasicType t);
+ void append_unsafe_put_raw(ciMethod* callee, BasicType t);
bool append_unsafe_prefetch(ciMethod* callee, bool is_store, bool is_static);
void append_unsafe_CAS(ciMethod* callee);
- bool append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add);
+ void append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add);
void print_inlining(ciMethod* callee, const char* msg = NULL, bool success = true);
diff --git a/hotspot/src/share/vm/classfile/vmSymbols.cpp b/hotspot/src/share/vm/classfile/vmSymbols.cpp
index 34514022a..9633e4609 100644
--- a/hotspot/src/share/vm/classfile/vmSymbols.cpp
+++ b/hotspot/src/share/vm/classfile/vmSymbols.cpp
@@ -335,12 +335,338 @@ bool vmIntrinsics::should_be_pinned(vmIntrinsics::ID id) {
case vmIntrinsics::_nanoTime:
case vmIntrinsics::_dgemm_dgemm:
case vmIntrinsics::_dgemv_dgemv:
+ case vmIntrinsics::_f2jblas_ddot:
return true;
default:
return false;
}
}
+bool vmIntrinsics::preserves_state(vmIntrinsics::ID id) {
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+ switch(id) {
+#ifdef JFR_HAVE_INTRINSICS
+ case vmIntrinsics::_counterTime:
+#endif
+ case vmIntrinsics::_currentTimeMillis:
+ case vmIntrinsics::_nanoTime:
+ case vmIntrinsics::_floatToRawIntBits:
+ case vmIntrinsics::_intBitsToFloat:
+ case vmIntrinsics::_doubleToRawLongBits:
+ case vmIntrinsics::_longBitsToDouble:
+ case vmIntrinsics::_getClass:
+ case vmIntrinsics::_isInstance:
+ case vmIntrinsics::_isPrimitive:
+ case vmIntrinsics::_currentThread:
+ case vmIntrinsics::_dabs:
+ case vmIntrinsics::_dsqrt:
+ case vmIntrinsics::_dsin:
+ case vmIntrinsics::_dcos:
+ case vmIntrinsics::_dtan:
+ case vmIntrinsics::_dlog:
+ case vmIntrinsics::_dlog10:
+ case vmIntrinsics::_dexp:
+ case vmIntrinsics::_dpow:
+ case vmIntrinsics::_checkIndex:
+ case vmIntrinsics::_Reference_get:
+ case vmIntrinsics::_updateCRC32:
+ case vmIntrinsics::_updateBytesCRC32:
+ case vmIntrinsics::_updateByteBufferCRC32:
+ case vmIntrinsics::_dgemm_dgemm:
+ case vmIntrinsics::_dgemv_dgemv:
+ case vmIntrinsics::_f2jblas_ddot:
+ return true;
+ default:
+ return false;
+ }
+}
+
+bool vmIntrinsics::can_trap(vmIntrinsics::ID id) {
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+ switch(id) {
+#ifdef JFR_HAVE_INTRINSICS
+ case vmIntrinsics::_getClassId:
+ case vmIntrinsics::_counterTime:
+#endif
+ case vmIntrinsics::_currentTimeMillis:
+ case vmIntrinsics::_nanoTime:
+ case vmIntrinsics::_floatToRawIntBits:
+ case vmIntrinsics::_intBitsToFloat:
+ case vmIntrinsics::_doubleToRawLongBits:
+ case vmIntrinsics::_longBitsToDouble:
+ case vmIntrinsics::_currentThread:
+ case vmIntrinsics::_dabs:
+ case vmIntrinsics::_dsqrt:
+ case vmIntrinsics::_dsin:
+ case vmIntrinsics::_dcos:
+ case vmIntrinsics::_dtan:
+ case vmIntrinsics::_dlog:
+ case vmIntrinsics::_dlog10:
+ case vmIntrinsics::_dexp:
+ case vmIntrinsics::_dpow:
+ case vmIntrinsics::_updateCRC32:
+ case vmIntrinsics::_updateBytesCRC32:
+ case vmIntrinsics::_updateByteBufferCRC32:
+ case vmIntrinsics::_dgemm_dgemm:
+ case vmIntrinsics::_dgemv_dgemv:
+ case vmIntrinsics::_f2jblas_ddot:
+ return false;
+ default:
+ return true;
+ }
+}
+
+bool vmIntrinsics::does_virtual_dispatch(vmIntrinsics::ID id) {
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+ switch(id) {
+ case vmIntrinsics::_hashCode:
+ case vmIntrinsics::_clone:
+ return true;
+ default:
+ return false;
+ }
+}
+
+int vmIntrinsics::predicates_needed(vmIntrinsics::ID id) {
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+ switch (id) {
+ case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
+ case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
+ case vmIntrinsics::_counterMode_AESCrypt:
+ return 1;
+ case vmIntrinsics::_digestBase_implCompressMB:
+ return 3;
+ default:
+ return 0;
+ }
+}
+
+bool vmIntrinsics::is_disabled_by_flags(vmIntrinsics::ID id) {
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+ switch (id) {
+ case vmIntrinsics::_isInstance:
+ case vmIntrinsics::_isAssignableFrom:
+ case vmIntrinsics::_getModifiers:
+ case vmIntrinsics::_isInterface:
+ case vmIntrinsics::_isArray:
+ case vmIntrinsics::_isPrimitive:
+ case vmIntrinsics::_getSuperclass:
+ case vmIntrinsics::_getLength:
+ case vmIntrinsics::_newArray:
+ if (!InlineClassNatives) return true;
+ break;
+ case vmIntrinsics::_currentThread:
+ case vmIntrinsics::_isInterrupted:
+ if (!InlineThreadNatives) return true;
+ break;
+ case vmIntrinsics::_floatToRawIntBits:
+ case vmIntrinsics::_intBitsToFloat:
+ case vmIntrinsics::_doubleToRawLongBits:
+ case vmIntrinsics::_longBitsToDouble:
+ case vmIntrinsics::_dabs:
+ case vmIntrinsics::_dsqrt:
+ case vmIntrinsics::_dsin:
+ case vmIntrinsics::_dcos:
+ case vmIntrinsics::_dtan:
+ case vmIntrinsics::_dlog:
+ case vmIntrinsics::_dexp:
+ case vmIntrinsics::_dpow:
+ case vmIntrinsics::_dlog10:
+ case vmIntrinsics::_datan2:
+ case vmIntrinsics::_min:
+ case vmIntrinsics::_max:
+ case vmIntrinsics::_floatToIntBits:
+ case vmIntrinsics::_doubleToLongBits:
+ if (!InlineMathNatives) return true;
+ break;
+ case vmIntrinsics::_arraycopy:
+ if (!InlineArrayCopy) return true;
+ break;
+ case vmIntrinsics::_updateCRC32:
+ case vmIntrinsics::_updateBytesCRC32:
+ case vmIntrinsics::_updateByteBufferCRC32:
+ if (!UseCRC32Intrinsics) return true;
+ break;
+ case vmIntrinsics::_dgemm_dgemm:
+ if (!UseF2jBLASIntrinsics || (StubRoutines::dgemmDgemm() == NULL)) return true;
+ break;
+ case vmIntrinsics::_dgemv_dgemv:
+ if (!UseF2jBLASIntrinsics || (StubRoutines::dgemvDgemv() == NULL)) return true;
+ break;
+ case vmIntrinsics::_getObject:
+ case vmIntrinsics::_getBoolean:
+ case vmIntrinsics::_getByte:
+ case vmIntrinsics::_getShort:
+ case vmIntrinsics::_getChar:
+ case vmIntrinsics::_getInt:
+ case vmIntrinsics::_getLong:
+ case vmIntrinsics::_getFloat:
+ case vmIntrinsics::_getDouble:
+ case vmIntrinsics::_putObject:
+ case vmIntrinsics::_putBoolean:
+ case vmIntrinsics::_putByte:
+ case vmIntrinsics::_putShort:
+ case vmIntrinsics::_putChar:
+ case vmIntrinsics::_putInt:
+ case vmIntrinsics::_putLong:
+ case vmIntrinsics::_putFloat:
+ case vmIntrinsics::_putDouble:
+ case vmIntrinsics::_getObjectVolatile:
+ case vmIntrinsics::_getBooleanVolatile:
+ case vmIntrinsics::_getByteVolatile:
+ case vmIntrinsics::_getShortVolatile:
+ case vmIntrinsics::_getCharVolatile:
+ case vmIntrinsics::_getIntVolatile:
+ case vmIntrinsics::_getLongVolatile:
+ case vmIntrinsics::_getFloatVolatile:
+ case vmIntrinsics::_getDoubleVolatile:
+ case vmIntrinsics::_putObjectVolatile:
+ case vmIntrinsics::_putBooleanVolatile:
+ case vmIntrinsics::_putByteVolatile:
+ case vmIntrinsics::_putShortVolatile:
+ case vmIntrinsics::_putCharVolatile:
+ case vmIntrinsics::_putIntVolatile:
+ case vmIntrinsics::_putLongVolatile:
+ case vmIntrinsics::_putFloatVolatile:
+ case vmIntrinsics::_putDoubleVolatile:
+ case vmIntrinsics::_getByte_raw:
+ case vmIntrinsics::_getShort_raw:
+ case vmIntrinsics::_getChar_raw:
+ case vmIntrinsics::_getInt_raw:
+ case vmIntrinsics::_getLong_raw:
+ case vmIntrinsics::_getFloat_raw:
+ case vmIntrinsics::_getDouble_raw:
+ case vmIntrinsics::_putByte_raw:
+ case vmIntrinsics::_putShort_raw:
+ case vmIntrinsics::_putChar_raw:
+ case vmIntrinsics::_putInt_raw:
+ case vmIntrinsics::_putLong_raw:
+ case vmIntrinsics::_putFloat_raw:
+ case vmIntrinsics::_putDouble_raw:
+ case vmIntrinsics::_prefetchRead:
+ case vmIntrinsics::_prefetchWrite:
+ case vmIntrinsics::_prefetchReadStatic:
+ case vmIntrinsics::_prefetchWriteStatic:
+ case vmIntrinsics::_putOrderedObject:
+ case vmIntrinsics::_putOrderedLong:
+ case vmIntrinsics::_putOrderedInt:
+ case vmIntrinsics::_getAndAddInt:
+ case vmIntrinsics::_getAndAddLong:
+ case vmIntrinsics::_getAndSetInt:
+ case vmIntrinsics::_getAndSetLong:
+ case vmIntrinsics::_getAndSetObject:
+ if (!InlineUnsafeOps) return true;
+ break;
+ case vmIntrinsics::_allocateInstance:
+ case vmIntrinsics::_getAddress_raw:
+ case vmIntrinsics::_putAddress_raw:
+ if (!InlineUnsafeOps) return true;
+ break;
+ case vmIntrinsics::_hashCode:
+ if (!InlineObjectHash) return true;
+ break;
+ case vmIntrinsics::_aescrypt_encryptBlock:
+ case vmIntrinsics::_aescrypt_decryptBlock:
+ if (!UseAESIntrinsics) return true;
+ break;
+ case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
+ case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
+ if (!UseAESIntrinsics) return true;
+ break;
+ case vmIntrinsics::_counterMode_AESCrypt:
+ if (!UseAESCTRIntrinsics) {
+ return true;
+ }
+ break;
+ case vmIntrinsics::_sha_implCompress:
+ if (!UseSHA1Intrinsics) return true;
+ break;
+ case vmIntrinsics::_sha2_implCompress:
+ if (!UseSHA256Intrinsics) return true;
+ break;
+ case vmIntrinsics::_sha5_implCompress:
+ if (!UseSHA512Intrinsics) return true;
+ break;
+ case vmIntrinsics::_digestBase_implCompressMB:
+ if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return true;
+ break;
+ case vmIntrinsics::_ghash_processBlocks:
+ if (!UseGHASHIntrinsics) return true;
+ break;
+ case vmIntrinsics::_copyMemory:
+ if (!InlineArrayCopy || !InlineUnsafeOps) return true;
+ break;
+#ifdef COMPILER1
+ case vmIntrinsics::_checkIndex:
+ if (!InlineNIOCheckIndex) return true;
+ break;
+#endif // COMPILER1
+#ifdef COMPILER2
+ case vmIntrinsics::_clone:
+ case vmIntrinsics::_copyOf:
+ case vmIntrinsics::_copyOfRange:
+ // These intrinsics use both the objectcopy and the arraycopy
+ // intrinsic mechanism.
+ if (!InlineObjectCopy || !InlineArrayCopy) return true;
+ break;
+ case vmIntrinsics::_compareTo:
+ if (!SpecialStringCompareTo) return true;
+ break;
+ case vmIntrinsics::_indexOf:
+ if (!SpecialStringIndexOf) return true;
+ break;
+ case vmIntrinsics::_equals:
+ if (!SpecialStringEquals) return true;
+ break;
+ case vmIntrinsics::_equalsC:
+ if (!SpecialArraysEquals) return true;
+ break;
+ case vmIntrinsics::_encodeISOArray:
+ if (!SpecialEncodeISOArray) return true;
+ break;
+ case vmIntrinsics::_getCallerClass:
+ if (!InlineReflectionGetCallerClass) return true;
+ break;
+ case vmIntrinsics::_multiplyToLen:
+ if (!UseMultiplyToLenIntrinsic) return true;
+ break;
+ case vmIntrinsics::_squareToLen:
+ if (!UseSquareToLenIntrinsic) return true;
+ break;
+ case vmIntrinsics::_mulAdd:
+ if (!UseMulAddIntrinsic) return true;
+ break;
+ case vmIntrinsics::_montgomeryMultiply:
+ if (!UseMontgomeryMultiplyIntrinsic) return true;
+ break;
+ case vmIntrinsics::_montgomerySquare:
+ if (!UseMontgomerySquareIntrinsic) return true;
+ break;
+ case vmIntrinsics::_addExactI:
+ case vmIntrinsics::_addExactL:
+ case vmIntrinsics::_decrementExactI:
+ case vmIntrinsics::_decrementExactL:
+ case vmIntrinsics::_incrementExactI:
+ case vmIntrinsics::_incrementExactL:
+ case vmIntrinsics::_multiplyExactI:
+ case vmIntrinsics::_multiplyExactL:
+ case vmIntrinsics::_negateExactI:
+ case vmIntrinsics::_negateExactL:
+ case vmIntrinsics::_subtractExactI:
+ case vmIntrinsics::_subtractExactL:
+ if (!UseMathExactIntrinsics || !InlineMathNatives) return true;
+ break;
+ case vmIntrinsics::_f2jblas_ddot:
+ if (!UseF2jBLASIntrinsics || (StubRoutines::ddotF2jBLAS() == NULL)) return true;
+ break;
+#endif // COMPILER2
+ default:
+ return false;
+ }
+
+ return false;
+}
+
#define VM_INTRINSIC_INITIALIZE(id, klass, name, sig, flags) #id "\0"
static const char* vm_intrinsic_name_bodies =
VM_INTRINSICS_DO(VM_INTRINSIC_INITIALIZE,
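
As a concrete illustration of the queries defined above (a sketch, not part of the patch): the per-intrinsic properties that C1's GraphBuilder previously tracked in local booleans are now answered by vmIntrinsics, and the flag handling is shared between the compilers. For Math.sin, for example:

    // Illustrative sketch; the assertions mirror the case lists of preserves_state(),
    // can_trap() and is_disabled_by_flags() above.
    vmIntrinsics::ID id = vmIntrinsics::_dsin;
    assert(vmIntrinsics::preserves_state(id), "listed as state-preserving for C1");
    assert(!vmIntrinsics::can_trap(id), "listed as non-trapping for C1");
    // With -XX:-InlineMathNatives, is_disabled_by_flags(id) returns true, so neither
    // C1 nor C2 reports the intrinsic as available.
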
diff --git a/hotspot/src/share/vm/classfile/vmSymbols.hpp b/hotspot/src/share/vm/classfile/vmSymbols.hpp
index af7b83e28..242660259 100644
--- a/hotspot/src/share/vm/classfile/vmSymbols.hpp
+++ b/hotspot/src/share/vm/classfile/vmSymbols.hpp
@@ -1339,6 +1339,26 @@ public:
static ID for_raw_conversion(BasicType src, BasicType dest);
static bool should_be_pinned(vmIntrinsics::ID id);
+
+ // The methods below provide information related to compiling intrinsics.
+
+ // (1) Information needed by the C1 compiler.
+
+ static bool preserves_state(vmIntrinsics::ID id);
+ static bool can_trap(vmIntrinsics::ID id);
+
+ // (2) Information needed by the C2 compiler.
+
+ // Returns true if the intrinsic for method 'method' will perform a virtual dispatch.
+ static bool does_virtual_dispatch(vmIntrinsics::ID id);
+ // A return value larger than 0 indicates that the intrinsic for method
+ // 'method' requires predicated logic.
+ static int predicates_needed(vmIntrinsics::ID id);
+
+ // Returns true if an intrinsic is disabled by command-line flags and
+ // false otherwise. Implements functionality common to the C1
+ // and the C2 compiler.
+ static bool is_disabled_by_flags(vmIntrinsics::ID id);
};
#endif // SHARE_VM_CLASSFILE_VMSYMBOLS_HPP
diff --git a/hotspot/src/share/vm/compiler/abstractCompiler.hpp b/hotspot/src/share/vm/compiler/abstractCompiler.hpp
index 11aea60a2..2f94b626b 100644
--- a/hotspot/src/share/vm/compiler/abstractCompiler.hpp
+++ b/hotspot/src/share/vm/compiler/abstractCompiler.hpp
@@ -54,6 +54,59 @@ class AbstractCompiler : public CHeapObj<mtCompiler> {
virtual bool supports_native() { return true; }
virtual bool supports_osr () { return true; }
virtual bool can_compile_method(methodHandle method) { return true; }
+
+ // Determine if the current compiler provides an intrinsic
+ // for method 'method'. An intrinsic is available if:
+ // - the intrinsic is enabled (by using the appropriate command-line flag) and
+ // - the platform on which the VM is running supports the intrinsic
+ // (i.e., the platform provides the instructions necessary for the compiler
+ // to generate the intrinsic code).
+ //
+ // The second parameter, 'compilation_context', is needed to implement functionality
+ // related to the DisableIntrinsic command-line flag. The DisableIntrinsic flag can
+  // be used to prohibit the C2 compiler (but not the C1 compiler) from using an intrinsic.
+ // There are three ways to disable an intrinsic using the DisableIntrinsic flag:
+ //
+ // (1) -XX:DisableIntrinsic=_hashCode,_getClass
+ // Disables intrinsification of _hashCode and _getClass globally
+  //     (i.e., the intrinsified versions of the methods will not be used at all).
+ // (2) -XX:CompileCommand=option,aClass::aMethod,ccstr,DisableIntrinsic,_hashCode
+ // Disables intrinsification of _hashCode if it is called from
+ // aClass::aMethod (but not for any other call site of _hashCode)
+ // (3) -XX:CompileCommand=option,java.lang.ref.Reference::get,ccstr,DisableIntrinsic,_Reference_get
+ // Some methods are not compiled by C2. Instead, the C2 compiler
+  //     directly returns the intrinsified version of these methods.
+ // The command above forces C2 to compile _Reference_get, but
+ // allows using the intrinsified version of _Reference_get at all
+ // other call sites.
+ //
+  // Of the modes above, (1) disables intrinsics globally, while (2) and (3)
+ // disable intrinsics on a per-method basis. In cases (2) and (3) the
+ // compilation context is aClass::aMethod and java.lang.ref.Reference::get,
+ // respectively.
+ virtual bool is_intrinsic_available(methodHandle /* method */, methodHandle /* compilation_context */) {
+ return false;
+ }
+
+ // Determines if an intrinsic is supported by the compiler, that is,
+ // the compiler provides the instructions necessary to generate
+ // the intrinsic code for method 'method'.
+ //
+ // The 'is_intrinsic_supported' method is a white list, that is,
+ // by default no intrinsics are supported by a compiler except
+ // the ones listed in the method. Overriding methods should conform
+ // to this behavior.
+ virtual bool is_intrinsic_supported(methodHandle /* method */) {
+ return false;
+ }
+
+ // Implements compiler-specific processing of command-line flags.
+ // Processing of command-line flags common to all compilers is implemented
+  // in vmIntrinsics::is_disabled_by_flags().
+ virtual bool is_intrinsic_disabled_by_flag(methodHandle /* method */) {
+ return false;
+ }
+
#if defined(TIERED) || ( !defined(COMPILER1) && !defined(COMPILER2) && !defined(SHARK))
virtual bool is_c1 () { return false; }
virtual bool is_c2 () { return false; }
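
To make the contract of the new virtuals concrete, here is a hypothetical compiler subclass (not part of the patch, with the unrelated pure-virtual AbstractCompiler methods such as name() and initialize() omitted); the single supported intrinsic is chosen arbitrarily for illustration:

    // Hypothetical sketch: a compiler opting into the availability protocol.
    class ToyCompiler : public AbstractCompiler {
     public:
      // Whitelist: by default nothing is supported; this toy compiler supports only _dabs.
      virtual bool is_intrinsic_supported(methodHandle method) {
        return method->intrinsic_id() == vmIntrinsics::_dabs;
      }
      // Reuse the flag processing shared by C1 and C2.
      virtual bool is_intrinsic_disabled_by_flag(methodHandle method) {
        return vmIntrinsics::is_disabled_by_flags(method->intrinsic_id());
      }
      // Available = supported and not disabled; this toy compiler ignores the context.
      virtual bool is_intrinsic_available(methodHandle method, methodHandle /* compilation_context */) {
        return is_intrinsic_supported(method) && !is_intrinsic_disabled_by_flag(method);
      }
    };
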
diff --git a/hotspot/src/share/vm/opto/c2compiler.cpp b/hotspot/src/share/vm/opto/c2compiler.cpp
index 137f49600..64de417f6 100644
--- a/hotspot/src/share/vm/opto/c2compiler.cpp
+++ b/hotspot/src/share/vm/opto/c2compiler.cpp
@@ -165,3 +165,350 @@ void C2Compiler::compile_method(ciEnv* env, ciMethod* target, int entry_bci) {
void C2Compiler::print_timers() {
// do nothing
}
+
+bool C2Compiler::is_intrinsic_available(methodHandle method, methodHandle compilation_context) {
+ // Assume a non-virtual dispatch. A virtual dispatch is
+ // possible for only a limited set of available intrinsics whereas
+ // a non-virtual dispatch is possible for all available intrinsics.
+ return is_intrinsic_supported(method, false) &&
+ !is_intrinsic_disabled_by_flag(method, compilation_context);
+}
+
+bool C2Compiler::is_intrinsic_supported(methodHandle method, bool is_virtual) {
+ vmIntrinsics::ID id = method->intrinsic_id();
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+
+ if (id < vmIntrinsics::FIRST_ID || id >= vmIntrinsics::LAST_COMPILER_INLINE) {
+ return false;
+ }
+
+  // Only the Object.hashCode and Object.clone intrinsics also implement a virtual
+  // dispatch, because calls to these methods are expensive and both methods are
+  // frequently overridden. All other intrinsics implement only a non-virtual
+  // dispatch.
+ if (is_virtual) {
+ switch (id) {
+ case vmIntrinsics::_hashCode:
+ case vmIntrinsics::_clone:
+ break;
+ default:
+ return false;
+ }
+ }
+
+ switch (id) {
+ case vmIntrinsics::_compareTo:
+ if (!Matcher::match_rule_supported(Op_StrComp)) return false;
+ break;
+ case vmIntrinsics::_equals:
+ if (!Matcher::match_rule_supported(Op_StrEquals)) return false;
+ break;
+ case vmIntrinsics::_equalsC:
+ if (!Matcher::match_rule_supported(Op_AryEq)) return false;
+ break;
+ case vmIntrinsics::_copyMemory:
+ if (StubRoutines::unsafe_arraycopy() == NULL) return false;
+ break;
+ case vmIntrinsics::_encodeISOArray:
+ if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return false;
+ break;
+ case vmIntrinsics::_bitCount_i:
+ if (!Matcher::match_rule_supported(Op_PopCountI)) return false;
+ break;
+ case vmIntrinsics::_bitCount_l:
+ if (!Matcher::match_rule_supported(Op_PopCountL)) return false;
+ break;
+ case vmIntrinsics::_numberOfLeadingZeros_i:
+ if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return false;
+ break;
+ case vmIntrinsics::_numberOfLeadingZeros_l:
+ if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return false;
+ break;
+ case vmIntrinsics::_numberOfTrailingZeros_i:
+ if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return false;
+ break;
+ case vmIntrinsics::_numberOfTrailingZeros_l:
+ if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return false;
+ break;
+ case vmIntrinsics::_reverseBytes_c:
+ if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return false;
+ break;
+ case vmIntrinsics::_reverseBytes_s:
+ if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return false;
+ break;
+ case vmIntrinsics::_reverseBytes_i:
+ if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return false;
+ break;
+ case vmIntrinsics::_reverseBytes_l:
+ if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return false;
+ break;
+ case vmIntrinsics::_compareAndSwapObject:
+#ifdef _LP64
+ if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return false;
+#endif
+ break;
+ case vmIntrinsics::_compareAndSwapLong:
+ if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return false;
+ break;
+ case vmIntrinsics::_getAndAddInt:
+ if (!Matcher::match_rule_supported(Op_GetAndAddI)) return false;
+ break;
+ case vmIntrinsics::_getAndAddLong:
+ if (!Matcher::match_rule_supported(Op_GetAndAddL)) return false;
+ break;
+ case vmIntrinsics::_getAndSetInt:
+ if (!Matcher::match_rule_supported(Op_GetAndSetI)) return false;
+ break;
+ case vmIntrinsics::_getAndSetLong:
+ if (!Matcher::match_rule_supported(Op_GetAndSetL)) return false;
+ break;
+ case vmIntrinsics::_getAndSetObject:
+#ifdef _LP64
+ if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return false;
+ if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return false;
+ break;
+#else
+ if (!Matcher::match_rule_supported(Op_GetAndSetP)) return false;
+ break;
+#endif
+ case vmIntrinsics::_incrementExactI:
+ case vmIntrinsics::_addExactI:
+ if (!Matcher::match_rule_supported(Op_OverflowAddI)) return false;
+ break;
+ case vmIntrinsics::_incrementExactL:
+ case vmIntrinsics::_addExactL:
+ if (!Matcher::match_rule_supported(Op_OverflowAddL)) return false;
+ break;
+ case vmIntrinsics::_decrementExactI:
+ case vmIntrinsics::_subtractExactI:
+ if (!Matcher::match_rule_supported(Op_OverflowSubI)) return false;
+ break;
+ case vmIntrinsics::_decrementExactL:
+ case vmIntrinsics::_subtractExactL:
+ if (!Matcher::match_rule_supported(Op_OverflowSubL)) return false;
+ break;
+ case vmIntrinsics::_negateExactI:
+ if (!Matcher::match_rule_supported(Op_OverflowSubI)) return false;
+ break;
+ case vmIntrinsics::_negateExactL:
+ if (!Matcher::match_rule_supported(Op_OverflowSubL)) return false;
+ break;
+ case vmIntrinsics::_multiplyExactI:
+ if (!Matcher::match_rule_supported(Op_OverflowMulI)) return false;
+ break;
+ case vmIntrinsics::_multiplyExactL:
+ if (!Matcher::match_rule_supported(Op_OverflowMulL)) return false;
+ break;
+ case vmIntrinsics::_getCallerClass:
+ if (SystemDictionary::reflect_CallerSensitive_klass() == NULL) return false;
+ break;
+ case vmIntrinsics::_hashCode:
+ case vmIntrinsics::_identityHashCode:
+ case vmIntrinsics::_getClass:
+ case vmIntrinsics::_dsin:
+ case vmIntrinsics::_dcos:
+ case vmIntrinsics::_dtan:
+ case vmIntrinsics::_dabs:
+ case vmIntrinsics::_datan2:
+ case vmIntrinsics::_dsqrt:
+ case vmIntrinsics::_dexp:
+ case vmIntrinsics::_dlog:
+ case vmIntrinsics::_dlog10:
+ case vmIntrinsics::_dpow:
+ case vmIntrinsics::_min:
+ case vmIntrinsics::_max:
+ case vmIntrinsics::_arraycopy:
+ case vmIntrinsics::_indexOf:
+ case vmIntrinsics::_getObject:
+ case vmIntrinsics::_getBoolean:
+ case vmIntrinsics::_getByte:
+ case vmIntrinsics::_getShort:
+ case vmIntrinsics::_getChar:
+ case vmIntrinsics::_getInt:
+ case vmIntrinsics::_getLong:
+ case vmIntrinsics::_getFloat:
+ case vmIntrinsics::_getDouble:
+ case vmIntrinsics::_putObject:
+ case vmIntrinsics::_putBoolean:
+ case vmIntrinsics::_putByte:
+ case vmIntrinsics::_putShort:
+ case vmIntrinsics::_putChar:
+ case vmIntrinsics::_putInt:
+ case vmIntrinsics::_putLong:
+ case vmIntrinsics::_putFloat:
+ case vmIntrinsics::_putDouble:
+ case vmIntrinsics::_getByte_raw:
+ case vmIntrinsics::_getShort_raw:
+ case vmIntrinsics::_getChar_raw:
+ case vmIntrinsics::_getInt_raw:
+ case vmIntrinsics::_getLong_raw:
+ case vmIntrinsics::_getFloat_raw:
+ case vmIntrinsics::_getDouble_raw:
+ case vmIntrinsics::_getAddress_raw:
+ case vmIntrinsics::_putByte_raw:
+ case vmIntrinsics::_putShort_raw:
+ case vmIntrinsics::_putChar_raw:
+ case vmIntrinsics::_putInt_raw:
+ case vmIntrinsics::_putLong_raw:
+ case vmIntrinsics::_putFloat_raw:
+ case vmIntrinsics::_putDouble_raw:
+ case vmIntrinsics::_putAddress_raw:
+ case vmIntrinsics::_prefetchRead:
+ case vmIntrinsics::_prefetchWrite:
+ case vmIntrinsics::_prefetchReadStatic:
+ case vmIntrinsics::_prefetchWriteStatic:
+ case vmIntrinsics::_getObjectVolatile:
+ case vmIntrinsics::_getBooleanVolatile:
+ case vmIntrinsics::_getByteVolatile:
+ case vmIntrinsics::_getShortVolatile:
+ case vmIntrinsics::_getCharVolatile:
+ case vmIntrinsics::_getIntVolatile:
+ case vmIntrinsics::_getLongVolatile:
+ case vmIntrinsics::_getFloatVolatile:
+ case vmIntrinsics::_getDoubleVolatile:
+ case vmIntrinsics::_putObjectVolatile:
+ case vmIntrinsics::_putBooleanVolatile:
+ case vmIntrinsics::_putByteVolatile:
+ case vmIntrinsics::_putShortVolatile:
+ case vmIntrinsics::_putCharVolatile:
+ case vmIntrinsics::_putIntVolatile:
+ case vmIntrinsics::_putLongVolatile:
+ case vmIntrinsics::_putFloatVolatile:
+ case vmIntrinsics::_putDoubleVolatile:
+ case vmIntrinsics::_compareAndSwapInt:
+ case vmIntrinsics::_putOrderedObject:
+ case vmIntrinsics::_putOrderedInt:
+ case vmIntrinsics::_putOrderedLong:
+ case vmIntrinsics::_loadFence:
+ case vmIntrinsics::_storeFence:
+ case vmIntrinsics::_fullFence:
+ case vmIntrinsics::_currentThread:
+ case vmIntrinsics::_isInterrupted:
+#ifdef JFR_HAVE_INTRINSICS
+ case vmIntrinsics::_getClassId:
+ case vmIntrinsics::_getEventWriter:
+ case vmIntrinsics::_counterTime:
+#endif
+ case vmIntrinsics::_currentTimeMillis:
+ case vmIntrinsics::_nanoTime:
+ case vmIntrinsics::_allocateInstance:
+ case vmIntrinsics::_newArray:
+ case vmIntrinsics::_getLength:
+ case vmIntrinsics::_copyOf:
+ case vmIntrinsics::_copyOfRange:
+ case vmIntrinsics::_clone:
+ case vmIntrinsics::_isAssignableFrom:
+ case vmIntrinsics::_isInstance:
+ case vmIntrinsics::_getModifiers:
+ case vmIntrinsics::_isInterface:
+ case vmIntrinsics::_isArray:
+ case vmIntrinsics::_isPrimitive:
+ case vmIntrinsics::_getSuperclass:
+ case vmIntrinsics::_getClassAccessFlags:
+ case vmIntrinsics::_floatToRawIntBits:
+ case vmIntrinsics::_floatToIntBits:
+ case vmIntrinsics::_intBitsToFloat:
+ case vmIntrinsics::_doubleToRawLongBits:
+ case vmIntrinsics::_doubleToLongBits:
+ case vmIntrinsics::_longBitsToDouble:
+ case vmIntrinsics::_Reference_get:
+ case vmIntrinsics::_aescrypt_encryptBlock:
+ case vmIntrinsics::_aescrypt_decryptBlock:
+ case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
+ case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
+ case vmIntrinsics::_counterMode_AESCrypt:
+ case vmIntrinsics::_sha_implCompress:
+ case vmIntrinsics::_sha2_implCompress:
+ case vmIntrinsics::_sha5_implCompress:
+ case vmIntrinsics::_digestBase_implCompressMB:
+ case vmIntrinsics::_multiplyToLen:
+ case vmIntrinsics::_squareToLen:
+ case vmIntrinsics::_mulAdd:
+ case vmIntrinsics::_montgomeryMultiply:
+ case vmIntrinsics::_montgomerySquare:
+ case vmIntrinsics::_ghash_processBlocks:
+ case vmIntrinsics::_updateCRC32:
+ case vmIntrinsics::_updateBytesCRC32:
+ case vmIntrinsics::_updateByteBufferCRC32:
+ case vmIntrinsics::_profileBoolean:
+ case vmIntrinsics::_dgemm_dgemm:
+ case vmIntrinsics::_dgemv_dgemv:
+ case vmIntrinsics::_f2jblas_ddot:
+ break;
+ default:
+ return false;
+ }
+ return true;
+}
+
+bool C2Compiler::is_intrinsic_disabled_by_flag(methodHandle method, methodHandle compilation_context) {
+ vmIntrinsics::ID id = method->intrinsic_id();
+ assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
+
+ if (vmIntrinsics::is_disabled_by_flags(method->intrinsic_id())) {
+ return true;
+ }
+
+ // Check if the intrinsic corresponding to 'method' has been disabled on
+ // the command line by using the DisableIntrinsic flag (either globally
+ // or on a per-method level, see src/share/vm/compiler/abstractCompiler.hpp
+ // for details).
+ // Usually, the compilation context is the caller of the method 'method'.
+  // The only case in which, for a non-recursive method 'method', the compilation
+  // context is not the caller of 'method' but the method itself is
+  // java.lang.ref.Reference::get.
+ // For java.lang.ref.Reference::get, the intrinsic version is used
+ // instead of the C2-compiled version so that the value in the referent
+ // field can be registered by the G1 pre-barrier code. The intrinsified
+ // version of Reference::get also adds a memory barrier to prevent
+  // commoning reads from the referent field across a safepoint, since the GC
+ // can change the referent field's value. See Compile::Compile()
+ // in src/share/vm/opto/compile.cpp for more details.
+ ccstr disable_intr = NULL;
+ if ((DisableIntrinsic[0] != '\0' && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
+ (!compilation_context.is_null() &&
+ CompilerOracle::has_option_value(compilation_context, "DisableIntrinsic", disable_intr) &&
+ strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)
+ ) {
+ return true;
+ }
+
+ // -XX:-InlineNatives disables nearly all intrinsics except the ones listed in
+ // the following switch statement.
+ if (!InlineNatives) {
+ switch (id) {
+ case vmIntrinsics::_indexOf:
+ case vmIntrinsics::_compareTo:
+ case vmIntrinsics::_equals:
+ case vmIntrinsics::_equalsC:
+ case vmIntrinsics::_getAndAddInt:
+ case vmIntrinsics::_getAndAddLong:
+ case vmIntrinsics::_getAndSetInt:
+ case vmIntrinsics::_getAndSetLong:
+ case vmIntrinsics::_getAndSetObject:
+ case vmIntrinsics::_loadFence:
+ case vmIntrinsics::_storeFence:
+ case vmIntrinsics::_fullFence:
+ case vmIntrinsics::_Reference_get:
+ break;
+ default:
+ return true;
+ }
+ }
+
+ if (!InlineUnsafeOps) {
+ switch (id) {
+ case vmIntrinsics::_loadFence:
+ case vmIntrinsics::_storeFence:
+ case vmIntrinsics::_fullFence:
+ case vmIntrinsics::_compareAndSwapObject:
+ case vmIntrinsics::_compareAndSwapLong:
+ case vmIntrinsics::_compareAndSwapInt:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ return false;
+}
diff --git a/hotspot/src/share/vm/opto/c2compiler.hpp b/hotspot/src/share/vm/opto/c2compiler.hpp
index c1449bc34..8854f1593 100644
--- a/hotspot/src/share/vm/opto/c2compiler.hpp
+++ b/hotspot/src/share/vm/opto/c2compiler.hpp
@@ -53,6 +53,26 @@ public:
// Print compilation timers and statistics
void print_timers();
+
+ // Check the availability of an intrinsic for 'method' given a compilation context.
+ virtual bool is_intrinsic_available(methodHandle method, methodHandle compilation_context);
+
+  // Return true if the intrinsification of a method is supported by the compiler,
+  // assuming a non-virtual dispatch. Return false otherwise.
+ virtual bool is_intrinsic_supported(methodHandle method) {
+ return is_intrinsic_supported(method, false);
+ }
+
+  // Check if the compiler supports an intrinsic for 'method' given the
+  // dispatch mode specified by the 'is_virtual' parameter.
+ virtual bool is_intrinsic_supported(methodHandle method, bool is_virtual);
+
+ // Processing of command-line flags specific to the C2 compiler.
+ virtual bool is_intrinsic_disabled_by_flag(methodHandle method) {
+ return is_intrinsic_disabled_by_flag(method, NULL);
+ }
+
+ virtual bool is_intrinsic_disabled_by_flag(methodHandle method, methodHandle compilation_context);
};
#endif // SHARE_VM_OPTO_C2COMPILER_HPP
diff --git a/hotspot/src/share/vm/opto/library_call.cpp b/hotspot/src/share/vm/opto/library_call.cpp
index 2add82dd1..9ba5201f0 100644
--- a/hotspot/src/share/vm/opto/library_call.cpp
+++ b/hotspot/src/share/vm/opto/library_call.cpp
@@ -30,6 +30,7 @@
#include "jfr/support/jfrIntrinsics.hpp"
#include "oops/objArrayKlass.hpp"
#include "opto/addnode.hpp"
+#include "opto/c2compiler.hpp"
#include "opto/callGenerator.hpp"
#include "opto/cfgnode.hpp"
#include "opto/connode.hpp"
@@ -346,322 +347,35 @@ CallGenerator* Compile::make_vm_intrinsic(ciMethod* m, bool is_virtual) {
vmIntrinsics::ID id = m->intrinsic_id();
assert(id != vmIntrinsics::_none, "must be a VM intrinsic");
- ccstr disable_intr = NULL;
-
- if ((DisableIntrinsic[0] != '\0'
- && strstr(DisableIntrinsic, vmIntrinsics::name_at(id)) != NULL) ||
- (method_has_option_value("DisableIntrinsic", disable_intr)
- && strstr(disable_intr, vmIntrinsics::name_at(id)) != NULL)) {
- // disabled by a user request on the command line:
- // example: -XX:DisableIntrinsic=_hashCode,_getClass
- return NULL;
- }
-
if (!m->is_loaded()) {
- // do not attempt to inline unloaded methods
+ // Do not attempt to inline unloaded methods.
return NULL;
}
- // Only a few intrinsics implement a virtual dispatch.
- // They are expensive calls which are also frequently overridden.
- if (is_virtual) {
- switch (id) {
- case vmIntrinsics::_hashCode:
- case vmIntrinsics::_clone:
- // OK, Object.hashCode and Object.clone intrinsics come in both flavors
- break;
- default:
- return NULL;
- }
- }
+ C2Compiler* compiler = (C2Compiler*)CompileBroker::compiler(CompLevel_full_optimization);
+ bool is_available = false;
- // -XX:-InlineNatives disables nearly all intrinsics:
- if (!InlineNatives) {
- switch (id) {
- case vmIntrinsics::_indexOf:
- case vmIntrinsics::_compareTo:
- case vmIntrinsics::_equals:
- case vmIntrinsics::_equalsC:
- case vmIntrinsics::_getAndAddInt:
- case vmIntrinsics::_getAndAddLong:
- case vmIntrinsics::_getAndSetInt:
- case vmIntrinsics::_getAndSetLong:
- case vmIntrinsics::_getAndSetObject:
- case vmIntrinsics::_loadFence:
- case vmIntrinsics::_storeFence:
- case vmIntrinsics::_fullFence:
- break; // InlineNatives does not control String.compareTo
- case vmIntrinsics::_Reference_get:
- break; // InlineNatives does not control Reference.get
- default:
- return NULL;
- }
+ {
+    // To call is_intrinsic_supported and is_intrinsic_disabled_by_flag,
+    // the compiler must transition to the '_thread_in_vm' state because both
+    // methods access VM-internal data.
+ VM_ENTRY_MARK;
+ methodHandle mh(THREAD, m->get_Method());
+ methodHandle ct(THREAD, method()->get_Method());
+ is_available = compiler->is_intrinsic_supported(mh, is_virtual) &&
+ !compiler->is_intrinsic_disabled_by_flag(mh, ct);
}
- int predicates = 0;
- bool does_virtual_dispatch = false;
-
- switch (id) {
- case vmIntrinsics::_compareTo:
- if (!SpecialStringCompareTo) return NULL;
- if (!Matcher::match_rule_supported(Op_StrComp)) return NULL;
- break;
- case vmIntrinsics::_indexOf:
- if (!SpecialStringIndexOf) return NULL;
- break;
- case vmIntrinsics::_equals:
- if (!SpecialStringEquals) return NULL;
- if (!Matcher::match_rule_supported(Op_StrEquals)) return NULL;
- break;
- case vmIntrinsics::_equalsC:
- if (!SpecialArraysEquals) return NULL;
- if (!Matcher::match_rule_supported(Op_AryEq)) return NULL;
- break;
- case vmIntrinsics::_arraycopy:
- if (!InlineArrayCopy) return NULL;
- break;
- case vmIntrinsics::_copyMemory:
- if (StubRoutines::unsafe_arraycopy() == NULL) return NULL;
- if (!InlineArrayCopy) return NULL;
- break;
- case vmIntrinsics::_hashCode:
- if (!InlineObjectHash) return NULL;
- does_virtual_dispatch = true;
- break;
- case vmIntrinsics::_clone:
- does_virtual_dispatch = true;
- case vmIntrinsics::_copyOf:
- case vmIntrinsics::_copyOfRange:
- if (!InlineObjectCopy) return NULL;
- // These also use the arraycopy intrinsic mechanism:
- if (!InlineArrayCopy) return NULL;
- break;
- case vmIntrinsics::_encodeISOArray:
- if (!SpecialEncodeISOArray) return NULL;
- if (!Matcher::match_rule_supported(Op_EncodeISOArray)) return NULL;
- break;
- case vmIntrinsics::_checkIndex:
- // We do not intrinsify this. The optimizer does fine with it.
- return NULL;
-
- case vmIntrinsics::_getCallerClass:
- if (!UseNewReflection) return NULL;
- if (!InlineReflectionGetCallerClass) return NULL;
- if (SystemDictionary::reflect_CallerSensitive_klass() == NULL) return NULL;
- break;
-
- case vmIntrinsics::_bitCount_i:
- if (!Matcher::match_rule_supported(Op_PopCountI)) return NULL;
- break;
-
- case vmIntrinsics::_bitCount_l:
- if (!Matcher::match_rule_supported(Op_PopCountL)) return NULL;
- break;
-
- case vmIntrinsics::_numberOfLeadingZeros_i:
- if (!Matcher::match_rule_supported(Op_CountLeadingZerosI)) return NULL;
- break;
-
- case vmIntrinsics::_numberOfLeadingZeros_l:
- if (!Matcher::match_rule_supported(Op_CountLeadingZerosL)) return NULL;
- break;
-
- case vmIntrinsics::_numberOfTrailingZeros_i:
- if (!Matcher::match_rule_supported(Op_CountTrailingZerosI)) return NULL;
- break;
-
- case vmIntrinsics::_numberOfTrailingZeros_l:
- if (!Matcher::match_rule_supported(Op_CountTrailingZerosL)) return NULL;
- break;
-
- case vmIntrinsics::_reverseBytes_c:
- if (!Matcher::match_rule_supported(Op_ReverseBytesUS)) return NULL;
- break;
- case vmIntrinsics::_reverseBytes_s:
- if (!Matcher::match_rule_supported(Op_ReverseBytesS)) return NULL;
- break;
- case vmIntrinsics::_reverseBytes_i:
- if (!Matcher::match_rule_supported(Op_ReverseBytesI)) return NULL;
- break;
- case vmIntrinsics::_reverseBytes_l:
- if (!Matcher::match_rule_supported(Op_ReverseBytesL)) return NULL;
- break;
-
- case vmIntrinsics::_Reference_get:
- // Use the intrinsic version of Reference.get() so that the value in
- // the referent field can be registered by the G1 pre-barrier code.
- // Also add memory barrier to prevent commoning reads from this field
- // across safepoint since GC can change it value.
- break;
-
- case vmIntrinsics::_compareAndSwapObject:
-#ifdef _LP64
- if (!UseCompressedOops && !Matcher::match_rule_supported(Op_CompareAndSwapP)) return NULL;
-#endif
- break;
-
- case vmIntrinsics::_compareAndSwapLong:
- if (!Matcher::match_rule_supported(Op_CompareAndSwapL)) return NULL;
- break;
-
- case vmIntrinsics::_getAndAddInt:
- if (!Matcher::match_rule_supported(Op_GetAndAddI)) return NULL;
- break;
-
- case vmIntrinsics::_getAndAddLong:
- if (!Matcher::match_rule_supported(Op_GetAndAddL)) return NULL;
- break;
-
- case vmIntrinsics::_getAndSetInt:
- if (!Matcher::match_rule_supported(Op_GetAndSetI)) return NULL;
- break;
-
- case vmIntrinsics::_getAndSetLong:
- if (!Matcher::match_rule_supported(Op_GetAndSetL)) return NULL;
- break;
-
- case vmIntrinsics::_getAndSetObject:
-#ifdef _LP64
- if (!UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
- if (UseCompressedOops && !Matcher::match_rule_supported(Op_GetAndSetN)) return NULL;
- break;
-#else
- if (!Matcher::match_rule_supported(Op_GetAndSetP)) return NULL;
- break;
-#endif
-
- case vmIntrinsics::_aescrypt_encryptBlock:
- case vmIntrinsics::_aescrypt_decryptBlock:
- if (!UseAESIntrinsics) return NULL;
- break;
-
- case vmIntrinsics::_multiplyToLen:
- if (!UseMultiplyToLenIntrinsic) return NULL;
- break;
-
- case vmIntrinsics::_squareToLen:
- if (!UseSquareToLenIntrinsic) return NULL;
- break;
-
- case vmIntrinsics::_mulAdd:
- if (!UseMulAddIntrinsic) return NULL;
- break;
-
- case vmIntrinsics::_montgomeryMultiply:
- if (!UseMontgomeryMultiplyIntrinsic) return NULL;
- break;
- case vmIntrinsics::_montgomerySquare:
- if (!UseMontgomerySquareIntrinsic) return NULL;
- break;
-
- case vmIntrinsics::_cipherBlockChaining_encryptAESCrypt:
- case vmIntrinsics::_cipherBlockChaining_decryptAESCrypt:
- if (!UseAESIntrinsics) return NULL;
- // these two require the predicated logic
- predicates = 1;
- break;
-
- case vmIntrinsics::_counterMode_AESCrypt:
- if (!UseAESCTRIntrinsics) {
- return NULL;
- }
- predicates = 1;
- break;
-
- case vmIntrinsics::_sha_implCompress:
- if (!UseSHA1Intrinsics) return NULL;
- break;
-
- case vmIntrinsics::_sha2_implCompress:
- if (!UseSHA256Intrinsics) return NULL;
- break;
-
- case vmIntrinsics::_sha5_implCompress:
- if (!UseSHA512Intrinsics) return NULL;
- break;
-
- case vmIntrinsics::_digestBase_implCompressMB:
- if (!(UseSHA1Intrinsics || UseSHA256Intrinsics || UseSHA512Intrinsics)) return NULL;
- predicates = 3;
- break;
-
- case vmIntrinsics::_ghash_processBlocks:
- if (!UseGHASHIntrinsics) return NULL;
- break;
-
- case vmIntrinsics::_updateCRC32:
- case vmIntrinsics::_updateBytesCRC32:
- case vmIntrinsics::_updateByteBufferCRC32:
- if (!UseCRC32Intrinsics) return NULL;
- break;
-
- case vmIntrinsics::_f2jblas_ddot:
- case vmIntrinsics::_dgemm_dgemm:
- case vmIntrinsics::_dgemv_dgemv:
- if (!UseF2jBLASIntrinsics) return NULL;
- break;
-
- case vmIntrinsics::_incrementExactI:
- case vmIntrinsics::_addExactI:
- if (!Matcher::match_rule_supported(Op_OverflowAddI) || !UseMathExactIntrinsics) return NULL;
- break;
- case vmIntrinsics::_incrementExactL:
- case vmIntrinsics::_addExactL:
- if (!Matcher::match_rule_supported(Op_OverflowAddL) || !UseMathExactIntrinsics) return NULL;
- break;
- case vmIntrinsics::_decrementExactI:
- case vmIntrinsics::_subtractExactI:
- if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
- break;
- case vmIntrinsics::_decrementExactL:
- case vmIntrinsics::_subtractExactL:
- if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
- break;
- case vmIntrinsics::_negateExactI:
- if (!Matcher::match_rule_supported(Op_OverflowSubI) || !UseMathExactIntrinsics) return NULL;
- break;
- case vmIntrinsics::_negateExactL:
- if (!Matcher::match_rule_supported(Op_OverflowSubL) || !UseMathExactIntrinsics) return NULL;
- break;
- case vmIntrinsics::_multiplyExactI:
- if (!Matcher::match_rule_supported(Op_OverflowMulI) || !UseMathExactIntrinsics) return NULL;
- break;
- case vmIntrinsics::_multiplyExactL:
- if (!Matcher::match_rule_supported(Op_OverflowMulL) || !UseMathExactIntrinsics) return NULL;
- break;
-
- default:
+ if (is_available) {
assert(id <= vmIntrinsics::LAST_COMPILER_INLINE, "caller responsibility");
assert(id != vmIntrinsics::_Object_init && id != vmIntrinsics::_invoke, "enum out of order?");
- break;
- }
-
- // -XX:-InlineClassNatives disables natives from the Class class.
- // The flag applies to all reflective calls, notably Array.newArray
- // (visible to Java programmers as Array.newInstance).
- if (m->holder()->name() == ciSymbol::java_lang_Class() ||
- m->holder()->name() == ciSymbol::java_lang_reflect_Array()) {
- if (!InlineClassNatives) return NULL;
- }
-
- // -XX:-InlineThreadNatives disables natives from the Thread class.
- if (m->holder()->name() == ciSymbol::java_lang_Thread()) {
- if (!InlineThreadNatives) return NULL;
- }
-
- // -XX:-InlineMathNatives disables natives from the Math,Float and Double classes.
- if (m->holder()->name() == ciSymbol::java_lang_Math() ||
- m->holder()->name() == ciSymbol::java_lang_Float() ||
- m->holder()->name() == ciSymbol::java_lang_Double()) {
- if (!InlineMathNatives) return NULL;
- }
-
- // -XX:-InlineUnsafeOps disables natives from the Unsafe class.
- if (m->holder()->name() == ciSymbol::sun_misc_Unsafe()) {
- if (!InlineUnsafeOps) return NULL;
+ return new LibraryIntrinsic(m, is_virtual,
+ vmIntrinsics::predicates_needed(id),
+ vmIntrinsics::does_virtual_dispatch(id),
+ (vmIntrinsics::ID) id);
+ } else {
+ return NULL;
}
-
- return new LibraryIntrinsic(m, is_virtual, predicates, does_virtual_dispatch, (vmIntrinsics::ID) id);
}
//----------------------register_library_intrinsics-----------------------
diff --git a/hotspot/src/share/vm/prims/whitebox.cpp b/hotspot/src/share/vm/prims/whitebox.cpp
index 482bf6a62..16af34e34 100644
--- a/hotspot/src/share/vm/prims/whitebox.cpp
+++ b/hotspot/src/share/vm/prims/whitebox.cpp
@@ -559,6 +559,24 @@ WB_ENTRY(jboolean, WB_IsMethodQueuedForCompilation(JNIEnv* env, jobject o, jobje
return mh->queued_for_compilation();
WB_END
+WB_ENTRY(jboolean, WB_IsIntrinsicAvailable(JNIEnv* env, jobject /* o */, jobject method, jobject compilation_context, jint compLevel))
+ if (compLevel < CompLevel_none || compLevel > CompLevel_highest_tier) {
+ return false; // Intrinsic is not available on a non-existent compilation level.
+ }
+ jmethodID method_id, compilation_context_id;
+ method_id = reflected_method_to_jmid(thread, env, method);
+ CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
+ methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(method_id));
+ if (compilation_context != NULL) {
+ compilation_context_id = reflected_method_to_jmid(thread, env, compilation_context);
+ CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
+ methodHandle cch(THREAD, Method::checked_resolve_jmethod_id(compilation_context_id));
+ return CompileBroker::compiler(compLevel)->is_intrinsic_available(mh, cch);
+ } else {
+ return CompileBroker::compiler(compLevel)->is_intrinsic_available(mh, NULL);
+ }
+WB_END
+
WB_ENTRY(jint, WB_GetMethodCompilationLevel(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
jmethodID jmid = reflected_method_to_jmid(thread, env, method);
CHECK_JNI_EXCEPTION_(env, CompLevel_none);
@@ -1341,6 +1359,8 @@ static JNINativeMethod methods[] = {
(void*)&WB_IsMethodCompilable},
{CC"isMethodQueuedForCompilation",
CC"(Ljava/lang/reflect/Executable;)Z", (void*)&WB_IsMethodQueuedForCompilation},
+ {CC"isIntrinsicAvailable0", CC"(Ljava/lang/reflect/Executable;Ljava/lang/reflect/Executable;I)Z",
+ (void*)&WB_IsIntrinsicAvailable},
{CC"makeMethodNotCompilable",
CC"(Ljava/lang/reflect/Executable;IZ)V", (void*)&WB_MakeMethodNotCompilable},
{CC"testSetDontInlineMethod",
diff --git a/hotspot/test/compiler/intrinsics/IntrinsicAvailableTest.java b/hotspot/test/compiler/intrinsics/IntrinsicAvailableTest.java
new file mode 100644
index 000000000..1a5475403
--- /dev/null
+++ b/hotspot/test/compiler/intrinsics/IntrinsicAvailableTest.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+import java.lang.reflect.Executable;
+import java.util.concurrent.Callable;
+import java.util.Objects;
+/*
+ * @test
+ * @bug 8130832
+ * @library /testlibrary /../../test/lib /compiler/whitebox /compiler/testlibrary
+ * @build IntrinsicAvailableTest
+ * @run main ClassFileInstaller sun.hotspot.WhiteBox
+ * sun.hotspot.WhiteBox$WhiteBoxPermission
+ * @run main/othervm -Xbootclasspath/a:.
+ * -XX:+UnlockDiagnosticVMOptions
+ * -XX:+WhiteBoxAPI
+ * -XX:+UseCRC32Intrinsics
+ * IntrinsicAvailableTest
+ * @run main/othervm -Xbootclasspath/a:.
+ * -XX:+UnlockDiagnosticVMOptions
+ * -XX:+WhiteBoxAPI
+ * -XX:-UseCRC32Intrinsics
+ * IntrinsicAvailableTest
+ */
+public class IntrinsicAvailableTest extends CompilerWhiteBoxTest {
+ protected String VMName;
+
+ public IntrinsicAvailableTest(IntrinsicAvailableTestTestCase testCase) {
+ super(testCase);
+ VMName = System.getProperty("java.vm.name");
+ }
+
+ public static class IntrinsicAvailableTestTestCase implements TestCase {
+
+ public String name() {
+ return "IntrinsicAvailableTestTestCase";
+ }
+
+ public Executable getExecutable() {
+ // Using a single method to test the
+ // WhiteBox.isIntrinsicAvailable(Executable method, int compLevel)
+ // call for the compilation level corresponding to both the C1 and C2
+ // compiler keeps the current test simple.
+ //
+ // The tested method is java.util.zip.CRC32.update(int, int) because
+ // both C1 and C2 define an intrinsic for the method and
+ // the UseCRC32Intrinsics flag can be used to enable/disable
+ // intrinsification of the method in both product and fastdebug
+ // builds.
+ try {
+ return Class.forName("java.util.zip.CRC32").getDeclaredMethod("update", int.class, int.class);
+ } catch (NoSuchMethodException e) {
+ throw new RuntimeException("Test bug, method unavailable. " + e);
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException("Test bug, class unavailable. " + e);
+ }
+ }
+
+ public Callable<Integer> getCallable() {
+ return null;
+ }
+
+ public boolean isOsr() {
+ return false;
+ }
+
+ }
+
+ protected void checkIntrinsicForCompilationLevel(Executable method, int compLevel) throws Exception {
+ boolean intrinsicEnabled = Boolean.valueOf(getVMOption("UseCRC32Intrinsics"));
+ boolean intrinsicAvailable = WHITE_BOX.isIntrinsicAvailable(method,
+ compLevel);
+
+ String intrinsicEnabledMessage = intrinsicEnabled ? "enabled" : "disabled";
+ String intrinsicAvailableMessage = intrinsicAvailable ? "available" : "not available";
+
+ if (intrinsicEnabled == intrinsicAvailable) {
+ System.out.println("Expected result: intrinsic for java.util.zip.CRC32.update() is " +
+ intrinsicEnabledMessage + " and intrinsic is " + intrinsicAvailableMessage +
+ " at compilation level " + compLevel);
+ } else {
+ throw new RuntimeException("Unexpected result: intrinsic for java.util.zip.CRC32.update() is " +
+ intrinsicEnabledMessage + " but intrinsic is " + intrinsicAvailableMessage +
+ " at compilation level " + compLevel);
+ }
+ }
+
+ protected boolean isServerVM() {
+ return VMName.toLowerCase().contains("server");
+ }
+
+ public void test() throws Exception {
+ Executable intrinsicMethod = testCase.getExecutable();
+ if (isServerVM()) {
+ if (TIERED_COMPILATION) {
+ checkIntrinsicForCompilationLevel(intrinsicMethod, COMP_LEVEL_SIMPLE);
+ }
+ checkIntrinsicForCompilationLevel(intrinsicMethod, COMP_LEVEL_FULL_OPTIMIZATION);
+ } else {
+ checkIntrinsicForCompilationLevel(intrinsicMethod, COMP_LEVEL_SIMPLE);
+ }
+ }
+
+ public static void main(String args[]) throws Exception {
+ new IntrinsicAvailableTest(new IntrinsicAvailableTestTestCase()).test();
+ }
+}
diff --git a/hotspot/test/compiler/intrinsics/mathexact/sanity/IntrinsicBase.java b/hotspot/test/compiler/intrinsics/mathexact/sanity/IntrinsicBase.java
index bdc39962f..a258140b1 100644
--- a/hotspot/test/compiler/intrinsics/mathexact/sanity/IntrinsicBase.java
+++ b/hotspot/test/compiler/intrinsics/mathexact/sanity/IntrinsicBase.java
@@ -67,7 +67,7 @@ public abstract class IntrinsicBase extends CompilerWhiteBoxTest {
compileAtLevel(CompilerWhiteBoxTest.COMP_LEVEL_SIMPLE);
}
- if (!isIntrinsicSupported()) {
+ if (!isIntrinsicAvailable()) {
expectedIntrinsicCount = 0;
}
break;
@@ -114,7 +114,11 @@ public abstract class IntrinsicBase extends CompilerWhiteBoxTest {
}
}
- protected abstract boolean isIntrinsicSupported();
+ // An intrinsic is available if:
+ // - the intrinsic is enabled (by using the appropriate command-line flag) and
+ // - the intrinsic is supported by the VM (i.e., the platform on which the VM is
+ // running provides the instructions necessary for the VM to generate the intrinsic).
+ protected abstract boolean isIntrinsicAvailable();
protected abstract String getIntrinsicId();
@@ -123,13 +127,20 @@ public abstract class IntrinsicBase extends CompilerWhiteBoxTest {
}
static class IntTest extends IntrinsicBase {
+
+ protected boolean isIntrinsicAvailable; // The tested intrinsic is available on the current platform.
+
protected IntTest(MathIntrinsic.IntIntrinsic testCase) {
super(testCase);
+ // Only the C2 compiler intrinsifies exact math methods
+ // so check if the intrinsics are available with C2.
+ isIntrinsicAvailable = WHITE_BOX.isIntrinsicAvailable(testCase.getTestMethod(),
+ COMP_LEVEL_FULL_OPTIMIZATION);
}
@Override
- protected boolean isIntrinsicSupported() {
- return isServerVM() && Boolean.valueOf(useMathExactIntrinsics) && (Platform.isX86() || Platform.isX64() || Platform.isAArch64());
+ protected boolean isIntrinsicAvailable() {
+ return isIntrinsicAvailable;
}
@Override
@@ -139,13 +150,20 @@ public abstract class IntrinsicBase extends CompilerWhiteBoxTest {
}
static class LongTest extends IntrinsicBase {
+
+ protected boolean isIntrinsicAvailable; // The tested intrinsic is available on the current platform.
+
protected LongTest(MathIntrinsic.LongIntrinsic testCase) {
super(testCase);
+ // Only the C2 compiler intrinsifies exact math methods
+ // so check if the intrinsics are available with C2.
+ isIntrinsicAvailable = WHITE_BOX.isIntrinsicAvailable(testCase.getTestMethod(),
+ COMP_LEVEL_FULL_OPTIMIZATION);
}
@Override
- protected boolean isIntrinsicSupported() {
- return isServerVM() && Boolean.valueOf(useMathExactIntrinsics) && (Platform.isX64() || Platform.isAArch64());
+ protected boolean isIntrinsicAvailable() {
+ return isIntrinsicAvailable;
}
@Override
diff --git a/hotspot/test/compiler/intrinsics/mathexact/sanity/MathIntrinsic.java b/hotspot/test/compiler/intrinsics/mathexact/sanity/MathIntrinsic.java
index 99039f9a7..b967bd4b0 100644
--- a/hotspot/test/compiler/intrinsics/mathexact/sanity/MathIntrinsic.java
+++ b/hotspot/test/compiler/intrinsics/mathexact/sanity/MathIntrinsic.java
@@ -28,36 +28,65 @@ public class MathIntrinsic {
enum IntIntrinsic implements CompilerWhiteBoxTest.TestCase {
Add {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("addExact", int.class, int.class);
+ }
+
@Override
Object execMathMethod() {
return intR = Math.addExact(int1, int2);
}
},
Subtract {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("subtractExact", int.class, int.class);
+ }
@Override
Object execMathMethod() {
return intR = Math.subtractExact(int1, int2);
}
},
Multiply {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("multiplyExact", int.class, int.class);
+ }
+
@Override
Object execMathMethod() {
return intR = Math.multiplyExact(int1, int2);
}
},
Increment {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("incrementExact", int.class);
+ }
+
@Override
Object execMathMethod() {
return intR = Math.incrementExact(int1);
}
},
Decrement {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("decrementExact", int.class);
+ }
+
@Override
Object execMathMethod() {
return intR = Math.decrementExact(int1);
}
},
Negate {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("negateExact", int.class);
+ }
+
@Override
Object execMathMethod() {
return intR = Math.negateExact(int1);
@@ -67,8 +96,19 @@ public class MathIntrinsic {
protected int int2;
protected int intR;
+ abstract Executable testMethod() throws NoSuchMethodException, ClassNotFoundException;
abstract Object execMathMethod();
+ public Executable getTestMethod() {
+ try {
+ return testMethod();
+ } catch (NoSuchMethodException e) {
+ throw new RuntimeException("Test bug, no such method: " + e);
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException("Test bug, no such class: " + e);
+ }
+ }
+
@Override
public Executable getExecutable() {
try {
@@ -92,36 +132,66 @@ public class MathIntrinsic {
enum LongIntrinsic implements CompilerWhiteBoxTest.TestCase {
Add {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("addExact", long.class, long.class);
+ }
+
@Override
Object execMathMethod() {
return longR = Math.addExact(long1, long2);
}
},
Subtract {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("subtractExact", long.class, long.class);
+ }
+
@Override
Object execMathMethod() {
return longR = Math.subtractExact(long1, long2);
}
},
Multiply {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("multiplyExact", long.class, long.class);
+ }
+
@Override
Object execMathMethod() {
return longR = Math.multiplyExact(long1, long2);
}
},
Increment {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("incrementExact", long.class);
+ }
+
@Override
Object execMathMethod() {
return longR = Math.incrementExact(long1);
}
},
Decrement {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("decrementExact", long.class);
+ }
+
@Override
Object execMathMethod() {
return longR = Math.decrementExact(long1);
}
},
Negate {
+ @Override
+ Executable testMethod() throws NoSuchMethodException, ClassNotFoundException {
+ return Class.forName("java.lang.Math").getDeclaredMethod("negateExact", long.class);
+ }
+
@Override
Object execMathMethod() {
return longR = Math.negateExact(long1);
@@ -131,8 +201,19 @@ public class MathIntrinsic {
protected long long2;
protected long longR;
+ abstract Executable testMethod() throws NoSuchMethodException, ClassNotFoundException;
abstract Object execMathMethod();
+ public Executable getTestMethod() {
+ try {
+ return testMethod();
+ } catch (NoSuchMethodException e) {
+ throw new RuntimeException("Test bug, no such method: " + e);
+ } catch (ClassNotFoundException e) {
+ throw new RuntimeException("Test bug, no such class: " + e);
+ }
+ }
+
@Override
public Executable getExecutable() {
try {
--
2.22.0
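
The main user-visible addition of this patch is the WhiteBox.isIntrinsicAvailable query exercised by the tests above. A minimal stand-alone sketch of how a test could use it follows; it assumes that the sun.hotspot.WhiteBox wrapper exposes the two-argument isIntrinsicAvailable(Executable, int) overload that IntrinsicAvailableTest relies on, and that the JVM is started with the same -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI flags as in the @run lines above. The class name CRC32IntrinsicQuery is only illustrative.

import java.lang.reflect.Executable;
import sun.hotspot.WhiteBox;

public class CRC32IntrinsicQuery {
    private static final WhiteBox WHITE_BOX = WhiteBox.getWhiteBox();
    // CompLevel_full_optimization (C2), the value used by CompilerWhiteBoxTest.
    private static final int COMP_LEVEL_FULL_OPTIMIZATION = 4;

    public static void main(String[] args) throws Exception {
        // Same method as in IntrinsicAvailableTest: both C1 and C2 define an
        // intrinsic for it, and -XX:+/-UseCRC32Intrinsics toggles its availability.
        Executable update = Class.forName("java.util.zip.CRC32")
                                 .getDeclaredMethod("update", int.class, int.class);
        boolean available = WHITE_BOX.isIntrinsicAvailable(update, COMP_LEVEL_FULL_OPTIMIZATION);
        System.out.println("CRC32.update intrinsic available at C2: " + available);
    }
}

Flipping -XX:+UseCRC32Intrinsics to -XX:-UseCRC32Intrinsics should flip the printed result, mirroring the two @run configurations of IntrinsicAvailableTest.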