  bool inline_vector_call(int arity);
  bool inline_vector_frombits_coerced();
  bool inline_vector_mask_operation();
  bool inline_vector_mem_operation(bool is_store);
  bool inline_vector_mem_masked_operation(bool is_store);
  bool inline_vector_gather_scatter(bool is_scatter);
  bool inline_vector_reduction();
  bool inline_vector_test();
  bool inline_vector_blend();
  bool inline_vector_rearrange();
  bool inline_vector_select_from();
  bool inline_vector_compare();
  bool inline_vector_broadcast_int();
  bool inline_vector_convert();
  bool inline_vector_extract();
  bool inline_vector_insert();
  bool inline_vector_compress_expand();
  bool inline_index_vector();
  bool inline_index_partially_in_upper_range();
  bool inline_vector_select_from_two_vectors();
  bool inline_timestamp(bool serial);

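  // Generates a call into the platform's vector math runtime (e.g. SVML or SLEEF stubs)
  // for the given Vector API operation, element type and vector length.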
  Node* gen_call_to_vector_math(int vector_api_op_id, BasicType bt, int num_elem, Node* opd1, Node* opd2);

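  // Describes how an intrinsic uses its vector mask argument: loaded as a vector,
  // stored as a vector, used as the predicate of a masked operation, or not used.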
  enum VectorMaskUseType {
    VecMaskUseLoad = 1 << 0,
    VecMaskUseStore = 1 << 1,
    VecMaskUseAll = VecMaskUseLoad | VecMaskUseStore,
    VecMaskUsePred = 1 << 2,
    VecMaskNotUsed = 1 << 3
  };

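  // Checks whether the current architecture supports the given vector operation
  // for num_elem elements of the given element type, honoring the mask usage.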
  bool arch_supports_vector(int op, int num_elem, BasicType type, VectorMaskUseType mask_use_type, bool has_scalar_args = false);
  bool arch_supports_vector_rotate(int opc, int num_elem, BasicType elem_bt, VectorMaskUseType mask_use_type, bool has_scalar_args = false);

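  // On x86 with AVX2 or later, record that this compilation should clear the upper
  // bits of the AVX registers (vzeroupper) to avoid AVX-SSE transition penalties.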
  void clear_upper_avx() {
#ifdef X86
    if (UseAVX >= 2) {
      C->set_clear_upper_avx(true);
    }
#endif
  }

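  // Intrinsic for Instrumentation.getObjectSize().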
  bool inline_getObjectSize();

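  // Intrinsic for blackhole methods (CompileCommand "blackhole"): keeps the
  // arguments alive without any other observable side effect.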
  bool inline_blackhole();

  bool inline_sizeOf();
  bool inline_sizeOf_impl(Node* obj);

  bool inline_addressOf();

};

#endif // SHARE_OPTO_LIBRARY_CALL_HPP