HVX_64             53 apps/HelloHexagon/pipeline.cpp             if (get_target().features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            181 apps/camera_pipe/camera_pipe_generator.cpp         if (get_target().has_feature(Target::HVX_64)) {
HVX_64            201 apps/camera_pipe/camera_pipe_generator.cpp         if (get_target().features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            250 apps/camera_pipe/camera_pipe_generator.cpp     if (get_target().features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            323 apps/camera_pipe/camera_pipe_generator.cpp     } else if (get_target().has_feature(Target::HVX_64)) {
HVX_64            331 apps/camera_pipe/camera_pipe_generator.cpp     if (get_target().has_feature(Target::HVX_64)) {
HVX_64            363 apps/camera_pipe/camera_pipe_generator.cpp     if (get_target().features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             35 apps/hexagon_benchmarks/conv3x3_generator.cpp         if (get_target().features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             28 apps/hexagon_benchmarks/dilate3x3_generator.cpp         if (get_target().features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             31 apps/hexagon_benchmarks/gaussian5x5_generator.cpp         if (get_target().features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             39 apps/hexagon_benchmarks/median3x3_generator.cpp         if (get_target().features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             33 apps/hexagon_benchmarks/sobel_generator.cpp         if (get_target().features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             55 apps/hexagon_matmul/pipeline.cpp             if (target.has_feature(Target::HVX_64)) {
HVX_64            202 src/CodeGen_C.cpp                 target.has_feature(Target::HVX_64)) {
HVX_64             82 src/CodeGen_Hexagon.cpp     if (module.target().features_all_of({Halide::Target::HVX_128, Halide::Target::HVX_64})) {
HVX_64           2037 src/Func.h         if (t.has_feature(Target::HVX_64) || t.has_feature(Target::HVX_128)) {
HVX_64            844 src/HexagonOffload.cpp         Target::HVX_64,
HVX_64            650 src/JITModule.cpp         one_gpu.set_feature(Target::HVX_64, false);
HVX_64            680 src/JITModule.cpp             one_gpu.set_feature(Target::HVX_64);
HVX_64            818 src/JITModule.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            768 src/LLVM_Runtime_Linker.cpp                 if (t.has_feature(Target::HVX_64)) {
HVX_64            867 src/LLVM_Runtime_Linker.cpp         if (t.arch != Target::Hexagon && t.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            226 src/Lower.cpp          (t.arch != Target::Hexagon && (t.features_any_of({Target::HVX_64, Target::HVX_128})))) {
HVX_64            388 src/Prefetch.cpp     if (t.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            250 src/Target.cpp     {"hvx_64", Target::HVX_64},
HVX_64            508 src/Target.cpp     case DeviceAPI::Hexagon:     return has_feature(Target::HVX_64) || has_feature(Target::HVX_128);
HVX_64            237 src/Target.h                   } else if (has_feature(Halide::Target::HVX_64)) {
HVX_64            248 src/VectorizeLoops.cpp             internal_assert(target.features_any_of({Target::HVX_64, Target::HVX_128}))
HVX_64             56 test/correctness/bit_counting.cpp     if (t.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             27 test/correctness/boundary_conditions.cpp     } else if (t.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             26 test/correctness/bounds.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             23 test/correctness/bounds_inference.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             81 test/correctness/convolution.cpp     } else if (target.has_feature(Target::HVX_64) || target.has_feature(Target::HVX_128)) {
HVX_64             46 test/correctness/convolution_multiple_kernels.cpp     } else if (target.has_feature(Target::HVX_64)) {
HVX_64             33 test/correctness/dilate3x3.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             34 test/correctness/func_lifetime.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             57 test/correctness/func_lifetime.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             38 test/correctness/func_lifetime_2.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             57 test/correctness/func_lifetime_2.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             37 test/correctness/gpu_data_flows.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             71 test/correctness/gpu_data_flows.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             47 test/correctness/gpu_non_contiguous_copy.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             34 test/correctness/gpu_object_lifetime_1.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             40 test/correctness/gpu_object_lifetime_2.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             45 test/correctness/gpu_object_lifetime_3.cpp                 } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             26 test/correctness/interleave_rgb.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             17 test/correctness/interleave_x.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             47 test/correctness/leak_device_memory.cpp             } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             34 test/correctness/logical.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             67 test/correctness/logical.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             98 test/correctness/logical.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            127 test/correctness/logical.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            179 test/correctness/logical.cpp             } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             67 test/correctness/math.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) { \
HVX_64             92 test/correctness/math.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {     \
HVX_64             50 test/correctness/median3x3.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            550 test/correctness/mul_div_mod.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            560 test/correctness/mul_div_mod.cpp     } else if (target.has_feature(Target::HVX_64)) {
HVX_64             30 test/correctness/param.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            101 test/correctness/predicated_store_load.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            130 test/correctness/predicated_store_load.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            168 test/correctness/predicated_store_load.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            201 test/correctness/predicated_store_load.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            236 test/correctness/predicated_store_load.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            271 test/correctness/predicated_store_load.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            303 test/correctness/predicated_store_load.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            338 test/correctness/predicated_store_load.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64           1445 test/correctness/simd_op_check.cpp         if (target.has_feature(Target::HVX_64)) {
HVX_64             23 test/correctness/tuple_reduction.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             60 test/correctness/tuple_reduction.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             67 test/correctness/tuple_reduction.cpp                 } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            110 test/correctness/tuple_reduction.cpp                 } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64            156 test/correctness/tuple_reduction.cpp                 } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             30 test/correctness/vector_cast.cpp     if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             42 test/correctness/vector_cast.cpp     } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             75 test/correctness/vector_cast.cpp         if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             43 test/correctness/widening_reduction.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
HVX_64             85 test/correctness/widening_reduction.cpp         } else if (target.features_any_of({Target::HVX_64, Target::HVX_128})) {
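
The hits above share one scheduling idiom: a pipeline or test checks for either HVX feature with features_any_of({Target::HVX_64, Target::HVX_128}), then picks its vector width (128 bytes for HVX_128, 64 bytes for HVX_64) and offloads to the DSP. The following is a minimal sketch of that pattern, not code from the repository: the helper name schedule_for_hvx, the assumption of an 8-bit output Func, and the CPU fallback width are all illustrative.

    #include "Halide.h"
    using namespace Halide;

    // Hypothetical helper showing the recurring HVX gating pattern.
    void schedule_for_hvx(Func output, Var x, Var y, const Target &t) {
        if (t.features_any_of({Target::HVX_64, Target::HVX_128})) {
            // HVX_128 provides 128-byte vectors, HVX_64 provides 64-byte vectors.
            const int vector_size = t.has_feature(Target::HVX_128) ? 128 : 64;
            output.hexagon()                  // offload this Func to the Hexagon DSP
                  .vectorize(x, vector_size)  // one full HVX vector per iteration, assuming 8-bit data
                  .parallel(y);
        } else {
            // Host fallback for non-HVX targets: a modest CPU vector width (illustrative).
            output.vectorize(x, 8).parallel(y);
        }
    }

A generator would typically call such a helper with get_target(), which is exactly the call that appears in most of the apps/ and test/ hits listed here.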