ML::experimental::fil::detail::inference Namespace Reference

Functions

template<raft_proto::device_type D, bool has_categorical_nodes, typename forest_t , typename vector_output_t = std::nullptr_t, typename categorical_data_t = std::nullptr_t>
std::enable_if_t< std::disjunction_v< std::bool_constant< D==raft_proto::device_type::cpu >, std::bool_constant<!raft_proto::GPU_ENABLED > >, void > infer (forest_t const &forest, postprocessor< typename forest_t::io_type > const &postproc, typename forest_t::io_type *output, typename forest_t::io_type *input, index_type row_count, index_type col_count, index_type output_count, vector_output_t vector_output=nullptr, categorical_data_t categorical_data=nullptr, infer_kind infer_type=infer_kind::default_kind, std::optional< index_type > specified_chunk_size=std::nullopt, raft_proto::device_id< D > device=raft_proto::device_id< D >{}, raft_proto::cuda_stream=raft_proto::cuda_stream{})
 
template<raft_proto::device_type D, bool has_categorical_nodes, typename forest_t , typename vector_output_t = std::nullptr_t, typename categorical_data_t = std::nullptr_t>
std::enable_if_t< D==raft_proto::device_type::gpu, void > infer (forest_t const &forest, postprocessor< typename forest_t::io_type > const &postproc, typename forest_t::io_type *output, typename forest_t::io_type *input, index_type row_count, index_type col_count, index_type class_count, vector_output_t vector_output=nullptr, categorical_data_t categorical_data=nullptr, infer_kind infer_type=infer_kind::default_kind, std::optional< index_type > specified_chunk_size=std::nullopt, raft_proto::device_id< D > device=raft_proto::device_id< D >{}, raft_proto::cuda_stream stream=raft_proto::cuda_stream{})
 

Function Documentation

◆ infer() [1/2]

template<raft_proto::device_type D, bool has_categorical_nodes, typename forest_t , typename vector_output_t = std::nullptr_t, typename categorical_data_t = std::nullptr_t>
std::enable_if_t< D == raft_proto::device_type::gpu, void > ML::experimental::fil::detail::inference::infer (
    forest_t const &  forest,
    postprocessor< typename forest_t::io_type > const &  postproc,
    typename forest_t::io_type *  output,
    typename forest_t::io_type *  input,
    index_type  row_count,
    index_type  col_count,
    index_type  class_count,
    vector_output_t  vector_output = nullptr,
    categorical_data_t  categorical_data = nullptr,
    infer_kind  infer_type = infer_kind::default_kind,
    std::optional< index_type >  specified_chunk_size = std::nullopt,
    raft_proto::device_id< D >  device = raft_proto::device_id< D >{},
    raft_proto::cuda_stream  stream = raft_proto::cuda_stream{}
)
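
For illustration, a call to this GPU overload might look like the sketch below, assuming a GPU-enabled build in which a forest_t instance, its postprocessor, device-resident input/output buffers, and a CUDA stream wrapper have already been prepared elsewhere in the FIL detail layer. The names my_forest, my_postproc, d_input, d_output, row_count, col_count, class_count, and stream are hypothetical placeholders, and infer_kind is assumed to live in the enclosing ML::experimental::fil namespace.

    // Hypothetical sketch: all lowercase object names are placeholders for values
    // prepared elsewhere; the required FIL detail headers are assumed to be included.
    namespace fil = ML::experimental::fil;

    fil::detail::inference::infer<
        raft_proto::device_type::gpu,   // D: dispatch to this GPU overload
        false                           // has_categorical_nodes: numerical splits only (assumed)
    >(
        my_forest,                      // forest_t const&; forest_t is deduced from this argument
        my_postproc,                    // postprocessor< forest_t::io_type > const&
        d_output,                       // device buffer that receives the results
        d_input,                        // device buffer holding row_count * col_count feature values
        row_count,
        col_count,
        class_count,
        nullptr,                        // vector_output: no vector leaves
        nullptr,                        // categorical_data: no external categorical storage
        fil::infer_kind::default_kind,  // infer_type
        std::nullopt,                   // specified_chunk_size: let the backend choose
        raft_proto::device_id<raft_proto::device_type::gpu>{},
        stream);                        // raft_proto::cuda_stream supplied by the caller
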

◆ infer() [2/2]

template<raft_proto::device_type D, bool has_categorical_nodes, typename forest_t , typename vector_output_t = std::nullptr_t, typename categorical_data_t = std::nullptr_t>
std::enable_if_t< std::disjunction_v< std::bool_constant< D == raft_proto::device_type::cpu >, std::bool_constant< !raft_proto::GPU_ENABLED > >, void > ML::experimental::fil::detail::inference::infer (
    forest_t const &  forest,
    postprocessor< typename forest_t::io_type > const &  postproc,
    typename forest_t::io_type *  output,
    typename forest_t::io_type *  input,
    index_type  row_count,
    index_type  col_count,
    index_type  output_count,
    vector_output_t  vector_output = nullptr,
    categorical_data_t  categorical_data = nullptr,
    infer_kind  infer_type = infer_kind::default_kind,
    std::optional< index_type >  specified_chunk_size = std::nullopt,
    raft_proto::device_id< D >  device = raft_proto::device_id< D >{},
    raft_proto::cuda_stream  stream = raft_proto::cuda_stream{}
)
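
For illustration, a call to this CPU overload (also selected when the build has no GPU support) might look like the sketch below. The names my_forest, my_postproc, h_input, h_output, row_count, col_count, and output_count are hypothetical placeholders for a host-resident forest, its postprocessor, host buffers, and problem sizes prepared elsewhere in the FIL detail layer; the trailing parameters are left at their defaults.

    // Hypothetical sketch: all lowercase object names are placeholders for values
    // prepared elsewhere; the required FIL detail headers are assumed to be included.
    namespace fil = ML::experimental::fil;

    fil::detail::inference::infer<
        raft_proto::device_type::cpu,   // D: selects this overload via the enable_if above
        false                           // has_categorical_nodes: numerical splits only (assumed)
    >(
        my_forest,                      // forest_t const&; forest_t is deduced from this argument
        my_postproc,                    // postprocessor< forest_t::io_type > const&
        h_output,                       // host buffer that receives the results
        h_input,                        // host buffer holding row_count * col_count feature values
        row_count,
        col_count,
        output_count);                  // remaining parameters (vector_output, categorical_data,
                                        // infer_type, specified_chunk_size, device, and the
                                        // unnamed CUDA stream) keep their defaults
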