using namespace shogun;
#ifndef DOXYGEN_SHOULD_SKIP_THIS
struct GRADIENT_THREAD_PARAM
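{
	// Field sketch: the struct body is not shown in this excerpt, so the
	// members below are assumed from how thread_params is populated further
	// down (inf/obj/param/grad/lock); types follow the surrounding GP code.
	CInferenceMethod* inf;                          // method computing the derivatives
	CSGObject* obj;                                 // object owning the parameter
	TParameter* param;                              // parameter to differentiate wrt
	CMap<TParameter*, SGVector<float64_t> >* grad;  // shared result map
	CLock* lock;                                    // guards writes to grad
};
#endif /* DOXYGEN_SHOULD_SKIP_THIS */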
void CInferenceMethod::init()
float64_t CInferenceMethod::get_marginal_likelihood_estimate(
		int32_t num_importance_samples, float64_t ridge_size)
	// add a small ridge to the kernel diagonal for numerical stability
	for (index_t i=0; i<scaled_kernel.num_rows; i++)
		scaled_kernel(i,i)+=ridge_size;
	ASSERT(log_likelihood.vlen==num_importance_samples);
	ASSERT(log_likelihood.vlen==log_pdf_prior.vlen);
	ASSERT(log_likelihood.vlen==log_pdf_post_approx.vlen);

	// importance weights in the log-domain:
	// log p(y|f_i) + log p(f_i) - log q(f_i) for each sample f_i drawn from q
	SGVector<float64_t> sum(num_importance_samples);
	for (index_t i=0; i<num_importance_samples; i++)
		sum[i]=log_likelihood[i]+log_pdf_prior[i]-log_pdf_post_approx[i];
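	// Plausible completion (a sketch; not shown in this excerpt): the marginal
	// likelihood estimate is the log of the average importance weight.
	// CMath::log_mean_exp is assumed to compute log(1/n*sum(exp(x_i)))
	// in a numerically stable way.
	return CMath::log_mean_exp(sum);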
	for (index_t i=0; i<num_deriv; i++)
	{
		// next (parameter, owning object) pair from the dictionary
		CMapNode<TParameter*, CSGObject*>* node=params->get_node_ptr(i);

		GRADIENT_THREAD_PARAM thread_params;

		thread_params.inf=this;
		thread_params.obj=node->data;
		thread_params.param=node->key;
		thread_params.grad=result;
		thread_params.lock=&lock;
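		// Single-parameter case: presumably nothing to parallelise, so the
		// helper runs in the calling thread (mirrors the pthread path below).
		get_derivative_helper((void*)&thread_params);
	}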
	pthread_t* threads=SG_MALLOC(pthread_t, num_deriv);
	GRADIENT_THREAD_PARAM* thread_params=SG_MALLOC(GRADIENT_THREAD_PARAM,
			num_deriv);
	for (index_t t=0; t<num_deriv; t++)
	{
		CMapNode<TParameter*, CSGObject*>* node=params->get_node_ptr(t);

		thread_params[t].inf=this;
		thread_params[t].obj=node->data;
		thread_params[t].param=node->key;
		thread_params[t].grad=result;
		thread_params[t].lock=&lock;

		pthread_create(&threads[t], NULL, CInferenceMethod::get_derivative_helper,
				(void*)&thread_params[t]);
	}
	for (index_t t=0; t<num_deriv; t++)
		pthread_join(threads[t], NULL);
	SG_FREE(thread_params);
	SG_FREE(threads);
	GRADIENT_THREAD_PARAM* thread_param=(GRADIENT_THREAD_PARAM*)p;

	// unpack the work item (fields as in the struct sketch above)
	CInferenceMethod* inf=thread_param->inf;
	CSGObject* obj=thread_param->obj;
	CMap<TParameter*, SGVector<float64_t> >* grad=thread_param->grad;
	TParameter* param=thread_param->param;
	CLock* lock=thread_param->lock;

	REQUIRE(param, "Parameter should not be NULL\n");
	REQUIRE(obj, "Object of the parameter should not be NULL\n");
	else if (obj==inf->m_mean)
		gradient=inf->get_derivative_wrt_mean(param);
	else
	{
		SG_SERROR("Can't compute derivative of negative log marginal "
				"likelihood wrt %s.%s parameter\n", obj->get_name(), param->m_name)
	}

	lock->lock();
	grad->add(param, gradient);
	lock->unlock();
267 "Number of training features must be greater than zero\n")
270 "Number of labels must be greater than zero\n")
272 "Number of training vectors must match number of labels, which is "
273 "%d, but number of training vectors is %d\n",