Remove the VPM denoiser.

The VPM denoiser gives bad results, is slow and has not been used in
practice. Instead we use the VP8 denoiser. Testing this denoiser takes
up a lot of runtime on linux_memcheck (about 4 minutes) which we can do
without.

BUG=
R=stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/16069004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6688 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
pbos@webrtc.org 2014-07-15 09:50:40 +00:00
parent 2adc51c86e
commit bc73871251
20 changed files with 11 additions and 456 deletions

View File

@ -249,7 +249,6 @@
'video_processing/main/test/unit_test/color_enhancement_test.cc',
'video_processing/main/test/unit_test/content_metrics_test.cc',
'video_processing/main/test/unit_test/deflickering_test.cc',
'video_processing/main/test/unit_test/denoising_test.cc',
'video_processing/main/test/unit_test/video_processing_unittest.cc',
'video_processing/main/test/unit_test/video_processing_unittest.h',
],

View File

@ -25,8 +25,6 @@ source_set("video_processing") {
"main/source/content_analysis.h",
"main/source/deflickering.cc",
"main/source/deflickering.h",
"main/source/denoising.cc",
"main/source/denoising.h",
"main/source/frame_preprocessor.cc",
"main/source/frame_preprocessor.h",
"main/source/spatial_resampler.cc",

View File

@ -1 +1,6 @@
stefan@webrtc.org
mikhal@webrtc.org
marpan@webrtc.org
henrik.lundin@webrtc.org
per-file BUILD.gn=kjellander@webrtc.org

View File

@ -1,4 +0,0 @@
stefan@webrtc.org
mikhal@webrtc.org
marpan@webrtc.org
henrik.lundin@webrtc.org

View File

@ -176,17 +176,6 @@ class VideoProcessingModule : public Module {
*/
virtual int32_t Deflickering(I420VideoFrame* frame, FrameStats* stats) = 0;
/**
Denoises a video frame. Every frame from the stream should be passed in.
Has a fixed-point implementation.
\param[in,out] frame
Pointer to the video frame.
\return The number of modified pixels on success, -1 on failure.
*/
virtual int32_t Denoising(I420VideoFrame* frame) = 0;
/**
Detects if a video frame is excessively bright or dark. Returns a
warning if this is the case. Multiple frames should be passed in before

View File

@ -23,7 +23,6 @@ LOCAL_SRC_FILES := \
color_enhancement.cc \
content_analysis.cc \
deflickering.cc \
denoising.cc \
frame_preprocessor.cc \
spatial_resampler.cc \
video_decimator.cc \

View File

@ -1,156 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/video_processing/main/source/denoising.h"
#include <string.h>
namespace webrtc {
// Down-sampling in time (unit: number of frames)
enum { kSubsamplingTime = 0 };
// Sub-sampling in width (unit: power of 2.
enum { kSubsamplingWidth = 0 };
// Sub-sampling in height (unit: power of 2)
enum { kSubsamplingHeight = 0 };
// (Q8) De-noising filter parameter
enum { kDenoiseFiltParam = 179 };
// (Q8) 1 - filter parameter
enum { kDenoiseFiltParamRec = 77 };
// (Q8) De-noising threshold level
enum { kDenoiseThreshold = 19200 };
// Constructs an idle denoiser: module id zero, no moment buffers allocated.
// The shared (re)initialization lives in Reset(); buffers are created lazily
// by the first ProcessFrame() call.
VPMDenoising::VPMDenoising() : id_(0), moment1_(NULL), moment2_(NULL) {
  Reset();
}
// Releases the per-pixel moment buffers.
// delete[] on a NULL pointer is a no-op per the C++ standard, so the previous
// NULL guards were redundant; nulling members in a destructor is likewise
// unnecessary since the object is gone afterwards.
VPMDenoising::~VPMDenoising() {
  delete [] moment1_;
  delete [] moment2_;
}
// Stores the caller-supplied module id (identification only; the id is not
// used by the denoising algorithm itself). Always succeeds.
int32_t VPMDenoising::ChangeUniqueId(const int32_t id) {
id_ = id;
return VPM_OK;
}
// Returns the denoiser to its initial state: forgets the frame geometry,
// restarts the temporal subsampling counter and frees both moment buffers.
// The buffers are reallocated (zero-filled) by the next ProcessFrame() call.
// delete[] on NULL is a no-op, so the previous NULL guards were redundant.
void VPMDenoising::Reset() {
  frame_size_ = 0;
  denoise_frame_cnt_ = 0;
  delete [] moment1_;
  moment1_ = NULL;
  delete [] moment2_;
  moment2_ = NULL;
}
// De-noises the luma plane of |frame| in place using a recursive temporal
// filter in Q8 fixed point. Per pixel, running first and second order
// moments are maintained; when both the temporal variance and the current
// deviation from the mean fall below kDenoiseThreshold, the pixel is
// replaced by its temporal mean. The second moment is only refreshed on a
// subsampled grid/time step (kSubsampling* constants).
//
// \param[in,out] frame  Frame whose Y plane is filtered in place.
// \return Number of modified pixels on success, VPM_GENERAL_ERROR if the
//         frame is empty.
int32_t VPMDenoising::ProcessFrame(I420VideoFrame* frame) {
  assert(frame);
  int32_t thevar;
  int k;
  int jsub, ksub;
  int32_t diff0;
  uint32_t tmp_moment1;
  uint32_t tmp_moment2;
  uint32_t tmp;
  int32_t num_pixels_changed = 0;

  if (frame->IsZeroSize()) {
    return VPM_GENERAL_ERROR;
  }

  int width = frame->width();
  int height = frame->height();

  // Size of luminance component.
  const uint32_t y_size = height * width;

  // Drop the moment buffers if the frame geometry changed; they are
  // reallocated (zero-initialized) below.
  if (y_size != frame_size_) {
    delete [] moment1_;
    moment1_ = NULL;
    delete [] moment2_;
    moment2_ = NULL;
  }
  frame_size_ = y_size;

  if (!moment1_) {
    moment1_ = new uint32_t[y_size];
    memset(moment1_, 0, sizeof(uint32_t)*y_size);
  }
  if (!moment2_) {
    moment2_ = new uint32_t[y_size];
    memset(moment2_, 0, sizeof(uint32_t)*y_size);
  }

  // Apply de-noising on each pixel, but update variance sub-sampled.
  uint8_t* buffer = frame->buffer(kYPlane);
  for (int i = 0; i < height; i++) {  // Collect over height.
    k = i * width;
    ksub = ((i >> kSubsamplingHeight) << kSubsamplingHeight) * width;
    for (int j = 0; j < width; j++) {  // Collect over width.
      jsub = ((j >> kSubsamplingWidth) << kSubsamplingWidth);

      // Update mean value for every pixel and every frame:
      // mean = filt_param * mean + (1 - filt_param) * sample, all in Q8.
      tmp_moment1 = moment1_[k + j];
      tmp_moment1 *= kDenoiseFiltParam;  // Q16
      tmp_moment1 += ((kDenoiseFiltParamRec * ((uint32_t)buffer[k + j])) << 8);
      tmp_moment1 >>= 8;  // Q8
      moment1_[k + j] = tmp_moment1;

      // Second moment is refreshed only at subsampled positions and on
      // subsampled frames; otherwise the previous subsampled value is reused.
      tmp_moment2 = moment2_[ksub + jsub];
      if ((ksub == k) && (jsub == j) && (denoise_frame_cnt_ == 0)) {
        tmp = ((uint32_t)buffer[k + j] *
               (uint32_t)buffer[k + j]);
        tmp_moment2 *= kDenoiseFiltParam;  // Q16
        tmp_moment2 += ((kDenoiseFiltParamRec * tmp) << 8);
        tmp_moment2 >>= 8;  // Q8
      }
      moment2_[k + j] = tmp_moment2;

      // Current event = deviation from mean value.
      diff0 = ((int32_t)buffer[k + j] << 8) - moment1_[k + j];
      // Recent events = variance (variations over time).
      thevar = moment2_[k + j];
      thevar -= ((moment1_[k + j] * moment1_[k + j]) >> 8);

      // De-noising criteria, i.e., when should we replace a pixel by its
      // mean: 1) recent events are minor, 2) current events are minor.
      // |diff0| can reach 65280 (Q8), so its square (~4.26e9) overflows
      // int32_t (undefined behavior); compute the square in 64 bits.
      if ((thevar < kDenoiseThreshold)
          && (((int64_t)diff0 * diff0 >> 8) < kDenoiseThreshold)) {
        // Replace with mean.
        buffer[k + j] = (uint8_t)(moment1_[k + j] >> 8);
        num_pixels_changed++;
      }
    }
  }

  // Advance (and wrap) the temporal subsampling counter.
  denoise_frame_cnt_++;
  if (denoise_frame_cnt_ > kSubsamplingTime)
    denoise_frame_cnt_ = 0;

  return num_pixels_changed;
}
} // namespace

View File

@ -1,42 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DENOISING_H_
#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DENOISING_H_
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/typedefs.h"
namespace webrtc {
// Temporal de-noiser for the luma plane of I420 video frames. Maintains
// running first and second order moments per pixel in Q8 fixed point and
// replaces a pixel with its temporal mean when both its variance and its
// current deviation from the mean are small. Not thread-safe; callers must
// serialize access externally.
class VPMDenoising {
public:
VPMDenoising();
~VPMDenoising();
// Stores |id| for module identification; always returns VPM_OK.
int32_t ChangeUniqueId(int32_t id);
// Frees the moment buffers and restarts the temporal subsampling counter.
// Buffers are reallocated on the next ProcessFrame() call.
void Reset();
// De-noises |frame| in place. Returns the number of modified pixels on
// success, or a negative VPM error code if the frame is empty.
int32_t ProcessFrame(I420VideoFrame* frame);
private:
int32_t id_;
uint32_t* moment1_; // (Q8) First order moment (mean).
uint32_t* moment2_; // (Q8) Second order moment.
uint32_t frame_size_; // Size (# of pixels) of frame.
int denoise_frame_cnt_; // Counter for subsampling in time.
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_SOURCE_DENOISING_H_

View File

@ -31,8 +31,6 @@
'content_analysis.h',
'deflickering.cc',
'deflickering.h',
'denoising.cc',
'denoising.h',
'frame_preprocessor.cc',
'frame_preprocessor.h',
'spatial_resampler.cc',

View File

@ -51,7 +51,6 @@ int32_t VideoProcessingModuleImpl::ChangeUniqueId(const int32_t id) {
id_ = id;
brightness_detection_.ChangeUniqueId(id);
deflickering_.ChangeUniqueId(id);
denoising_.ChangeUniqueId(id);
frame_pre_processor_.ChangeUniqueId(id);
return VPM_OK;
}
@ -66,7 +65,6 @@ VideoProcessingModuleImpl::VideoProcessingModuleImpl(const int32_t id)
mutex_(*CriticalSectionWrapper::CreateCriticalSection()) {
brightness_detection_.ChangeUniqueId(id);
deflickering_.ChangeUniqueId(id);
denoising_.ChangeUniqueId(id);
frame_pre_processor_.ChangeUniqueId(id);
}
@ -77,7 +75,6 @@ VideoProcessingModuleImpl::~VideoProcessingModuleImpl() {
void VideoProcessingModuleImpl::Reset() {
CriticalSectionScoped mutex(&mutex_);
deflickering_.Reset();
denoising_.Reset();
brightness_detection_.Reset();
frame_pre_processor_.Reset();
}
@ -146,11 +143,6 @@ int32_t VideoProcessingModuleImpl::Deflickering(I420VideoFrame* frame,
return deflickering_.ProcessFrame(frame, stats);
}
int32_t VideoProcessingModuleImpl::Denoising(I420VideoFrame* frame) {
CriticalSectionScoped mutex(&mutex_);
return denoising_.ProcessFrame(frame);
}
int32_t VideoProcessingModuleImpl::BrightnessDetection(
const I420VideoFrame& frame,
const FrameStats& stats) {

View File

@ -16,7 +16,6 @@
#include "webrtc/modules/video_processing/main/source/brightness_detection.h"
#include "webrtc/modules/video_processing/main/source/color_enhancement.h"
#include "webrtc/modules/video_processing/main/source/deflickering.h"
#include "webrtc/modules/video_processing/main/source/denoising.h"
#include "webrtc/modules/video_processing/main/source/frame_preprocessor.h"
namespace webrtc {
@ -36,8 +35,6 @@ class VideoProcessingModuleImpl : public VideoProcessingModule {
virtual int32_t Deflickering(I420VideoFrame* frame, FrameStats* stats);
virtual int32_t Denoising(I420VideoFrame* frame);
virtual int32_t BrightnessDetection(const I420VideoFrame& frame,
const FrameStats& stats);
@ -74,7 +71,6 @@ class VideoProcessingModuleImpl : public VideoProcessingModule {
int32_t id_;
CriticalSectionWrapper& mutex_;
VPMDeflickering deflickering_;
VPMDenoising denoising_;
VPMBrightnessDetection brightness_detection_;
VPMFramePreprocessor frame_pre_processor_;
};

View File

@ -1,136 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stdio.h>
#include <stdlib.h>
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/gtest_disable.h"
namespace webrtc {
// Feeds the source sequence through the denoiser NumRuns times, measuring
// per-frame runtime. Synthetic random noise is added to the central region
// of each frame first. On the first run only, the noisy input is written to
// noise_testfile.yuv and the denoised result to denoise_testfile.yuv.
TEST_F(VideoProcessingModuleTest, DISABLED_ON_ANDROID(Denoising))
{
  enum { NumRuns = 10 };
  uint32_t frameNum = 0;

  int64_t min_runtime = 0;
  int64_t avg_runtime = 0;

  const std::string denoise_filename =
      webrtc::test::OutputPath() + "denoise_testfile.yuv";
  FILE* denoiseFile = fopen(denoise_filename.c_str(), "wb");
  ASSERT_TRUE(denoiseFile != NULL) <<
      "Could not open output file: " << denoise_filename << "\n";

  const std::string noise_filename =
      webrtc::test::OutputPath() + "noise_testfile.yuv";
  FILE* noiseFile = fopen(noise_filename.c_str(), "wb");
  ASSERT_TRUE(noiseFile != NULL) <<
      "Could not open noisy file: " << noise_filename << "\n";

  printf("\nRun time [us / frame]:\n");
  for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++)
  {
    TickTime t0;
    TickTime t1;
    TickInterval acc_ticks;
    int32_t modifiedPixels = 0;

    frameNum = 0;
    scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
    while (fread(video_buffer.get(), 1, frame_length_, source_file_) ==
           frame_length_)
    {
      EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                                 width_, height_,
                                 0, kRotateNone, &video_frame_));
      frameNum++;
      uint8_t* sourceBuffer = video_frame_.buffer(kYPlane);

      // Add random noise to the central region of the luma plane; the outer
      // quarter border of the frame is left untouched.
      // TODO: investigate the effectiveness of this test.
      for (int ir = 0; ir < height_; ir++)
      {
        uint32_t ik = ir * width_;
        for (int ic = 0; ic < width_; ic++)
        {
          uint8_t r = rand() % 16;
          r -= 8;
          if (ir < height_ / 4)
            r = 0;
          if (ir >= 3 * height_ / 4)
            r = 0;
          if (ic < width_ / 4)
            r = 0;
          if (ic >= 3 * width_ / 4)
            r = 0;
          sourceBuffer[ik + ic] += r;
        }
      }

      if (run_idx == 0)
      {
        if (PrintI420VideoFrame(video_frame_, noiseFile) < 0) {
          return;
        }
      }

      t0 = TickTime::Now();
      ASSERT_GE(modifiedPixels = vpm_->Denoising(&video_frame_), 0);
      t1 = TickTime::Now();
      acc_ticks += (t1 - t0);

      if (run_idx == 0)
      {
        // The denoised frame goes to |denoiseFile|. Previously this wrote to
        // |noiseFile|, leaving denoise_testfile.yuv empty and interleaving
        // noisy and denoised frames in noise_testfile.yuv.
        if (PrintI420VideoFrame(video_frame_, denoiseFile) < 0) {
          return;
        }
      }
    }
    ASSERT_NE(0, feof(source_file_)) << "Error reading source file";

    printf("%u\n", static_cast<int>(acc_ticks.Microseconds() / frameNum));
    if (acc_ticks.Microseconds() < min_runtime || run_idx == 0)
    {
      min_runtime = acc_ticks.Microseconds();
    }
    avg_runtime += acc_ticks.Microseconds();

    rewind(source_file_);
  }
  ASSERT_EQ(0, fclose(denoiseFile));
  ASSERT_EQ(0, fclose(noiseFile));
  printf("\nAverage run time = %d us / frame\n",
         static_cast<int>(avg_runtime / frameNum / NumRuns));
  printf("Min run time = %d us / frame\n\n",
         static_cast<int>(min_runtime / frameNum));
}
} // namespace webrtc

View File

@ -82,8 +82,6 @@ TEST_F(VideoProcessingModuleTest, HandleNullBuffer) {
EXPECT_EQ(-1, vpm_->Deflickering(&videoFrame, &stats));
EXPECT_EQ(-1, vpm_->Denoising(&videoFrame));
EXPECT_EQ(-3, vpm_->BrightnessDetection(videoFrame, stats));
}
@ -113,8 +111,6 @@ TEST_F(VideoProcessingModuleTest, HandleBadSize) {
EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats));
EXPECT_EQ(-1, vpm_->Denoising(&video_frame_));
EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats));
EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->SetTargetResolution(0,0,0));
@ -143,19 +139,6 @@ TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
ASSERT_EQ(0, vpm_->Deflickering(&video_frame2, &stats));
EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
// Using ConvertToI420 to add stride to the image.
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
width_, height_,
0, kRotateNone, &video_frame_));
video_frame2.CopyFrame(video_frame_);
EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
ASSERT_GE(vpm_->Denoising(&video_frame_), 0);
vpm_->Reset();
ASSERT_GE(vpm_->Denoising(&video_frame2), 0);
EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
source_file_));
EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,

View File

@ -103,8 +103,8 @@ enum ViEErrors {
kViEImageProcessInvalidCaptureId, // No capture device exist with the provided capture id.
kViEImageProcessFilterExists, // RegisterCaptureEffectFilter,RegisterSendEffectFilter,RegisterRenderEffectFilter - Effect filter already registered.
kViEImageProcessFilterDoesNotExist, // DeRegisterCaptureEffectFilter,DeRegisterSendEffectFilter,DeRegisterRenderEffectFilter - Effect filter not registered.
kViEImageProcessAlreadyEnabled, // EnableDeflickering,EnableDenoising,EnableColorEnhancement- Function already enabled.
kViEImageProcessAlreadyDisabled, // EnableDeflickering,EnableDenoising,EnableColorEnhancement- Function already disabled.
kViEImageProcessAlreadyEnabled, // EnableDeflickering,EnableColorEnhancement- Function already enabled.
kViEImageProcessAlreadyDisabled, // EnableDeflickering,EnableColorEnhancement- Function already disabled.
kViEImageProcessUnknownError // An unknown error has occurred. Check the log file.
};

View File

@ -11,7 +11,6 @@
// This sub-API supports the following functionalities:
// - Effect filters
// - Deflickering
// - Denoising
// - Color enhancement
#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_
@ -85,9 +84,10 @@ class WEBRTC_DLLEXPORT ViEImageProcess {
// not all of them succeed. Enabling this function will remove the flicker.
virtual int EnableDeflickering(const int capture_id, const bool enable) = 0;
// Some cameras produce very noisy captured images, especially in lowlight
// conditions. This functionality will reduce the camera noise.
virtual int EnableDenoising(const int capture_id, const bool enable) = 0;
// TODO(pbos): Remove this function when removed from fakewebrtcvideoengine.h.
virtual int EnableDenoising(const int capture_id, const bool enable) {
return -1;
}
// This function enhances the colors on the decoded video stream, enabled by
// default.

View File

@ -197,20 +197,6 @@ void ViEAutoTest::ViEImageProcessAPITest()
EXPECT_NE(0, ViE.image_process->RegisterSendEffectFilter(
tbCapture.captureId, effectFilter));
//
// Denoising
//
EXPECT_EQ(0, ViE.image_process->EnableDenoising(tbCapture.captureId, true));
// If the denoising is already enabled, it will just return 0.
EXPECT_EQ(0, ViE.image_process->EnableDenoising(tbCapture.captureId, true));
EXPECT_EQ(0, ViE.image_process->EnableDenoising(
tbCapture.captureId, false));
// If the denoising is already disabled, it will just return 0.
EXPECT_EQ(0, ViE.image_process->EnableDenoising(
tbCapture.captureId, false));
EXPECT_NE(0, ViE.image_process->EnableDenoising(
tbChannel.videoChannel, true));
//
// Deflickering
//

View File

@ -56,7 +56,6 @@ ViECapturer::ViECapturer(int capture_id,
brightness_frame_stats_(NULL),
current_brightness_level_(Normal),
reported_brightness_level_(Normal),
denoising_enabled_(false),
observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
observer_(NULL),
overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock())) {
@ -404,28 +403,6 @@ int32_t ViECapturer::DecImageProcRefCount() {
return 0;
}
int32_t ViECapturer::EnableDenoising(bool enable) {
CriticalSectionScoped cs(deliver_cs_.get());
if (enable) {
if (denoising_enabled_) {
// Already enabled, nothing need to be done.
return 0;
}
denoising_enabled_ = true;
if (IncImageProcRefCount() != 0) {
return -1;
}
} else {
if (denoising_enabled_ == false) {
// Already disabled, nothing need to be done.
return 0;
}
denoising_enabled_ = false;
DecImageProcRefCount();
}
return 0;
}
int32_t ViECapturer::EnableDeflickering(bool enable) {
CriticalSectionScoped cs(deliver_cs_.get());
if (enable) {
@ -516,9 +493,6 @@ void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
LOG_F(LS_ERROR) << "Could not get frame stats.";
}
}
if (denoising_enabled_) {
image_proc_module_->Denoising(video_frame);
}
if (brightness_frame_stats_) {
if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
*video_frame) == 0) {

View File

@ -94,7 +94,6 @@ class ViECapturer
// Effect filter.
int32_t RegisterEffectFilter(ViEEffectFilter* effect_filter);
int32_t EnableDenoising(bool enable);
int32_t EnableDeflickering(bool enable);
int32_t EnableBrightnessAlarm(bool enable);
@ -180,7 +179,6 @@ class ViECapturer
VideoProcessingModule::FrameStats* brightness_frame_stats_;
Brightness current_brightness_level_;
Brightness reported_brightness_level_;
bool denoising_enabled_;
// Statistics observer.
scoped_ptr<CriticalSectionWrapper> observer_cs_;

View File

@ -182,29 +182,6 @@ int ViEImageProcessImpl::EnableDeflickering(const int capture_id,
return 0;
}
int ViEImageProcessImpl::EnableDenoising(const int capture_id,
const bool enable) {
LOG_F(LS_INFO) << "capture_id: " << capture_id
<< " enable: " << (enable ? "on" : "off");
ViEInputManagerScoped is(*(shared_data_->input_manager()));
ViECapturer* vie_capture = is.Capture(capture_id);
if (!vie_capture) {
shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
return -1;
}
if (vie_capture->EnableDenoising(enable) != 0) {
if (enable) {
shared_data_->SetLastError(kViEImageProcessAlreadyEnabled);
} else {
shared_data_->SetLastError(kViEImageProcessAlreadyDisabled);
}
return -1;
}
return 0;
}
int ViEImageProcessImpl::EnableColorEnhancement(const int video_channel,
const bool enable) {
LOG_F(LS_INFO) << "video_channel: " << video_channel

View File

@ -35,7 +35,6 @@ class ViEImageProcessImpl
ViEEffectFilter& render_filter);
virtual int DeregisterRenderEffectFilter(const int video_channel);
virtual int EnableDeflickering(const int capture_id, const bool enable);
virtual int EnableDenoising(const int capture_id, const bool enable);
virtual int EnableColorEnhancement(const int video_channel,
const bool enable);
virtual void RegisterPreEncodeCallback(