Here is this example, compiled and ready to run. It also contains a small sample scene to test it. All the media used comes with the distribution of Irrlicht.
This code can be used freely for any kind of project. The only condition I ask in return is that you give some credit.
Code: Select all
/**Simple postprocessing example, Blur / HDR-bloom postprocessing effect
By Santiago A. Navascues using the Irrlicht Engine
*/
/**What is "postprocessing"?
The post processing is a routine done after the main processing of the scene,
when the main scene has been already rendered.Normally, we would end here,
but with postprocessing things work differently. The first and main difference
between the normal rendering and a postprocessed scene is that the main
rendering isn't done to the back buffer, but to a rendering target texture.
(RTT for short)
Then, this texture is used as the input for another rendering, using an
screenaligned quad that will serve us to re-render the scene. This
screen aligned quad has a material that mixes, blurs, decolorize...
In one word, process the input RTT in which our main scene is rendered
This material is usually written in a shader file, and gives us as output
a "postprocessed" scene, if this scene is rendered to an RTT again, the
postprocessing can be repeated, and the postprocessing routines can then
be chained.
To avoid differences between DirectX and OpenGL, I will make a simple
postprocessing system that won't rely on any shaders. But all the Postpro-
cessing routines out there use the same basic system, that is, render to a
texture, and use that texture to render a screen aligned quad. The good
thing is that this can run on platforms which don't support shaders,
but that support Irrlicht completely ;)
WARNING!: this code requires NPOT textures!
Also, I will make use of some tricks to simplify stuff, but the meat of it
is there
*/
/**
What this example covers:
-The Creation of a custom Scene Node, the ScreenQuad
-The usage of the 2D drawing routines to create an effect.
-The creation of simple effects using the materials
-Rendering to a texture
-The Rendering on demand of an ISceneNode
*/
#include <cctype>   //std::toupper, used when parsing the effect choice
#include <iostream>
#include <irrlicht.h>
#define _WIDTH_ 1024
#define _HEIGHT_ 768
using namespace irr;
using namespace std;
//Code taken kindly from the driverChoice.h :)
//Code taken kindly from the driverChoice.h :)
//Prints the available drivers as lettered menu entries, reads one character
//from stdin and maps it back to the corresponding E_DRIVER_TYPE. Unsupported
//drivers are skipped in the listing (unless allDrivers is set), and the same
//skip logic is replayed while decoding the answer so the letters stay in sync.
//Returns EDT_COUNT when the input matches no listed driver.
static irr::video::E_DRIVER_TYPE driverChoiceConsole(bool allDrivers=true)
{
    const char* const names[] = {"NullDriver","Software Renderer","Burning's Video","Direct3D 8.1","Direct3D 9.0c","OpenGL 1.x/2.x/3.x"};
    printf("Please select the driver you want:\n");

    //List the drivers from the highest enum value down, labelling them 'a', 'b', ...
    irr::u32 idx = irr::video::EDT_COUNT;
    while (idx > 0)
    {
        const irr::video::E_DRIVER_TYPE candidate = irr::video::E_DRIVER_TYPE(idx-1);
        if (allDrivers || irr::IrrlichtDevice::isDriverSupported(candidate))
            printf(" (%c) %s\n", 'a'+irr::video::EDT_COUNT-idx, names[idx-1]);
        --idx;
    }

    char selection;
    std::cin >> selection;
    //Translate the typed letter into the same descending index space used above.
    selection = irr::video::EDT_COUNT + 'a' - selection;

    for (irr::u32 k = irr::video::EDT_COUNT; k > 0; --k)
    {
        //Every driver that was hidden from the menu shifts the expected index by one.
        if (!(allDrivers || irr::IrrlichtDevice::isDriverSupported(irr::video::E_DRIVER_TYPE(k-1))))
            --selection;
        if ((char)k == selection)
            return irr::video::E_DRIVER_TYPE(k-1);
    }
    return irr::video::EDT_COUNT; //no match — caller gets the invalid sentinel
}
/**Class CScreenQuadSceneNode:
Our screen aligned quad. When this quad is rendered, it simply covers the
screen with its texture, and uses its material to render the effect
Any Scene node out there needs, at least, all the abstract methods of the
ISceneNode interface implemented in order to be useful.
These methods are 5, besides the appropriate constructors and destructors:
The "on register for rendering" method,
The "render" method,
The "material count request" return method,
The "material request" method,
And the "AABB request" method.
*/
class CScreenQuadSceneNode : public scene::ISceneNode{
core::aabbox3df aabb; //An axis aligned bounding box. Actually not needed.
video::SMaterial material; //The material used to render the Scene Node
video::S3DVertex2TCoords vertices[4]; //The vertices of the Scene Node.
//Normally we wouldn't need more
//than one set of UV coordinates.
//But if we are to use the builtin materials, this is necesary
public:
CScreenQuadSceneNode::CScreenQuadSceneNode(scene::ISceneNode* parent, scene::ISceneManager* mgr, s32 id)
:ISceneNode(parent,mgr,id)
{
f32 shiftX,shiftY;
core::dimension2d<u32> currentResolution;
/**Here we initialize the vertices of the screen Aligned quad*/
currentResolution = mgr->getVideoDriver()->getScreenSize();
aabb.reset(0,0,0);
shiftX = 0.5/currentResolution.Width; //This small shift is necesary to compensate the texture sampling bias
shiftY = 0.5/currentResolution.Height; //It avoids that our effect becomes too blurry.
vertices[0] = video::S3DVertex2TCoords(
-1.0f,-1.0f,0.0f,
0.0f,0.0f,-1.0f,
video::SColor(255,255,255,255),
shiftX,1+shiftY,
shiftX,1+shiftY);
vertices[1] = video::S3DVertex2TCoords(
1.0f,-1.0,0.0f,
0.0f,0.0f,-1.0f,
video::SColor(255,255,255,255),
1.0f+shiftX,1+shiftY,
1.0f+shiftX,1+shiftY);
vertices[2] = video::S3DVertex2TCoords(
-1.0f,1.0,0.0f,
0.0f,0.0f,-1.0f,
video::SColor(255,255,255,255),
shiftX,shiftY,
shiftX,shiftY);
vertices[3] = video::S3DVertex2TCoords(
1.0f,1.0f,0.0f,
0.0f,0.0f,-1.0f,
video::SColor(255,255,255,255),
1.0f+shiftX,shiftY,
1.0f+shiftX,shiftY);
/**Now we proceed to initialize the appropriate settings for the material we are going to use
We can alter these later, but for the time being, initializing then here will do no harm*/
material.Lighting = false; //No need for lighting.
material.MaterialType = video::EMT_LIGHTMAP_ADD; //This will add both first and second textures :)
material.BackfaceCulling=false; //not needed, but simplifies things
setAutomaticCulling(scene::EAC_OFF); //We don't need this scene
//node to be culled because we render it in screen space.
}
CScreenQuadSceneNode::~CScreenQuadSceneNode()
{
}
const core::aabbox3df& CScreenQuadSceneNode::getBoundingBox() const
{
return aabb;
}
void CScreenQuadSceneNode::OnRegisterSceneNode()
{
//This method is empty because it is best for us to render this scene node manually.
//So, it is never really rendered on its own, if we don't tell it to do so.
}
void CScreenQuadSceneNode::render()
{
video::IVideoDriver* drv = getSceneManager()->getVideoDriver();
core::matrix4 proj;
u16 indices[] = {0,1,2,3,1,2};
//A triangle list
drv->setMaterial(material);
drv->setTransform(video::ETS_PROJECTION, core::IdentityMatrix);
drv->setTransform(video::ETS_VIEW, core::IdentityMatrix);
drv->setTransform(video::ETS_WORLD, core::IdentityMatrix);
drv->drawIndexedTriangleList(&vertices[0],4,&indices[0],2);
}
u32 CScreenQuadSceneNode::getMaterialCount()
{
return 1; //There is only one material
}
video::SMaterial& CScreenQuadSceneNode::getMaterial(irr::u32 i)
{
return material;//We always return the same material, so there is no need for more.
}
};
/**Once we are done setting up the new Scene Node, it is time to start the engine*/
int main()
{
    char c;
    int effectType;
    video::E_DRIVER_TYPE driverType = driverChoiceConsole(true);
    /**Not that we couldn't chain the effects though...*/
    cout<<"Which postproduction effect do you want?"<<endl
        <<"a) motion Blur"<<endl
        <<"b) Bloom/HDR"<<endl;
    cin>>c;
    //FIX: toupper on a plain (possibly negative) char is undefined behavior;
    //cast through unsigned char first.
    c = (char)toupper((unsigned char)c);
    switch(c){
    case 'A':
        effectType=0;
        break;
    case 'B':
        effectType=1;
        break;
    default:
        effectType=1; //unknown answer: default to the bloom effect
        break;
    }
    SIrrlichtCreationParameters prm;
    prm.DriverType = driverType;
    prm.Bits = 32;
    prm.WindowSize = core::dimension2du(_WIDTH_,_HEIGHT_);
    //prm.Fullscreen = true;
    IrrlichtDevice* dev = createDeviceEx(prm);
    if(dev){
        scene::ISceneManager* smgr = dev->getSceneManager();
        video::IVideoDriver* drv = dev->getVideoDriver();
        //FIX: the original queried EVDF_RENDER_TO_TARGET twice in a row, while the
        //outermost else branch complains about missing multitexturing. Query the
        //feature the error message actually refers to.
        if(drv->queryFeature(video::EVDF_MULTITEXTURE)){
        if(drv->queryFeature(video::EVDF_RENDER_TO_TARGET)){
        if(drv->queryFeature(video::EVDF_TEXTURE_NPOT)){
            drv->setTextureCreationFlag(video::ETCF_ALWAYS_32_BIT,true);
            drv->setFog(video::SColor(255,226,177,141),video::EFT_FOG_LINEAR,100,1000,0.01);
            /**Important stuff, Rendering textures setup.*/
            video::ITexture* mainTarget = drv->addRenderTargetTexture(prm.WindowSize,"mainTarget");
            //FIX: initialize both pointers — "temp" is never assigned in the bloom
            //path and reading an uninitialized pointer is undefined behavior.
            video::ITexture* rtt0 = 0;
            video::ITexture* temp = 0;
            //These colors are needed to modify the colors of the RTT in order for them to mix adequately.
            //Using a shader, the mix is done within the shader code, but we are using just the
            //fixed pipeline this time, so, we need some extra stuff. Normally, postprocessing is not
            //this extended.
            video::SColor colors[] =
            {
                video::SColor(255,96,96,96),
                video::SColor(255,96,96,96),
                video::SColor(255,96,96,96),
                video::SColor(255,96,96,96)
            };
            video::SColor colors1[] =
            {
                video::SColor(255,224,224,224),
                video::SColor(255,224,224,224),
                video::SColor(255,224,224,224),
                video::SColor(255,224,224,224),
            };
            video::SColor colors2[] =
            {
                video::SColor(255,32,32,32),
                video::SColor(255,32,32,32),
                video::SColor(255,32,32,32),
                video::SColor(255,32,32,32)
            };
            switch(effectType){
            case 0://motion blur
                rtt0 = drv->addRenderTargetTexture(prm.WindowSize,"rtt0");
                temp = drv->addRenderTargetTexture(prm.WindowSize,"temp");//Temporarily holds an intermediate result.
                break;
            case 1://HDR-bloom effect, we only need one texture for this :)
                rtt0 = drv->addRenderTargetTexture(core::dimension2du(32,32),"rtt0");
                break;
            }
            CScreenQuadSceneNode* screenQuad = new CScreenQuadSceneNode(smgr->getRootSceneNode(),smgr,10);
            screenQuad->getMaterial(0).setTexture(0,mainTarget);
            screenQuad->getMaterial(0).setTexture(1,rtt0);
            smgr->addCameraSceneNodeFPS();
            smgr->loadScene("postPro.irr");
            int lastFPS = -1;
            while(dev->run()){
                drv->beginScene();
                switch(effectType){
                case 0:
                    /**motionBlur.
                    We have the previous rendered frame stored, so, we render a new frame, and blend it
                    with the previous, the effect will look like things have a motion trail, and after
                    the scene is rendered, we draw it again to the first texture in order to repeat the
                    process*/
                    drv->setRenderTarget(rtt0,true,true);
                    drv->draw2DImage(temp,core::rect<s32>(0,0,_WIDTH_,_HEIGHT_),core::rect<s32>(0,0,_WIDTH_,_HEIGHT_),
                        0,colors1);//Scale the colors of the previous render
                    drv->setRenderTarget(mainTarget,true,true,video::SColor(255,128,160,160));
                    smgr->drawAll();//Draw the main scene
                    drv->setRenderTarget(temp,true,true);
                    drv->draw2DImage(mainTarget,core::rect<s32>(0,0,_WIDTH_,_HEIGHT_),core::rect<s32>(0,0,_WIDTH_,_HEIGHT_),
                        0,colors2);//Scale the colors of the main Scene
                    drv->setRenderTarget(mainTarget,true,true);
                    drv->draw2DImage(temp,core::rect<s32>(0,0,_WIDTH_,_HEIGHT_),core::rect<s32>(0,0,_WIDTH_,_HEIGHT_));
                    //Return TEMP to the mainTarget
                    drv->setRenderTarget(temp,true,true);
                    screenQuad->render();//Draw the screenquad into temp
                    drv->setRenderTarget(video::ERT_FRAME_BUFFER,true,true);
                    drv->draw2DImage(temp,core::position2di(0,0));//Draw the resulting image
                    break;
                case 1:
                    /**Bloom.
                    We render the scene to the main RTT, and then, we scale and darken the RTT in order
                    to apply it to the screen quad, we are using inmediate values because of simplicity*/
                    drv->setRenderTarget(mainTarget,true,true,video::SColor(255,128,160,160));
                    smgr->drawAll();
                    drv->setRenderTarget(rtt0,true,true,video::SColor(0,0,0,0));
                    drv->draw2DImage(mainTarget,core::rect<s32>(0,0,32,32),core::rect<s32>(0,0,_WIDTH_,_HEIGHT_),
                        0,colors
                        );
                    drv->setRenderTarget(video::ERT_FRAME_BUFFER,true,true);
                    screenQuad->render();
                    break;
                }
                drv->endScene();
                int fps = drv->getFPS();
                if (lastFPS != fps)
                {
                    core::stringw str = L"Postproduction Example - Irrlicht Engine [";
                    str += drv->getName();
                    str += "] FPS:";
                    str += fps;
                    dev->setWindowCaption(str.c_str());
                    lastFPS = fps;
                }
            }
            //FIX: "new" gave us a reference and the scene graph grabbed its own;
            //drop ours so the node isn't leaked when the manager clears the scene.
            screenQuad->drop();
        }else{
            cout<<"This driver doesn't support Non power of two textures!."<<endl<<"This sample works only with support for Non power of two rendering textures"<<endl;
        }
        }else{
            cout<<"This driver doesn't support the render to textures!!."<<endl<<"Rendering to textures is required"<<endl;
        }
        }else{
            cout<<"This driver doesn't support Multitextures!!!."<<endl<<"Multitexturing is required"<<endl;
        }
        //FIX: createDeviceEx hands us ownership of the device — release it.
        dev->drop();
    }
    return 0;
}
Motion Blur
Bloom (it is not very noticeable, though...)