Create a complex imager

Object outline imager

In this example, we will create a slightly more complex Imager that draws an outline of a given color around the borders of the objects in the render. Be sure to check out Create Simple Imager before trying this example.

In this example, we use the optional imager_prepare API to set up additional resources required in our imager_evaluate stage. First, we set the schedule type of the Imager to AtImagerSchedule::FULL_FRAME. This changes the scheduling of the imager so that it receives the full frame of pixels of the render, rather than individual buckets of pixels, which our algorithm needs in order to search neighbouring pixels. Secondly, we add the object AOV as an input with AiImagerAddInput() so that we can access it during imager_evaluate.
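
The relevant part of imager_prepare boils down to two calls, shown in context in the full listing below:

schedule = AtImagerSchedule::FULL_FRAME;
AiImagerAddInput(render_session, node, driver, AI_TYPE_NODE, AtString("object"), filter);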

We use the object AOV at evaluation time to determine which object lies in each pixel and thus find the object edges.
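
Since the object AOV arrives in imager_evaluate as a buffer of AtNode pointers, one per pixel, edge detection reduces to comparing a pixel's pointer against those of its neighbours, as in the listing below:

if (curr_obj != obj[sub_idx])
   is_edge = true;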

Finally, we use the AiImagerAddOutput() API to add an additional output to our driver, to which we write just our edge pixels.
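
As the listing below shows, the call is simply:

AiImagerAddOutput(render_session, node, driver, AI_TYPE_RGBA, AtString("edge"));

With the driver_exr node used in the example scene, this edge AOV should then appear as an additional layer in the rendered EXR.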

See Creating a Simple Plugin for further information about compiling imagers.
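
For example, on Linux, a compile command along these lines should work (a sketch, assuming the ARNOLD_PATH environment variable points at the root of the Arnold SDK; adjust for your platform):

g++ -o complex_imager.so -shared -fPIC complex_imager.cpp -I$ARNOLD_PATH/include -L$ARNOLD_PATH/bin -lai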

complex_imager.cpp

#include <ai.h>

AI_IMAGER_NODE_EXPORT_METHODS(ImagerMtd);

node_parameters
{
   AiParameterNode("filter", nullptr);
   AiParameterRGB("color", 0.f, 1.f, 0.f);
   AiParameterFlt("blend", 1.f);
   AiParameterUInt("radius", 3);
}

node_initialize
{
}

node_update
{
}

// Cache parameter and AOV names as AtStrings once; AtString
// comparisons at evaluation time are then fast pointer compares.
namespace
{
   static AtString filter_str("filter");
   static AtString rgba_str("RGBA");
   static AtString obj_str("object");
   static AtString closest_filter_str("closest_filter");
   static AtString color_str("color");
   static AtString blend_str("blend");
   static AtString radius_str("radius");
   static AtString edge_str("edge");
}

imager_prepare
{
   // Set the imager schedule type to full frame always as we need access to all
   // neighbouring pixels
   schedule = AtImagerSchedule::FULL_FRAME;
   AtNode* filter = (AtNode*)AiNodeGetPtr(node, filter_str);
   if (!filter)
   {
      AiMsgError("No filter set!");
      return;
   }
   if (AiNodeEntryGetNameAtString(AiNodeGetNodeEntry(filter)) != closest_filter_str)
   {
      AiMsgError("Imager requires closest_filter!");
      return;
   }
   // Request the object AOV to be accessible to the imager at evaluation time
   AiImagerAddInput(render_session, node, driver, AI_TYPE_NODE, obj_str, filter);
   // Also add another output for us to write just our outline to
   AiImagerAddOutput(render_session, node, driver, AI_TYPE_RGBA, edge_str);
}

imager_evaluate
{

   AtRGBA*  rgba = nullptr;
   AtRGBA*  edge = nullptr;
   AtNode** obj  = nullptr;
   int aov_type = 0;
   const void *bucket_data;
   AtString output_name;
   // Iterate over the outputs and grab pointers to the ones we need
   while (AiOutputIteratorGetNext(iterator, &output_name, &aov_type, &bucket_data))
   {
      if (output_name == rgba_str)
         rgba = (AtRGBA*)bucket_data;
      if (output_name == edge_str)
         edge = (AtRGBA*)bucket_data;
      if (output_name == obj_str)
         obj = (AtNode**)bucket_data;
   }

   if (!rgba || !edge || !obj)
   {
      AiMsgError("Failed to find all required outputs");
      return;
   }

   AtRGB color = AiNodeGetRGB(node, color_str);
   float blend = AiNodeGetFlt(node, blend_str);
   const int radius = AiNodeGetUInt(node, radius_str);
   const int radius2 = radius + radius; // the search window is 2*radius pixels wide
   // Iterate over our framebuffer of pixels
   for (int y = 0; y < bucket_size_y; ++y)
   for (int x = 0; x < bucket_size_x; ++x)
   {
      int idx = y * bucket_size_x + x;
      AtNode* curr_obj = obj[idx];
      if (!curr_obj)
         continue;
      bool is_edge = false;
      // Check our neighbourhood window and compare object pointer to detect if
      // our pixel is near an edge.
      for (int i = 0; i < radius2; i++)
      {
         for (int j = 0; j < radius2; j++)
         {
            const int sub_x = AiMin(AiMax(x - radius + i, 0), bucket_size_x - 1);
            const int sub_y = AiMin(AiMax(y - radius + j, 0), bucket_size_y - 1);
            const int sub_idx = sub_y * bucket_size_x + sub_x;
            if (curr_obj != obj[sub_idx])
               is_edge = true;
         }
         if (is_edge)
            break;
      }

      // If our pixel is an edge pixel, write our color to our RGBA and edge outputs
      if (is_edge)
      {
         rgba[idx] = AiLerp(blend, rgba[idx], AtRGBA(color, rgba[idx].a));
         edge[idx] = AtRGBA(color, blend);
      }
   }
}

node_finish
{
}

node_loader
{
   if (i > 0) return false;
   node->methods     = (AtNodeMethods*) ImagerMtd;
   node->output_type = AI_TYPE_NODE;
   node->name        = "complex_imager";
   node->node_type   = AI_NODE_IMAGER;
   strcpy(node->version, AI_VERSION);
   return true;
}

complex_imager_example.ass

options
{
 AA_samples 3
 GI_diffuse_depth 0
 outputs 1 1 STRING "RGBA RGBA filter out"
}

persp_camera
{
 name persp
 position 5 5 5
 look_at 0 0 0
 up 0 1 0
 fov 54
 shutter_start 0
 shutter_end  1
}

box_filter
{
  name filter
}

closest_filter
{
  name myClosestFilter
}

complex_imager
{
  name imager
  filter myClosestFilter
}

driver_exr
{
  name out
  filename complex_imager.exr
  tiled false
  input imager
}

standard_surface
{
 name myshader
 base 0.8
 base_color 1 0 0
}

standard_surface
{
 name myshader_ground
 base 0.8
 base_color 1 1 1
}

point_light
{
 name mylight
 position 5 5 5
 intensity 100
 color 1 1 1
}

plane
{
 name __ground__plane__
 point 0 0 0
 normal 0 1 0
 shader myshader_ground
}

sphere
{
 name sphere0
 center 1 1 POINT 0 0 0
 radius 1 1 FLOAT 2
 shader myshader
}
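
Assuming the compiled plugin sits next to the scene file, the example can be rendered with something like the following, where kick's -l flag adds a plugin search path:

kick -l . complex_imager_example.ass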

[Images: complex imager RGBA render; complex imager edge render]
