Sample Code#

The following sections list the code samples that are part of the pylon SDK.

Location on Windows#

The pylon sample solutions can be found under <SDK ROOT>\Development\Samples\C++. The sample solutions are provided in Microsoft Visual Studio 2010 format. Later Visual Studio versions can convert a Visual Studio 2010 solution to the required format, e.g., to a Microsoft Visual Studio 2019 solution. Additionally, CMakeLists.txt files are available, with one central CMakeLists.txt file at <SDK ROOT>\Development\Samples covering all samples.

Using Visual Studio 2015

Visual Studio 2015 doesn't have a native CMake integration.

If CMake >= 3.14 is not installed, download a CMake Windows installer from https://cmake.org/download, launch the .msi installer, and follow the instructions.

Location on Linux#

The pylon samples can be found under <SDK ROOT>/Samples. A GNU makefile is available for each sample.

Location on macOS#

The pylon samples can be found in the pylon Camera Software Suite, in the Samples folder. An Apple Xcode project is available for each sample. Before they can be used with Xcode, the pylon for macOS samples must be copied from the pylon Camera Software Suite to a writable location, e.g., your home directory.

Include Files Used by Samples#

The following include files are used by the samples shown below.

CameraEventPrinter.h#

This include file is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Contains a Camera Event Handler that prints a message for each event method call.

#ifndef INCLUDED_CAMERAEVENTPRINTER_H_4683453
#define INCLUDED_CAMERAEVENTPRINTER_H_4683453

#include <pylon/CameraEventHandler.h>
#include <pylon/ParameterIncludes.h>
#include <iostream>

namespace Pylon
{
    class CInstantCamera;

    class CCameraEventPrinter : public CCameraEventHandler
    {
    public:
        virtual void OnCameraEvent( CInstantCamera& camera, intptr_t userProvidedId, GenApi::INode* pNode )
        {
            std::cout << "OnCameraEvent event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
            std::cout << "User provided ID: " << userProvidedId << std::endl;
            std::cout << "Event data node name: " << pNode->GetName() << std::endl;
            CParameter value( pNode );
            if (value.IsValid())
            {
                std::cout << "Event node data: " << value.ToString() << std::endl;
            }
            std::cout << std::endl;
        }
    };
}

#endif /* INCLUDED_CAMERAEVENTPRINTER_H_4683453 */
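
For reference, a handler like this is attached with RegisterCameraEventHandler(), as demonstrated in the Grab_CameraEvents sample below. The following is a minimal sketch, not part of the SDK samples; the node name "EventExposureEndData" and the user-provided ID 100 are assumptions that apply to cameras based on SFNC 2.0 or later.

// Minimal usage sketch (assumes an SFNC 2.0 camera providing the "EventExposureEndData" node).
Pylon::CInstantCamera camera( Pylon::CTlFactory::GetInstance().CreateFirstDevice() );
camera.RegisterCameraEventHandler( new Pylon::CCameraEventPrinter, "EventExposureEndData", 100,
                                   Pylon::RegistrationMode_Append, Pylon::Cleanup_Delete );
camera.GrabCameraEvents = true; // Camera event processing is off by default.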

ConfigurationEventPrinter.h#

This include file is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Contains a Configuration Event Handler that prints a message for each event method call.

#ifndef INCLUDED_CONFIGURATIONEVENTPRINTER_H_663006
#define INCLUDED_CONFIGURATIONEVENTPRINTER_H_663006

#include <pylon/ConfigurationEventHandler.h>
#include <iostream>

namespace Pylon
{
    class CInstantCamera;

    class CConfigurationEventPrinter : public CConfigurationEventHandler
    {
    public:
        void OnAttach( CInstantCamera& /*camera*/ )
        {
            std::cout << "OnAttach event" << std::endl;
        }

        void OnAttached( CInstantCamera& camera )
        {
            std::cout << "OnAttached event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnOpen( CInstantCamera& camera )
        {
            std::cout << "OnOpen event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnOpened( CInstantCamera& camera )
        {
            std::cout << "OnOpened event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnGrabStart( CInstantCamera& camera )
        {
            std::cout << "OnGrabStart event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnGrabStarted( CInstantCamera& camera )
        {
            std::cout << "OnGrabStarted event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnGrabStop( CInstantCamera& camera )
        {
            std::cout << "OnGrabStop event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnGrabStopped( CInstantCamera& camera )
        {
            std::cout << "OnGrabStopped event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnClose( CInstantCamera& camera )
        {
            std::cout << "OnClose event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnClosed( CInstantCamera& camera )
        {
            std::cout << "OnClosed event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnDestroy( CInstantCamera& camera )
        {
            std::cout << "OnDestroy event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnDestroyed( CInstantCamera& /*camera*/ )
        {
            std::cout << "OnDestroyed event" << std::endl;
        }

        void OnDetach( CInstantCamera& camera )
        {
            std::cout << "OnDetach event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnDetached( CInstantCamera& camera )
        {
            std::cout << "OnDetached event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }

        void OnGrabError( CInstantCamera& camera, const char* errorMessage )
        {
            std::cout << "OnGrabError event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
            std::cout << "Error Message: " << errorMessage << std::endl;
        }

        void OnCameraDeviceRemoved( CInstantCamera& camera )
        {
            std::cout << "OnCameraDeviceRemoved event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
        }
    };
}

#endif /* INCLUDED_CONFIGURATIONEVENTPRINTER_H_663006 */
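
For reference, the samples below attach this printer by appending it to the camera's registered configuration event handlers. A minimal sketch:

// Minimal usage sketch: print a message for each configuration event of the camera.
Pylon::CInstantCamera camera( Pylon::CTlFactory::GetInstance().CreateFirstDevice() );
camera.RegisterConfiguration( new Pylon::CConfigurationEventPrinter, Pylon::RegistrationMode_Append, Pylon::Cleanup_Delete );
camera.Open(); // Prints the OnOpen and OnOpened messages, among others.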

ImageEventPrinter.h#

This include file is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Contains an Image Event Handler that prints a message for each event method call.

#ifndef INCLUDED_IMAGEEVENTPRINTER_H_7884943
#define INCLUDED_IMAGEEVENTPRINTER_H_7884943

#include <pylon/ImageEventHandler.h>
#include <pylon/GrabResultPtr.h>
#include <iostream>

namespace Pylon
{
    class CInstantCamera;

    class CImageEventPrinter : public CImageEventHandler
    {
    public:

        virtual void OnImagesSkipped( CInstantCamera& camera, size_t countOfSkippedImages )
        {
            std::cout << "OnImagesSkipped event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;
            std::cout << countOfSkippedImages << " images have been skipped." << std::endl;
            std::cout << std::endl;
        }


        virtual void OnImageGrabbed( CInstantCamera& camera, const CGrabResultPtr& ptrGrabResult )
        {
            std::cout << "OnImageGrabbed event for device " << camera.GetDeviceInfo().GetModelName() << std::endl;

            // Image grabbed successfully?
            if (ptrGrabResult->GrabSucceeded())
            {
                std::cout << "SizeX: " << ptrGrabResult->GetWidth() << std::endl;
                std::cout << "SizeY: " << ptrGrabResult->GetHeight() << std::endl;
                const uint8_t* pImageBuffer = (uint8_t*) ptrGrabResult->GetBuffer();
                std::cout << "Gray value of first pixel: " << (uint32_t) pImageBuffer[0] << std::endl;
                std::cout << std::endl;
            }
            else
            {
                std::cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << std::endl;
            }
        }
    };
}

#endif /* INCLUDED_IMAGEEVENTPRINTER_H_7884943 */
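
For reference, this handler is attached with RegisterImageEventHandler() and is called from RetrieveResult() for every grab result, as shown in the Grab_MultiCast sample below. A minimal sketch:

// Minimal usage sketch: print information about every grabbed image.
Pylon::CInstantCamera camera( Pylon::CTlFactory::GetInstance().CreateFirstDevice() );
camera.RegisterImageEventHandler( new Pylon::CImageEventPrinter, Pylon::RegistrationMode_Append, Pylon::Cleanup_Delete );
camera.StartGrabbing( 10 );
Pylon::CGrabResultPtr ptrGrabResult;
while (camera.IsGrabbing())
{
    // RetrieveResult() calls CImageEventPrinter::OnImageGrabbed() for each result.
    camera.RetrieveResult( 5000, ptrGrabResult, Pylon::TimeoutHandling_ThrowException );
}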

PixelFormatAndAoiConfiguration.h#

This include file is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Contains a configuration that sets pixel data format and Image AOI.

#ifndef INCLUDED_PIXELFORMATANDAOICONFIGURATION_H_00104928
#define INCLUDED_PIXELFORMATANDAOICONFIGURATION_H_00104928

#include <pylon/ConfigurationEventHandler.h>
#include <pylon/ParameterIncludes.h>

namespace Pylon
{
    class CInstantCamera;
}
class CPixelFormatAndAoiConfiguration : public Pylon::CConfigurationEventHandler
{
public:
    void OnOpened( Pylon::CInstantCamera& camera )
    {
        try
        {
            // Allow all the names in the namespace Pylon to be used without qualification.
            using namespace Pylon;

            // Get the camera control object.
            GenApi::INodeMap& nodemap = camera.GetNodeMap();

            // Get the parameters for setting the image area of interest (Image AOI).
            CIntegerParameter width( nodemap, "Width" );
            CIntegerParameter height( nodemap, "Height" );
            CIntegerParameter offsetX( nodemap, "OffsetX" );
            CIntegerParameter offsetY( nodemap, "OffsetY" );

            // Maximize the Image AOI.
            offsetX.TrySetToMinimum(); // Set to minimum if writable.
            offsetY.TrySetToMinimum(); // Set to minimum if writable.
            width.SetToMaximum();
            height.SetToMaximum();

            // Set the pixel data format.
            CEnumParameter( nodemap, "PixelFormat" ).SetValue( "Mono8" );
        }
        catch (const Pylon::GenericException& e)
        {
            throw RUNTIME_EXCEPTION( "Could not apply configuration. const GenericException caught in OnOpened method msg=%hs", e.what() );
        }
    }
};

#endif /* INCLUDED_PIXELFORMATANDAOICONFIGURATION_H_00104928 */
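
Because the configuration is applied in the OnOpened() method, it is sufficient to register it before calling Open(). A minimal sketch:

// Minimal usage sketch: maximize the AOI and set the Mono8 pixel format when the camera is opened.
Pylon::CInstantCamera camera( Pylon::CTlFactory::GetInstance().CreateFirstDevice() );
camera.RegisterConfiguration( new CPixelFormatAndAoiConfiguration, Pylon::RegistrationMode_Append, Pylon::Cleanup_Delete );
camera.Open(); // OnOpened() sets Width, Height, OffsetX, OffsetY, and PixelFormat.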

SampleImageCreator.h#

This include file is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Contains functions for creating sample images.

#ifndef INCLUDED_SAMPLEIMAGECREATOR_H_2792867
#define INCLUDED_SAMPLEIMAGECREATOR_H_2792867

#include <pylon/PylonImage.h>
#include <pylon/Pixel.h>
#include <pylon/ImageFormatConverter.h>

namespace SampleImageCreator
{
    Pylon::CPylonImage CreateJuliaFractal( Pylon::EPixelType pixelType, uint32_t width, uint32_t height )
    {
        // Allow all the names in the namespace Pylon to be used without qualification.
        using namespace Pylon;

        // Define Constants.
        static const SRGB8Pixel palette[] =
        {
            {0, 28, 50}, {0, 42, 75}, {0, 56, 100}, {0, 70, 125}, {0, 84, 150},
            {0, 50, 0}, {0, 100, 0}, {0, 150, 0}, {0, 200, 0}, {0, 250, 0},
            {50, 0, 0}, {100, 0, 0}, {150, 0, 0}, {200, 0, 0}, {250, 0, 0}
        };
        uint32_t numColors = sizeof( palette ) / sizeof( palette[0] );

        const double cX = -0.735;
        const double cY = 0.11;
        const double cMaxX = 1.6;
        const double cMinX = -1.6;
        const double cMaxY = 1;
        const double cMinY = -1;
        const uint32_t cMaxIterations = 50;

        // Create image.
        CPylonImage juliaFractal( CPylonImage::Create( PixelType_RGB8packed, width, height ) );

        // Get the pointer to the first pixel.
        SRGB8Pixel* pCurrentPixel = (SRGB8Pixel*) juliaFractal.GetBuffer();

        // Compute the fractal.
        for (uint32_t pixelY = 0; pixelY < height; ++pixelY)
        {
            for (uint32_t pixelX = 0; pixelX < width; ++pixelX, ++pCurrentPixel)
            {
                long double x = ((cMaxX - cMinX) / width) * pixelX + cMinX;
                long double y = cMaxY - pixelY * ((cMaxY - cMinY) / height);
                long double xd = 0;
                long double yd = 0;
                uint32_t i = 0;

                for (; i < cMaxIterations; ++i)
                {
                    xd = x * x - y * y + cX;
                    yd = 2 * x * y + cY;
                    x = xd;
                    y = yd;
                    if ((x * x + y * y) > 4)
                    {
                        break;
                    }
                }

                if (i >= cMaxIterations)
                {
                    *pCurrentPixel = palette[0];
                }
                else
                {
                    *pCurrentPixel = palette[i % numColors];
                }
            }
        }

        // Convert the image to the target format if needed.
        if (juliaFractal.GetPixelType() != pixelType)
        {
            CImageFormatConverter converter;
            converter.OutputPixelFormat = pixelType;
            converter.OutputBitAlignment = OutputBitAlignment_MsbAligned;
            converter.Convert( juliaFractal, CPylonImage( juliaFractal ) );
        }

        // Return the image.
        return juliaFractal;
    }


    Pylon::CPylonImage CreateMandelbrotFractal( Pylon::EPixelType pixelType, uint32_t width, uint32_t height )
    {
        // Allow all the names in the namespace Pylon to be used without qualification.
        using namespace Pylon;

        // Define constants.
        static const SRGB8Pixel palette[] =
        {
            {0, 28, 50}, {0, 42, 75}, {0, 56, 100}, {0, 70, 125}, {0, 84, 150},
            {0, 50, 0}, {0, 100, 0}, {0, 150, 0}, {0, 200, 0}, {0, 250, 0},
            {50, 0, 0}, {100, 0, 0}, {150, 0, 0}, {200, 0, 0}, {250, 0, 0}
        };
        uint32_t numColors = sizeof( palette ) / sizeof( palette[0] );

        const double  cMaxX = 1.0;
        const double  cMinX = -2.0;
        const double  cMaxY = 1.2;
        const double  cMinY = -1.2;
        const uint32_t cMaxIterations = 50;

        // Create image.
        CPylonImage mandelbrotFractal( CPylonImage::Create( PixelType_RGB8packed, width, height ) );

        // Get the pointer to the first pixel.
        SRGB8Pixel* pCurrentPixel = (SRGB8Pixel*) mandelbrotFractal.GetBuffer();

        // Compute the fractal.
        for (uint32_t pixelY = 0; pixelY < height; ++pixelY)
        {
            for (uint32_t pixelX = 0; pixelX < width; ++pixelX, ++pCurrentPixel)
            {
                long double xStart = ((cMaxX - cMinX) / width) * pixelX + cMinX;
                long double yStart = cMaxY - pixelY * ((cMaxY - cMinY) / height);
                long double x = xStart;
                long double y = yStart;
                long double xd = 0;
                long double yd = 0;
                uint32_t i = 0;

                for (; i < cMaxIterations; ++i)
                {
                    xd = x * x - y * y + xStart;
                    yd = 2 * x * y + yStart;
                    x = xd;
                    y = yd;
                    if ((x * x + y * y) > 4)
                    {
                        break;
                    }
                }

                if (i >= cMaxIterations)
                {
                    *pCurrentPixel = palette[0];
                }
                else
                {
                    *pCurrentPixel = palette[i % numColors];
                }
            }
        }

        // Convert the image to the target format if needed.
        if (mandelbrotFractal.GetPixelType() != pixelType)
        {
            CImageFormatConverter converter;
            converter.OutputPixelFormat = pixelType;
            converter.OutputBitAlignment = OutputBitAlignment_MsbAligned;
            converter.Convert( mandelbrotFractal, CPylonImage( mandelbrotFractal ) );
        }

        // Return the image.
        return mandelbrotFractal;
    }

}

#endif /* INCLUDED_SAMPLEIMAGECREATOR_H_2792867 */
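
The returned CPylonImage can be used like any other pylon image, e.g., it can be saved to disk using CImagePersistence. A minimal sketch; the output file name is an arbitrary choice:

// Minimal usage sketch: create a Mono8 Mandelbrot image and save it as a PNG file.
// Requires #include <pylon/ImagePersistence.h>.
Pylon::CPylonImage image = SampleImageCreator::CreateMandelbrotFractal( Pylon::PixelType_Mono8, 640, 480 );
Pylon::CImagePersistence::Save( Pylon::ImageFileFormat_Png, "MandelbrotFractal.png", image );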

DeviceRemovalHandling#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// DeviceRemovalHandling.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample program demonstrates how to be informed about the removal of a camera device.
    It also shows how to reconnect to a removed device.

    Attention:
    If you build this sample in debug mode and run it using a GigE camera device, pylon will set the heartbeat
    timeout to 60 minutes. This is done to allow debugging and single-stepping without losing the camera
    connection due to missing heartbeats. However, with this setting, it would take 60 minutes for the
    application to notice that a GigE device has been disconnected.
    As a workaround, the heartbeat timeout is set to 1000 ms.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#include "../include/ConfigurationEventPrinter.h"

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;


// When using device-specific Instant Camera classes, specific configuration event handler classes are available and can be used,
// for example, Pylon::CBaslerUniversalConfigurationEventHandler.
// Example of a configuration event handler that handles device removal events.
class CSampleConfigurationEventHandler : public Pylon::CConfigurationEventHandler
{
public:
    // This method is called from a different thread when the camera device removal has been detected.
    void OnCameraDeviceRemoved( CInstantCamera& /*camera*/ )
    {
        // Print two new lines, just for improving printed output.
        cout << endl << endl;
        cout << "CSampleConfigurationEventHandler::OnCameraDeviceRemoved called." << std::endl;
    }
};

// Time to wait, in quarters of seconds (60 seconds in total).
static const uint32_t c_loopCounterInitialValue = 60 * 4;

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Declare a local counter used for waiting.
        int loopCount = 0;

        // Get the transport layer factory.
        CTlFactory& tlFactory = CTlFactory::GetInstance();

        // Create an instant camera object with the camera device found first.
        CInstantCamera camera( tlFactory.CreateFirstDevice() );

        // Print the camera information.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;
        cout << "Friendly Name: " << camera.GetDeviceInfo().GetFriendlyName() << endl;
        cout << "Full Name    : " << camera.GetDeviceInfo().GetFullName() << endl;
        cout << "SerialNumber : " << camera.GetDeviceInfo().GetSerialNumber() << endl;
        cout << endl;

        // For demonstration purposes only, register another configuration event handler that handles device removal.
        camera.RegisterConfiguration( new CSampleConfigurationEventHandler, RegistrationMode_Append, Cleanup_Delete );

        // For demonstration purposes only, add a sample configuration event handler to print out information
        // about camera use.
        camera.RegisterConfiguration( new CConfigurationEventPrinter, RegistrationMode_Append, Cleanup_Delete );

        // Open the camera. Camera device removal is only detected while the camera is open.
        camera.Open();

        // Now, try to detect that the camera has been removed:

        // Ask the user to disconnect a device
        loopCount = c_loopCounterInitialValue;
        cout << endl << "Please disconnect the device (timeout " << loopCount / 4 << "s) " << endl;

        /////////////////////////////////////////////////// don't single step beyond this line  (see comments above)

        // Before testing the callbacks, we manually set the heartbeat timeout to a short value when using GigE cameras.
        // Since for debug versions the heartbeat timeout has been set to 60 minutes, it would take up to 60 minutes
        // until detection of the device removal.
        CIntegerParameter heartbeat( camera.GetTLNodeMap(), "HeartbeatTimeout" );
        heartbeat.TrySetValue( 1000, IntegerValueCorrection_Nearest );  // set to 1000 ms timeout if writable

        try
        {
            // Get a camera parameter using generic parameter access.
            CIntegerParameter width( camera.GetNodeMap(), "Width" );

            // The following loop accesses the camera. It could also be a loop that is
            // grabbing images. The device removal is handled in the exception handler.
            while (loopCount > 0)
            {
                // Print a "." every second to tell the user we're waiting for the callback.
                if (--loopCount % 4 == 0)
                {
                    cout << ".";
                    cout.flush();
                }
                WaitObject::Sleep( 250 );

                // Change the width value in the camera depending on the loop counter.
                // Any access to the camera like setting parameters or grabbing images
                // will fail throwing an exception if the camera has been disconnected.
                width.SetValue( width.GetMax() - (width.GetInc() * (loopCount % 2)) );
            }

        }
        catch (const GenericException& e)
        {
            // An exception occurred. Is it because the camera device has been physically removed?

            // Known issue: Wait until the system safely detects a possible removal.
            WaitObject::Sleep( 1000 );

            if (camera.IsCameraDeviceRemoved())
            {
                // The camera device has been removed. This caused the exception.
                cout << endl;
                cout << "The camera has been removed from the computer." << endl;
                cout << "The camera device removal triggered an expected exception:" << endl
                    << e.GetDescription() << endl;
            }
            else
            {
                // An unexpected error has occurred.

                // In this example it is handled by exiting the program.
                throw;
            }
        }

        if (!camera.IsCameraDeviceRemoved())
            cout << endl << "Timeout expired" << endl;

        /////////////////////////////////////////////////// Safe to use single stepping (see comments above).

        // Now try to find the detached camera after it has been attached again:

        // Create a device info object for remembering the camera properties.
        CDeviceInfo info;

        // Remember the camera properties that allow detecting the same camera again.
        info.SetDeviceClass( camera.GetDeviceInfo().GetDeviceClass() );
        info.SetSerialNumber( camera.GetDeviceInfo().GetSerialNumber() );

        // Destroy the Pylon Device representing the detached camera device.
        // It can't be used anymore.
        camera.DestroyDevice();

        // Ask the user to connect the same device.
        loopCount = c_loopCounterInitialValue;
        cout << endl << "Please connect the same device to the computer again (timeout " << loopCount / 4 << "s) " << endl;

        // Create a filter containing the CDeviceInfo object info which describes the properties of the device we are looking for.
        DeviceInfoList_t filter;
        filter.push_back( info );

        for (; loopCount > 0; --loopCount)
        {
            // Print a "." every second to tell the user we're waiting for the camera to be attached.
            if (loopCount % 4 == 0)
            {
                cout << ".";
                cout.flush();
            }

            // Try to find the camera we are looking for.
            DeviceInfoList_t devices;
            if (tlFactory.EnumerateDevices( devices, filter ) > 0)
            {
                // Print two new lines, just for improving printed output.
                cout << endl << endl;

                // The camera has been found. Create and attach it to the Instant Camera object.
                camera.Attach( tlFactory.CreateDevice( devices[0] ) );
                // Exit waiting.
                break;
            }

            WaitObject::Sleep( 250 );
        }

        // If the camera has been found.
        if (camera.IsPylonDeviceAttached())
        {
            // Print the camera information.
            cout << endl;
            cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;
            cout << "Friendly Name: " << camera.GetDeviceInfo().GetFriendlyName() << endl;
            cout << "Full Name    : " << camera.GetDeviceInfo().GetFullName() << endl;
            cout << "SerialNumber : " << camera.GetDeviceInfo().GetSerialNumber() << endl;
            cout << endl;

            // All configuration objects and other event handler objects are still registered.
            // The configuration objects will parameterize the camera device and the instant
            // camera will be ready for operation again.

            // Open the camera.
            camera.Open();

            // Now the Instant Camera object can be used as before.
        }
        else // Timeout
        {
            cout << endl << "Timeout expired." << endl;
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}

Grab#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to grab and process images using the CInstantCamera class.
    The images are grabbed and processed asynchronously, i.e.,
    while the application is processing a buffer, the acquisition of the next buffer is done
    in parallel.

    The CInstantCamera class uses a pool of buffers to retrieve image data
    from the camera device. Once a buffer is filled and ready,
    the buffer can be retrieved from the camera object for processing. The buffer
    and additional image data are collected in a grab result. The grab result is
    held by a smart pointer after retrieval. The buffer is automatically reused
    when explicitly released or when the smart pointer object is destroyed.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 100;

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the camera device found first.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // The parameter MaxNumBuffer can be used to control the count of buffers
        // allocated for grabbing. The default value of this parameter is 10.
        camera.MaxNumBuffer = 5;

        // Start the grabbing of c_countOfImagesToGrab images.
        // The camera device is parameterized with a default configuration which
        // sets up free-running continuous acquisition.
        camera.StartGrabbing( c_countOfImagesToGrab );

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
        // when c_countOfImagesToGrab images have been retrieved.
        while (camera.IsGrabbing())
        {
            // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );

            // Image grabbed successfully?
            if (ptrGrabResult->GrabSucceeded())
            {
                // Access the image data.
                cout << "SizeX: " << ptrGrabResult->GetWidth() << endl;
                cout << "SizeY: " << ptrGrabResult->GetHeight() << endl;
                const uint8_t* pImageBuffer = (uint8_t*) ptrGrabResult->GetBuffer();
                cout << "Gray value of first pixel: " << (uint32_t) pImageBuffer[0] << endl << endl;

#ifdef PYLON_WIN_BUILD
                // Display the grabbed image.
                Pylon::DisplayImage( 1, ptrGrabResult );
#endif
            }
            else
            {
                cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << endl;
            }
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}

Grab_CameraEvents#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_CameraEvents.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample shows how to register event handlers that indicate the arrival of events
    sent by the camera. For demonstration purposes, several different handlers are registered
    for the same event.

    Basler USB3 Vision and GigE Vision cameras can send event messages. For example, when a sensor
    exposure has finished, the camera can send an Exposure End event to the computer. The event
    can be received by the computer before the image data of the finished exposure has been transferred
    completely. This sample demonstrates how to be notified when camera event message data is received.

    The event messages are automatically retrieved and processed by the InstantCamera classes.
    The information carried by event messages is exposed as parameter nodes in the camera node map
    and can be accessed like standard camera parameters. These nodes are updated
    when a camera event is received. You can register camera event handler objects that are
    triggered when event data has been received.

    These mechanisms are demonstrated for the Exposure End and the Event Overrun events.
    The Exposure End event carries the following information:
    * ExposureEndEventFrameID: Number of the image that has been exposed.
    * ExposureEndEventTimestamp: Time when the event was generated.
    The Event Overrun event is sent by the camera as a warning that events are being dropped. The
    notification contains no specific information about how many or which events have been dropped.
    Events may be dropped if events are generated at a high frequency and if there isn't enough
    bandwidth available to send the events.

    Note: Different camera series implement different versions of the Standard Feature Naming Convention (SFNC).
    That's why the name and the type of the parameters used can be different.
*/


// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Include files used by samples.
#include "../include/ConfigurationEventPrinter.h"
#include "../include/CameraEventPrinter.h"

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using cout.
using namespace std;

// Enumeration used for distinguishing different events.
enum MyEvents
{
    eMyExposureEndEvent = 100,
    eMyEventOverrunEvent = 200
    // More events can be added here.
};

// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 5;


// Example handler for camera events.
class CSampleCameraEventHandler : public CBaslerUniversalCameraEventHandler
{
public:
    // Only very short processing tasks should be performed by this method. Otherwise, the event notification will block the
    // processing of images.
    virtual void OnCameraEvent( CBaslerUniversalInstantCamera& camera, intptr_t userProvidedId, GenApi::INode* /* pNode */ )
    {
        std::cout << std::endl;
        switch (userProvidedId)
        {
            case eMyExposureEndEvent: // Exposure End event
                if (camera.EventExposureEndFrameID.IsReadable()) // Applies to cameras based on SFNC 2.0 or later, e.g., USB cameras
                {
                    cout << "Exposure End event. FrameID: " << camera.EventExposureEndFrameID.GetValue() << " Timestamp: " << camera.EventExposureEndTimestamp.GetValue() << std::endl << std::endl;
                }
                else
                {
                    cout << "Exposure End event. FrameID: " << camera.ExposureEndEventFrameID.GetValue() << " Timestamp: " << camera.ExposureEndEventTimestamp.GetValue() << std::endl << std::endl;
                }
                break;
            case eMyEventOverrunEvent:  // Event Overrun event
                cout << "Event Overrun event. FrameID: " << camera.EventOverrunEventFrameID.GetValue() << " Timestamp: " << camera.EventOverrunEventTimestamp.GetValue() << std::endl << std::endl;
                break;
        }
    }
};

// Example of an image event handler.
class CSampleImageEventHandler : public CImageEventHandler
{
public:
    virtual void OnImageGrabbed( CInstantCamera& /*camera*/, const CGrabResultPtr& /*ptrGrabResult*/ )
    {
        cout << "CSampleImageEventHandler::OnImageGrabbed called." << std::endl;
        cout << std::endl;
        cout << std::endl;
    }
};

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    // Create an example event handler. In this case, we use a single camera event handler for handling multiple camera events.
    // The handler prints a message for each received event.
    CSampleCameraEventHandler* pHandler1 = new CSampleCameraEventHandler;

    // Create another more generic event handler printing out information about the node for which an event callback
    // is fired.
    CCameraEventPrinter* pHandler2 = new CCameraEventPrinter;

    try
    {
        // Create an instant camera object with the first found camera device.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Register the standard configuration event handler for enabling software triggering.
        // The software trigger configuration handler replaces the default configuration
        // as all currently registered configuration handlers are removed by setting the registration mode to RegistrationMode_ReplaceAll.
        camera.RegisterConfiguration( new CSoftwareTriggerConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete );

        // For demonstration purposes only, registers an event handler configuration to print out information about camera use.
        // The event handler configuration is appended to the registered software trigger configuration handler by setting 
        // registration mode to RegistrationMode_Append.
        camera.RegisterConfiguration( new CConfigurationEventPrinter, RegistrationMode_Append, Cleanup_Delete ); // Camera use.

        // For demonstration purposes only, register another image event handler.
        camera.RegisterImageEventHandler( new CSampleImageEventHandler, RegistrationMode_Append, Cleanup_Delete );

        // Camera event processing must be activated first; it is off by default.
        camera.GrabCameraEvents = true;


        // Open the camera for setting parameters.
        camera.Open();

        // Check if the device supports events.
        if (!camera.EventSelector.IsWritable())
        {
            throw RUNTIME_EXCEPTION( "The device doesn't support events." );
        }



        // Cameras based on SFNC 2.0 or later, e.g., USB cameras
        if (camera.GetSfncVersion() >= Sfnc_2_0_0)
        {
            // Register an event handler for the Exposure End event. For each event type, there is a "data" node
            // representing the event. The actual data that is carried by the event is held by child nodes of the
            // data node. In the case of the Exposure End event, the child nodes are EventExposureEndFrameID and EventExposureEndTimestamp.
            // The CSampleCameraEventHandler demonstrates how to access the child nodes within
            // a callback that is fired for the parent data node.
            // The user-provided ID eMyExposureEndEvent can be used to distinguish between multiple events (not shown).
            camera.RegisterCameraEventHandler( pHandler1, "EventExposureEndData", eMyExposureEndEvent, RegistrationMode_ReplaceAll, Cleanup_None );
            // The handler is registered for both the EventExposureEndFrameID and the EventExposureEndTimestamp
            // nodes. These nodes represent the data carried by the Exposure End event.
            // For each Exposure End event received, the handler will be called twice, once for the frame ID, and
            // once for the time stamp.
            camera.RegisterCameraEventHandler( pHandler2, "EventExposureEndFrameID", eMyExposureEndEvent, RegistrationMode_Append, Cleanup_None );
            camera.RegisterCameraEventHandler( pHandler2, "EventExposureEndTimestamp", eMyExposureEndEvent, RegistrationMode_Append, Cleanup_None );
        }
        else
        {
            // Register an event handler for the Exposure End event. For each event type, there is a "data" node
            // representing the event. The actual data that is carried by the event is held by child nodes of the
            // data node. In the case of the Exposure End event, the child nodes are ExposureEndEventFrameID, ExposureEndEventTimestamp,
            // and ExposureEndEventStreamChannelIndex. The CSampleCameraEventHandler demonstrates how to access the child nodes within
            // a callback that is fired for the parent data node.
            camera.RegisterCameraEventHandler( pHandler1, "ExposureEndEventData", eMyExposureEndEvent, RegistrationMode_ReplaceAll, Cleanup_None );

            // Register the same handler for a second event. The user-provided ID can be used
            // to distinguish between the events.
            camera.RegisterCameraEventHandler( pHandler1, "EventOverrunEventData", eMyEventOverrunEvent, RegistrationMode_Append, Cleanup_None );

            // The handler is registered for both the ExposureEndEventFrameID and the ExposureEndEventTimestamp
            // nodes. These nodes represent the data carried by the Exposure End event.
            // For each Exposure End event received, the handler will be called twice, once for the frame ID, and
            // once for the time stamp.
            camera.RegisterCameraEventHandler( pHandler2, "ExposureEndEventFrameID", eMyExposureEndEvent, RegistrationMode_Append, Cleanup_None );
            camera.RegisterCameraEventHandler( pHandler2, "ExposureEndEventTimestamp", eMyExposureEndEvent, RegistrationMode_Append, Cleanup_None );
        }

        // Enable sending of Exposure End events.
        // Select the event to receive.
        camera.EventSelector.SetValue( EventSelector_ExposureEnd );

        // Enable it.
        if (!camera.EventNotification.TrySetValue( EventNotification_On ))
        {
            // scout-f, scout-g, and aviator GigE cameras use a different value
            camera.EventNotification.SetValue( EventNotification_GenICamEvent );
        }


        // Enable event notification for the EventOverrun event, if available
        if (camera.EventSelector.TrySetValue( EventSelector_EventOverrun ))
        {
            // Enable it.
            if (!camera.EventNotification.TrySetValue( EventNotification_On ))
            {
                // scout-f, scout-g, and aviator GigE cameras use a different value
                camera.EventNotification.SetValue( EventNotification_GenICamEvent );
            }
        }


        // Start the grabbing of c_countOfImagesToGrab images.
        camera.StartGrabbing( c_countOfImagesToGrab );

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
        // when c_countOfImagesToGrab images have been retrieved.
        while (camera.IsGrabbing())
        {
            // Execute the software trigger. Wait up to 1000 ms for the camera to be ready for trigger.
            if (camera.WaitForFrameTriggerReady( 1000, TimeoutHandling_ThrowException ))
            {
                camera.ExecuteSoftwareTrigger();
            }

            // Retrieve grab results and notify the camera event and image event handlers.
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );
            // Nothing to do here with the grab result, the grab results are handled by the registered event handler.
        }

        // Disable sending Exposure End events.
        camera.EventSelector.SetValue( EventSelector_ExposureEnd );
        camera.EventNotification.SetValue( EventNotification_Off );

        // Disable sending Event Overrun events.
        if (camera.EventSelector.TrySetValue( EventSelector_EventOverrun ))
        {
            camera.EventNotification.SetValue( EventNotification_Off );
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Delete the event handlers.
    delete pHandler1;
    delete pHandler2;

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}

Grab_ChunkImage#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_ChunkImage.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    Basler cameras provide chunk features: The cameras can generate certain information about each image,
    e.g., frame counters, timestamps, and CRC checksums, that is appended to the image data as data "chunks".
    This sample illustrates how to enable chunk features, how to grab images and how to process the appended
    data. When the camera is in chunk mode, it transfers data blocks that are partitioned into chunks. The first
    chunk is always the image data. When chunk features are enabled, the image data chunk is followed by chunks
    containing the information generated by the chunk features.
*/

// Include files to use the pylon API
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using cout.
using namespace std;


// Example of a device-specific handler for image events.
class CSampleImageEventHandler : public CBaslerUniversalImageEventHandler
{
public:
    virtual void OnImageGrabbed( CBaslerUniversalInstantCamera& /*camera*/, const CBaslerUniversalGrabResultPtr& ptrGrabResult )
    {
        // Image grabbed successfully?
        if (ptrGrabResult->GrabSucceeded())
        {
            // The chunk data is attached to the grab result and can be accessed anywhere.

            // Generic parameter access:
            // This shows the access via the chunk data node map. This method is available for all grab result types.
            CIntegerParameter chunkTimestamp( ptrGrabResult->GetChunkDataNodeMap(), "ChunkTimestamp" );

            // Access the chunk data attached to the result.
            // Before accessing the chunk data, you should check to see
            // if the chunk is readable. When it is readable, the buffer
            // contains the requested chunk data.
            if (chunkTimestamp.IsReadable())
                cout << "OnImageGrabbed: TimeStamp (Result) accessed via node map: " << chunkTimestamp.GetValue() << endl;

            // Native parameter access:
            // When using the device-specific grab results the chunk data can be accessed
            // via the members of the grab result data.
            if (ptrGrabResult->ChunkTimestamp.IsReadable())
                cout << "OnImageGrabbed: TimeStamp (Result) accessed via result member: " << ptrGrabResult->ChunkTimestamp.GetValue() << endl;
        }
    }
};

// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 5;

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the first found camera device.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Register an image event handler that accesses the chunk data.
        camera.RegisterImageEventHandler( new CSampleImageEventHandler, RegistrationMode_Append, Cleanup_Delete );

        // Open the camera.
        camera.Open();

        // A GenICam node map is required for accessing chunk data. That's why a small node map is created for each grab result.
        // Creating a lot of node maps can be time consuming.
        // The node maps are usually created dynamically when StartGrabbing() is called.
        // To avoid a delay caused by node map creation in StartGrabbing() you have the option to create
        // a static pool of node maps once before grabbing.
        //camera.StaticChunkNodeMapPoolSize = camera.MaxNumBuffer.GetValue();

        // Enable chunks in general.
        if (!camera.ChunkModeActive.TrySetValue( true ))
        {
            throw RUNTIME_EXCEPTION( "The camera doesn't support chunk features" );
        }

        // Enable time stamp chunks.
        camera.ChunkSelector.SetValue( ChunkSelector_Timestamp );
        camera.ChunkEnable.SetValue( true );

        // Enable frame counter chunks, if available.
        if (camera.ChunkSelector.TrySetValue( ChunkSelector_Framecounter ))
        {
            // USB camera devices provide generic counters.
            // An explicit FrameCounter value is not provided by USB camera devices.
            // Enable frame counter chunks.
            camera.ChunkEnable.SetValue( true );
        }

        // Enable CRC checksum chunks.
        camera.ChunkSelector.SetValue( ChunkSelector_PayloadCRC16 );
        camera.ChunkEnable.SetValue( true );

        // Start the grabbing of c_countOfImagesToGrab images.
        // The camera device is parameterized with a default configuration which
        // sets up free-running continuous acquisition.
        camera.StartGrabbing( c_countOfImagesToGrab );

        // This smart pointer will receive the grab result data.
        CBaslerUniversalGrabResultPtr ptrGrabResult;

        // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
        // when c_countOfImagesToGrab images have been retrieved.
        while (camera.IsGrabbing())
        {
            // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
            // RetrieveResult calls the image event handler's OnImageGrabbed method.
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );
            cout << "GrabSucceeded: " << ptrGrabResult->GrabSucceeded() << endl;

            // Image grabbed successfully?
            if (ptrGrabResult->GrabSucceeded())
            {
#ifdef PYLON_WIN_BUILD
                // Display the image
                Pylon::DisplayImage( 1, ptrGrabResult );
#endif

                // The result data is automatically filled with received chunk data.
                // (Note:  This is not the case when using the low-level API)
                cout << "SizeX: " << ptrGrabResult->GetWidth() << endl;
                cout << "SizeY: " << ptrGrabResult->GetHeight() << endl;
                const uint8_t* pImageBuffer = (uint8_t*) ptrGrabResult->GetBuffer();
                cout << "Gray value of first pixel: " << (uint32_t) pImageBuffer[0] << endl;

                // Check to see if a buffer containing chunk data has been received.
                if (PayloadType_ChunkData != ptrGrabResult->GetPayloadType())
                {
                    throw RUNTIME_EXCEPTION( "Unexpected payload type received." );
                }

                // Since we have activated the CRC Checksum feature, we can check
                // the integrity of the buffer first.
                // Note: Enabling the CRC Checksum feature is not a prerequisite for using
                // chunks. Chunks can also be handled when the CRC Checksum feature is deactivated.
                if (ptrGrabResult->HasCRC() && ptrGrabResult->CheckCRC() == false)
                {
                    throw RUNTIME_EXCEPTION( "Image was damaged!" );
                }

                // Access the chunk data attached to the result.
                // Before accessing the chunk data, you should check to see
                // if the chunk is readable. When it is readable, the buffer
                // contains the requested chunk data.
                if (ptrGrabResult->ChunkTimestamp.IsReadable())
                {
                    cout << "TimeStamp (Result): " << ptrGrabResult->ChunkTimestamp.GetValue() << endl;
                }

                // USB camera devices provide generic counters. An explicit FrameCounter value is not provided by USB camera devices.
                if (ptrGrabResult->ChunkFramecounter.IsReadable())
                {
                    cout << "FrameCounter (Result): " << ptrGrabResult->ChunkFramecounter.GetValue() << endl;
                }

                cout << endl;
            }
            else
            {
                cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << endl;
            }
        }

        // Disable chunk mode.
        camera.ChunkModeActive.SetValue( false );
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}

Grab_MultiCast#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_MultiCast.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample demonstrates how to open a camera in multicast mode
    and how to receive a multicast stream.

    Two instances of this application must be started on different computers.
    The first application started on computer A acts as the controlling application and has full access to the GigE camera.
    The second instance started on computer B opens the camera in monitor mode.
    This instance is not able to control the camera but can receive multicast streams.

    To get the sample running, start this application first on computer A in control mode.
    After computer A has begun to receive frames, start the second instance of this
    application on computer B in monitor mode.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Include files used by samples.
#include "../include/ConfigurationEventPrinter.h"
#include "../include/ImageEventPrinter.h"

// Include file for _kbhit
#if defined(PYLON_WIN_BUILD)
#include <conio.h>
#elif defined(PYLON_UNIX_BUILD)
#    include <stdio.h>
#    include <termios.h>
#    include <unistd.h>
#    include <fcntl.h>
#endif

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;
using namespace Basler_UniversalStreamParams;

// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 100;


bool KeyPressed( void )
{
#if defined(PYLON_WIN_BUILD)
    return _kbhit() != 0;
#elif defined(PYLON_UNIX_BUILD)
    struct termios savedTermios;
    int savedFL;
    struct termios termios;
    int ch;

    tcgetattr( STDIN_FILENO, &savedTermios );
    savedFL = fcntl( STDIN_FILENO, F_GETFL, 0 );

    termios = savedTermios;
    termios.c_lflag &= ~(ICANON | ECHO);
    tcsetattr( STDIN_FILENO, TCSANOW, &termios );
    fcntl( STDIN_FILENO, F_SETFL, savedFL | O_NONBLOCK );

    ch = getchar();

    fcntl( STDIN_FILENO, F_SETFL, savedFL );
    tcsetattr( STDIN_FILENO, TCSANOW, &savedTermios );

    if (ch != EOF)
    {
        ungetc( ch, stdin );
    }

    return ch != EOF;
#endif
}

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    // Query the user for the mode to use.
    // Ask the user to launch the multicast controlling application or the multicast monitoring application.
    cout << "Start multicast sample in (c)ontrol or in (m)onitor mode? (c/m) "; cout.flush();

    char key;

    do
    {
        cin.get( key );
        // Remove newline from stdin.
        cin.get();
    }
    while ((key != 'c') && (key != 'm') && (key != 'C') && (key != 'M'));

    bool monitorMode = (key == 'm') || (key == 'M');

    try
    {
        // Only look for GigE cameras.
        CDeviceInfo info;
        info.SetDeviceClass( Pylon::BaslerGigEDeviceClass );

        // Create an instant camera object for the GigE camera found first.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice( info ) );

        // The default configuration must be removed when monitor mode is selected
        // because the monitoring application is not allowed to modify any parameter settings.
        if (monitorMode)
        {
            camera.RegisterConfiguration( (CConfigurationEventHandler*) NULL, RegistrationMode_ReplaceAll, Cleanup_None );
        }

        // For demonstration purposes only, register an event handler configuration and an image
        // event handler to print out information about camera use and image grabbing.
        // They are appended to the already registered handlers by setting the registration mode to RegistrationMode_Append.
        camera.RegisterConfiguration( new CConfigurationEventPrinter, RegistrationMode_Append, Cleanup_Delete ); // Camera use.
        camera.RegisterImageEventHandler( new CImageEventPrinter, RegistrationMode_Append, Cleanup_Delete );     // Image grabbing.

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Monitor mode selected.
        if (monitorMode)
        {
            // Set MonitorModeActive to true to act as a monitor.
            camera.MonitorModeActive = true;

            // Open the camera.
            camera.Open();

            // Select transmission type. If the camera is already controlled by another application
            // and configured for multicast, the active camera configuration can be used
            // (IP Address and Port will be set automatically).
            camera.GetStreamGrabberParams().TransmissionType = TransmissionType_UseCameraConfig;

            // Alternatively, the stream grabber could be explicitly set to "multicast"...
            // In this case, the IP Address and the IP port must also be set.
            //
            //camera.GetStreamGrabberParams().TransmissionType = TransmissionType_Multicast;
            //camera.GetStreamGrabberParams().DestinationAddr = "239.0.0.1";
            //camera.GetStreamGrabberParams().DestinationPort = 49152;

            if (camera.GetStreamGrabberParams().DestinationAddr.GetValue() != "0.0.0.0" &&
                 camera.GetStreamGrabberParams().DestinationPort.GetValue() != 0)
            {
                camera.StartGrabbing( c_countOfImagesToGrab );
            }
            else
            {
                cerr << endl << "Failed to open stream grabber (monitor mode): The acquisition is not yet started by the controlling application." << endl;
                cerr << endl << "Start the controlling application before starting the monitor application" << endl;
            }
        }
        // Controlling mode selected.
        else
        {
            // Open the camera.
            camera.Open();

            // Set transmission type to "multicast"...
            // In this case, the IP Address and the IP port must also be set.
            camera.GetStreamGrabberParams().TransmissionType = TransmissionType_Multicast;
            // camera.GetStreamGrabberParams().DestinationAddr = "239.0.0.1";    // These are default values.
            // camera.GetStreamGrabberParams().DestinationPort = 49152;

            // Maximize the image area of interest (Image AOI).
            camera.OffsetX.TrySetToMinimum();
            camera.OffsetY.TrySetToMinimum();
            camera.Width.SetToMaximum();
            camera.Height.SetToMaximum();

            // Set the pixel data format.
            camera.PixelFormat.SetValue( PixelFormat_Mono8 );

            camera.StartGrabbing();
        }

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
        // when c_countOfImagesToGrab images have been retrieved in monitor mode.
        // Grabbing also stops when a key is pressed and the camera object is destroyed.
        while (!KeyPressed() && camera.IsGrabbing())
        {
            // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );

#ifdef PYLON_WIN_BUILD
            // Display the image
            Pylon::DisplayImage( 1, ptrGrabResult );
#endif

            // The grab result could now be processed here.
        }
    }
    catch (const GenericException& e)
    {
        // Error handling
        cerr << "An exception occurred." << endl << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable wait on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
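
The sample above leaves the explicit multicast configuration commented out. The following minimal sketch isolates that alternative into a helper for the controlling application; ConfigureFixedMulticast is an illustrative name, not part of the pylon API, and the address and port are the default values mentioned in the sample comments.

#include <pylon/BaslerUniversalInstantCamera.h>

using namespace Pylon;
using namespace Basler_UniversalStreamParams;

// Pin the stream to a fixed multicast endpoint so that monitor applications
// can join a known address instead of relying on TransmissionType_UseCameraConfig.
// Call this on an opened camera in control mode, before StartGrabbing().
void ConfigureFixedMulticast( CBaslerUniversalInstantCamera& camera )
{
    camera.GetStreamGrabberParams().TransmissionType = TransmissionType_Multicast;
    camera.GetStreamGrabberParams().DestinationAddr = "239.0.0.1"; // Any address in the 224.0.0.0/4 multicast range.
    camera.GetStreamGrabberParams().DestinationPort = 49152;      // Port the monitor applications will receive on.
}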

Grab_MultipleCameras#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_MultipleCameras.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to grab and process images from multiple cameras
    using the CInstantCameraArray class. The CInstantCameraArray class represents
    an array of instant camera objects. It provides almost the same interface
    as the instant camera for grabbing.
    The main purpose of the CInstantCameraArray is to simplify waiting for images and
    camera events of multiple cameras in one thread. This is done by providing a single
    RetrieveResult method for all cameras in the array.
    Alternatively, the grabbing can be started using the internal grab loop threads
    of all cameras in the CInstantCameraArray. The grabbed images can then be processed by one or more
    image event handlers. Please note that this is not shown in this example.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 10;

// Limits the number of cameras used for grabbing.
// It is important to manage the available bandwidth when grabbing with multiple cameras.
// This applies, for instance, if two GigE cameras are connected to the same network adapter via a switch.
// To manage the bandwidth, the GevSCPD interpacket delay parameter and the GevSCFTD transmission delay
// parameter can be set for each GigE camera device (a short sketch follows this sample).
// The "Controlling Packet Transmission Timing with the Interpacket and Frame Transmission Delays on Basler GigE Vision Cameras"
// Application Note (AW000649xx000) provides more information about this topic.
// The bandwidth used by a GigE camera device can be limited by adjusting the packet size.
static const size_t c_maxCamerasToUse = 2;

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Get the transport layer factory.
        CTlFactory& tlFactory = CTlFactory::GetInstance();

        // Get all attached devices and exit application if no device is found.
        DeviceInfoList_t devices;
        if (tlFactory.EnumerateDevices( devices ) == 0)
        {
            throw RUNTIME_EXCEPTION( "No camera present." );
        }

        // Create an array of instant cameras for the found devices and avoid exceeding a maximum number of devices.
        CInstantCameraArray cameras( min( devices.size(), c_maxCamerasToUse ) );

        // Create and attach all Pylon Devices.
        for (size_t i = 0; i < cameras.GetSize(); ++i)
        {
            cameras[i].Attach( tlFactory.CreateDevice( devices[i] ) );

            // Print the model name of the camera.
            cout << "Using device " << cameras[i].GetDeviceInfo().GetModelName() << endl;
        }

        // Starts grabbing for all cameras, beginning with index 0. Grabbing is
        // started one camera after the other, so the cameras do not capture
        // images at exactly the same time.
        // However, a hardware trigger setup can be used to cause all cameras to grab images synchronously.
        // According to their default configuration, the cameras are
        // set up for free-running continuous acquisition.
        cameras.StartGrabbing();

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Grab c_countOfImagesToGrab from the cameras.
        for (uint32_t i = 0; i < c_countOfImagesToGrab && cameras.IsGrabbing(); ++i)
        {
            cameras.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );

            // Image grabbed successfully?
            if (ptrGrabResult->GrabSucceeded())
            {
                // When the cameras in the array are created the camera context value
                // is set to the index of the camera in the array.
                // The camera context is a user-settable value.
                // This value is attached to each grab result and can be used
                // to determine the camera that produced the grab result.
                intptr_t cameraContextValue = ptrGrabResult->GetCameraContext();

#ifdef PYLON_WIN_BUILD
                // Show the image acquired by each camera in the window related to each camera.
                Pylon::DisplayImage( cameraContextValue, ptrGrabResult );
#endif

                // Print the index and the model name of the camera.
                cout << "Camera " << cameraContextValue << ": " << cameras[cameraContextValue].GetDeviceInfo().GetModelName() << endl;

                // Now, the image data can be processed.
                cout << "GrabSucceeded: " << ptrGrabResult->GrabSucceeded() << endl;
                cout << "SizeX: " << ptrGrabResult->GetWidth() << endl;
                cout << "SizeY: " << ptrGrabResult->GetHeight() << endl;
                const uint8_t* pImageBuffer = (uint8_t*) ptrGrabResult->GetBuffer();
                cout << "Gray value of first pixel: " << (uint32_t) pImageBuffer[0] << endl << endl;
            }
            else
            {
                cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << endl;
            }
        }
    }
    catch (const GenericException& e)
    {
        // Error handling
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
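
As noted in the comment above c_maxCamerasToUse, bandwidth on a shared link can be managed by raising the GevSCPD interpacket delay. The sketch below is illustrative only: it assumes the cameras are held in a CBaslerUniversalInstantCameraArray (the sample itself uses the plain CInstantCameraArray, which does not expose camera parameters directly), ThrottleGigECameras is a hypothetical helper name, and the delay value is a placeholder that must be tuned for the actual network.

#include <pylon/BaslerUniversalInstantCameraArray.h>

using namespace Pylon;

// Raise the GevSCPD interpacket delay on every opened GigE camera in the array.
// A larger delay spreads out the packets of each stream in time so that several
// cameras can share one network link.
void ThrottleGigECameras( CBaslerUniversalInstantCameraArray& cameras, int64_t delayTicks )
{
    for (size_t i = 0; i < cameras.GetSize(); ++i)
    {
        // TrySetValue returns false if the parameter is not writable,
        // e.g., for non-GigE devices or when the camera is not open.
        cameras[i].GevSCPD.TrySetValue( delayTicks );
    }
}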

Grab_Strategies#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_Strategies.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample shows the use of the different grab strategies.

    There are different strategies to grab images with the Instant Camera grab engine:
    * One By One: This strategy is the default grab strategy. Grabbed images are processed in their order of arrival.
    * Latest Image Only: Differs from the One By One strategy by using an output queue with a size of one. Therefore, only the latest
    image is kept in the output queue; all other grabbed images are skipped (a display-only sketch follows this sample).
    * Latest Images: Extends the above strategies by an adjustable output queue size. If the output queue has a size of
    1, this strategy is equal to the Latest Image Only strategy. Setting the output queue size to
    CInstantCamera::MaxNumBuffer makes it equivalent to the One By One strategy.
    * Upcoming Image Grab: Ensures that the image grabbed is the next image received from the camera. When retrieving an
    image, a buffer is queued into the input queue and then the call waits for the upcoming image. Subsequently, the image data
    is grabbed into the buffer and returned. Note: This strategy can't be used with USB camera devices.

*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include files used by samples.
#include "../include/ConfigurationEventPrinter.h"
#include "../include/ImageEventPrinter.h"

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Create an instant camera object for the camera device found first.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Register the standard configuration event handler for enabling software triggering.
        // The software trigger configuration handler replaces the default configuration
        // as all currently registered configuration handlers are removed by setting the registration mode to RegistrationMode_ReplaceAll.
        camera.RegisterConfiguration( new CSoftwareTriggerConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete );

        // For demonstration purposes only, registers an event handler configuration to print out information about camera use.
        // The event handler configuration is appended to the registered software trigger configuration handler by setting 
        // registration mode to RegistrationMode_Append.
        camera.RegisterConfiguration( new CConfigurationEventPrinter, RegistrationMode_Append, Cleanup_Delete );
        camera.RegisterImageEventHandler( new CImageEventPrinter, RegistrationMode_Append, Cleanup_Delete );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // The MaxNumBuffer parameter can be used to control the count of buffers
        // allocated for grabbing. The default value of this parameter is 10.
        camera.MaxNumBuffer = 15;

        // Open the camera.
        camera.Open();


        // Can the camera device be queried whether it is ready to accept the next frame trigger?
        if (camera.CanWaitForFrameTriggerReady())
        {
            cout << "Grab using the GrabStrategy_OneByOne default strategy:" << endl << endl;

            // The GrabStrategy_OneByOne strategy is used. The images are processed
            // in the order of their arrival.
            camera.StartGrabbing( GrabStrategy_OneByOne );

            // In the background, the grab engine thread retrieves the
            // image data and queues the buffers into the internal output queue.

            // Issue software triggers. For each call, wait up to 1000 ms until the camera is ready for triggering the next image.
            for (int i = 0; i < 3; ++i)
            {
                if (camera.WaitForFrameTriggerReady( 1000, TimeoutHandling_ThrowException ))
                {
                    camera.ExecuteSoftwareTrigger();
                }
            }

            // For demonstration purposes, wait for the last image to appear in the output queue.
            WaitObject::Sleep( 3 * 1000 );

            // Check that grab results are waiting.
            if (camera.GetGrabResultWaitObject().Wait( 0 ))
            {
                cout << endl << "Grab results wait in the output queue." << endl << endl;
            }

            // All triggered images are still waiting in the output queue
            // and are now retrieved.
            // The grabbing continues in the background, e.g. when using hardware trigger mode,
            // as long as the grab engine does not run out of buffers.
            int nBuffersInQueue = 0;
            while (camera.RetrieveResult( 0, ptrGrabResult, TimeoutHandling_Return ))
            {
                nBuffersInQueue++;
            }
            cout << "Retrieved " << nBuffersInQueue << " grab results from output queue." << endl << endl;

            // Stop the grabbing.
            camera.StopGrabbing();



            cout << endl << "Grab using strategy GrabStrategy_LatestImageOnly:" << endl << endl;

            // The GrabStrategy_LatestImageOnly strategy is used. The images are processed
            // in the order of their arrival but only the last received image
            // is kept in the output queue.
            // This strategy can be useful when the acquired images are only displayed on the screen.
            // If the processor has been busy for a while and images could not be displayed automatically
            // the latest image is displayed when processing time is available again.
            camera.StartGrabbing( GrabStrategy_LatestImageOnly );

            // Execute the software trigger, wait actively until the camera accepts the next frame trigger or until the timeout occurs.
            for (int i = 0; i < 3; ++i)
            {
                if (camera.WaitForFrameTriggerReady( 1000, TimeoutHandling_ThrowException ))
                {
                    camera.ExecuteSoftwareTrigger();
                }
            }

            // Wait for all images.
            WaitObject::Sleep( 3 * 1000 );

            // Check whether the grab result is waiting.
            if (camera.GetGrabResultWaitObject().Wait( 0 ))
            {
                cout << endl << "A grab result waits in the output queue." << endl << endl;
            }

            // Only the last received image is waiting in the internal output queue
            // and is now retrieved.
            // The grabbing continues in the background, e.g. when using the hardware trigger mode.
            nBuffersInQueue = 0;
            while (camera.RetrieveResult( 0, ptrGrabResult, TimeoutHandling_Return ))
            {
                cout << "Skipped " << ptrGrabResult->GetNumberOfSkippedImages() << " images." << endl;
                nBuffersInQueue++;
            }

            cout << "Retrieved " << nBuffersInQueue << " grab result from output queue." << endl << endl;

            // Stop the grabbing.
            camera.StopGrabbing();



            cout << endl << "Grab using strategy GrabStrategy_LatestImages:" << endl << endl;

            // The GrabStrategy_LatestImages strategy is used. The images are processed
            // in the order of their arrival, but only a number of the images received last
            // are kept in the output queue.

            // The size of the output queue can be adjusted.
            // When using this strategy the OutputQueueSize parameter can be changed during grabbing.
            camera.OutputQueueSize = 2;

            camera.StartGrabbing( GrabStrategy_LatestImages );

            // Execute the software trigger, wait actively until the camera accepts the next frame trigger or until the timeout occurs.
            for (int i = 0; i < 3; ++i)
            {
                if (camera.WaitForFrameTriggerReady( 1000, TimeoutHandling_ThrowException ))
                {
                    camera.ExecuteSoftwareTrigger();
                }
            }

            // Wait for all images.
            WaitObject::Sleep( 3 * 1000 );

            // Check whether the grab results are waiting.
            if (camera.GetGrabResultWaitObject().Wait( 0 ))
            {
                cout << endl << "Grab results wait in the output queue." << endl << endl;
            }

            // Only the images received last are waiting in the internal output queue
            // and are now retrieved.
            // The grabbing continues in the background, e.g. when using the hardware trigger mode.
            nBuffersInQueue = 0;
            while (camera.RetrieveResult( 0, ptrGrabResult, TimeoutHandling_Return ))
            {
                if (ptrGrabResult->GetNumberOfSkippedImages())
                {
                    cout << "Skipped " << ptrGrabResult->GetNumberOfSkippedImages() << " image." << endl;
                }
                nBuffersInQueue++;
            }

            cout << "Retrieved " << nBuffersInQueue << " grab results from output queue." << endl << endl;

            // When setting the output queue size to 1 this strategy is equivalent to the GrabStrategy_LatestImageOnly grab strategy.
            camera.OutputQueueSize = 1;

            // When setting the output queue size to CInstantCamera::MaxNumBuffer this strategy is equivalent to GrabStrategy_OneByOne.
            camera.OutputQueueSize = camera.MaxNumBuffer;

            // Stop the grabbing.
            camera.StopGrabbing();



            // The Upcoming Image grab strategy can't be used together with USB camera devices.
            // For more information, see the advanced topics section of the pylon Programmer's Guide.
            if (!camera.IsUsb())
            {
                cout << endl << "Grab using the GrabStrategy_UpcomingImage strategy:" << endl << endl;

                // Reconfigure the camera to use continuous acquisition.
                CAcquireContinuousConfiguration().OnOpened( camera );

                // The GrabStrategy_UpcomingImage strategy is used. A buffer for grabbing
                // is queued each time when RetrieveResult()
                // is called. The image data is grabbed into the buffer and returned.
                // This ensures that the image grabbed is the next image
                // received from the camera.
                // All images are still transported to the computer.
                camera.StartGrabbing( GrabStrategy_UpcomingImage );

                // Queues a buffer for grabbing and waits for the grab to finish.
                camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );

                // Sleep.
                WaitObject::Sleep( 1000 );

                // Check that no grab result is waiting because no buffers are queued for grabbing.
                if (!camera.GetGrabResultWaitObject().Wait( 0 ))
                {
                    cout << "No grab result waits in the output queue." << endl << endl;
                }

                // Stop the grabbing.
                camera.StopGrabbing();
            }
        }
        else
        {
            // See the documentation of CInstantCamera::CanWaitForFrameTriggerReady() for more information.
            cout << endl;
            cout << "This sample can only be used with cameras that can be queried whether they are ready to accept the next frame trigger.";
            cout << endl;
            cout << endl;
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
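
The comments in the sample describe GrabStrategy_LatestImageOnly as well suited for display-only applications. Below is a condensed, self-contained sketch of that use case under stated assumptions: free-running default acquisition and a placeholder rendering step.

#include <pylon/PylonIncludes.h>

using namespace Pylon;

int main()
{
    PylonInitialize();
    {
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // With GrabStrategy_LatestImageOnly, a consumer that falls behind always
        // receives the newest frame; older frames are skipped automatically.
        camera.StartGrabbing( GrabStrategy_LatestImageOnly );

        CGrabResultPtr result;
        for (int i = 0; i < 100 && camera.IsGrabbing(); ++i)
        {
            camera.RetrieveResult( 5000, result, TimeoutHandling_ThrowException );
            if (result->GrabSucceeded())
            {
                // Render the image here. GetNumberOfSkippedImages() reports
                // how many frames were dropped since the last retrieved one.
            }
        }
    } // The camera object is destroyed before PylonTerminate() is called.
    PylonTerminate();
    return 0;
}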

Grab_UsingActionCommand#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_UsingActionCommand.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample shows how to issue a GigE Vision ACTION_CMD to multiple cameras.
    By using an action command, multiple cameras can be triggered at the same time,
    unlike with software triggering, where each camera must be triggered individually.

    To make the configuration of multiple cameras easier, this sample uses the CBaslerUniversalInstantCameraArray class.
    It also uses a CActionTriggerConfiguration to set up the basic action command features.
*/

#include <time.h>   // for time
#include <stdlib.h> // for rand & srand

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#   include <pylon/PylonGUI.h>
#endif

#include <pylon/BaslerUniversalInstantCameraArray.h>
#include <pylon/Info.h>
#include <pylon/gige/GigETransportLayer.h>
#include <pylon/gige/ActionTriggerConfiguration.h>
#include <pylon/gige/BaslerGigEDeviceInfo.h>


// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

// Limits the number of cameras used for grabbing.
// It is important to manage the available bandwidth when grabbing with multiple
// cameras. This applies, for instance, if two GigE cameras are connected to the
// same network adapter via a switch. To manage the bandwidth, the GevSCPD
// interpacket delay parameter and the GevSCFTD transmission delay parameter can
// be set for each GigE camera device. The "Controlling Packet Transmission Timing
// with the Interpacket and Frame Transmission Delays on Basler GigE Vision Cameras"
// Application Note (AW000649xx000) provides more information about this topic.
static const uint32_t c_maxCamerasToUse = 2;


int main( int /*argc*/, char* /*argv*/[] )
{
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Get the GigE transport layer.
        // We'll need it later to issue the action commands.
        CTlFactory& tlFactory = CTlFactory::GetInstance();
        IGigETransportLayer* pTL = dynamic_cast<IGigETransportLayer*>(tlFactory.CreateTl( BaslerGigEDeviceClass ));
        if (pTL == NULL)
        {
            throw RUNTIME_EXCEPTION( "No GigE transport layer available." );
        }


        // In this sample, we use the transport layer directly to enumerate cameras.
        // By calling EnumerateDevices on the TL, we get only GigE cameras.
        // You could also accomplish this by using a filter and
        // letting the Transport Layer Factory enumerate.
        DeviceInfoList_t allDeviceInfos;
        if (pTL->EnumerateDevices( allDeviceInfos ) == 0)
        {
            throw RUNTIME_EXCEPTION( "No GigE cameras present." );
        }

        // Only use cameras in the same subnet as the first one.
        DeviceInfoList_t usableDeviceInfos;
        usableDeviceInfos.push_back( allDeviceInfos[0] );
        const String_t subnet( allDeviceInfos[0].GetSubnetAddress() );

        // Start with index 1 as we have already added the first one above.
        // We will also limit the number of cameras to c_maxCamerasToUse.
        for (size_t i = 1; i < allDeviceInfos.size() && usableDeviceInfos.size() < c_maxCamerasToUse; ++i)
        {
            if (subnet == allDeviceInfos[i].GetSubnetAddress())
            {
                // Add this deviceInfo to the ones we will be using.
                usableDeviceInfos.push_back( allDeviceInfos[i] );
            }
            else
            {
                cerr << "Camera will not be used because it is in a different subnet "
                    << subnet << "!" << endl;
            }
        }

        // In this sample, we'll use a CBaslerUniversalInstantCameraArray to access multiple cameras.
        CBaslerUniversalInstantCameraArray cameras( usableDeviceInfos.size() );

        // Seed the random number generator and generate a random device key value.
        srand( (unsigned) time( NULL ) );
        const uint32_t DeviceKey = rand();

        // For this sample we configure all cameras to be in the same group.
        const uint32_t GroupKey = 0x112233;

        // For the following sample we use the CActionTriggerConfiguration to configure the camera.
        // It will set the DeviceKey, GroupKey and GroupMask features. It will also
        // configure the camera FrameTrigger and set the TriggerSource to the action command.
        // You can look at the implementation of CActionTriggerConfiguration in <pylon/gige/ActionTriggerConfiguration.h>
        // to see which features are set.

        // Create all GigE cameras and attach them to the InstantCameras in the array.
        for (size_t i = 0; i < cameras.GetSize(); ++i)
        {
            cameras[i].Attach( tlFactory.CreateDevice( usableDeviceInfos[i] ) );
            // We'll use the CActionTriggerConfiguration, which will set up the cameras to wait for an action command.
            cameras[i].RegisterConfiguration( new CActionTriggerConfiguration( DeviceKey, GroupKey, AllGroupMask ), RegistrationMode_Append, Cleanup_Delete );
            // Set the context. This will help us later to correlate the grab result to a camera in the array.
            cameras[i].SetCameraContext( i );

            const CBaslerGigEDeviceInfo& di = cameras[i].GetDeviceInfo();

            // Print the model name of the camera.
            cout << "Using camera " << i << ": " << di.GetModelName() << " (" << di.GetIpAddress() << ")" << endl;
        }

        // Open all cameras.
        // This will apply the CActionTriggerConfiguration specified above.
        cameras.Open();

        //////////////////////////////////////////////////////////////////////
        //////////////////////////////////////////////////////////////////////
        // Use an Action Command to Trigger Multiple Cameras at the Same Time.
        //////////////////////////////////////////////////////////////////////
        //////////////////////////////////////////////////////////////////////

        cout << endl << "Issuing an action command." << endl;

        // Starts grabbing for all cameras.
        // The cameras won't transmit any image data, because they are configured to wait for an action command.
        cameras.StartGrabbing();

        // Now we issue the action command to all devices in the subnet.
        // The devices with a matching DeviceKey, GroupKey and valid GroupMask will grab an image.
        pTL->IssueActionCommand( DeviceKey, GroupKey, AllGroupMask, subnet );

        // This smart pointer will receive the grab result data.
        CBaslerUniversalGrabResultPtr ptrGrabResult;

        // Retrieve images from all cameras.
        const int DefaultTimeout_ms = 5000;
        for (size_t i = 0; i < usableDeviceInfos.size() && cameras.IsGrabbing(); ++i)
        {
            // CInstantCameraArray::RetrieveResult will return grab results in the order they arrive.
            cameras.RetrieveResult( DefaultTimeout_ms, ptrGrabResult, TimeoutHandling_ThrowException );

            // When the cameras in the array are created the camera context value
            // is set to the index of the camera in the array.
            // The camera context is a user-settable value.
            // This value is attached to each grab result and can be used
            // to determine the camera that produced the grab result.
            intptr_t cameraIndex = ptrGrabResult->GetCameraContext();


            // Image grabbed successfully?
            if (ptrGrabResult->GrabSucceeded())
            {
#ifdef PYLON_WIN_BUILD
                // Show the image acquired by each camera in the window related to the camera.
                // DisplayImage supports up to 32 image windows.
                if (cameraIndex <= 31)
                    Pylon::DisplayImage( cameraIndex, ptrGrabResult );
#endif
                // Print the index and the model name of the camera.
                cout << "Camera " << cameraIndex << ": " << cameras[cameraIndex].GetDeviceInfo().GetModelName() <<
                    " (" << cameras[cameraIndex].GetDeviceInfo().GetIpAddress() << ")" << endl;

                // You could process the image here by accessing the image buffer.
                cout << "GrabSucceeded: " << ptrGrabResult->GrabSucceeded() << endl;
                const uint8_t* pImageBuffer = (uint8_t*) ptrGrabResult->GetBuffer();
                cout << "Gray value of first pixel: " << (uint32_t) pImageBuffer[0] << endl << endl;
            }
            else
            {
                // If a buffer has been incompletely grabbed, the network bandwidth is possibly insufficient for transferring
                // multiple images simultaneously. See note above c_maxCamerasToUse.
                cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << endl;
            }
        }

        // In case you want to trigger again, you should wait for the camera
        // to become trigger-ready before issuing the next action command.
        // To avoid overtriggering, you should call cameras[0].WaitForFrameTriggerReady
        // (see the Grab_UsingGrabLoopThread sample for details and the sketch after this sample).

        cameras.StopGrabbing();

        // Close all cameras.
        cameras.Close();
    }
    catch (const GenericException& e)
    {
        // Error handling
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
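
The closing comment in the sample mentions waiting for trigger readiness before issuing further action commands. The sketch below shows that pattern; TriggerRepeatedly is an illustrative helper, not a pylon function, and it assumes the cameras were opened and configured with CActionTriggerConfiguration as in the sample.

#include <pylon/PylonIncludes.h>
#include <pylon/BaslerUniversalInstantCameraArray.h>
#include <pylon/gige/GigETransportLayer.h>

using namespace Pylon;

// Issue several action commands in a row without overtriggering: wait until
// the first camera reports readiness for the next frame trigger before each command.
void TriggerRepeatedly( IGigETransportLayer* pTL,
                        CBaslerUniversalInstantCameraArray& cameras,
                        uint32_t deviceKey, uint32_t groupKey,
                        const String_t& subnet, int count )
{
    for (int i = 0; i < count; ++i)
    {
        if (cameras[0].WaitForFrameTriggerReady( 1000, TimeoutHandling_ThrowException ))
        {
            pTL->IssueActionCommand( deviceKey, groupKey, AllGroupMask, subnet );
        }
    }
}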

Grab_UsingBufferFactory#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_UsingBufferFactory.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample demonstrates how to use a user-provided buffer factory.
    Using a buffer factory is optional and intended for advanced use cases only.
    A buffer factory is only necessary if you want to grab into externally supplied buffers.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#include <pylon/PylonGUI.h>
#endif

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 5;


// A user-provided buffer factory.
class MyBufferFactory : public IBufferFactory
{
public:
    MyBufferFactory()
        : m_lastBufferContext( 1000 )
    {
    }

    virtual ~MyBufferFactory()
    {
    }


    // Will be called when the Instant Camera object needs to allocate a buffer.
    // Return the buffer and context data in the output parameters.
    // In case of an error, new() will throw an exception
    // which will be forwarded to the caller to indicate an error.
    // Warning: This method can be called by different threads.
    virtual void AllocateBuffer( size_t bufferSize, void** pCreatedBuffer, intptr_t& bufferContext )
    {
        try
        {
            // Allocate buffer for pixel data.
            // If you already have a buffer allocated by your image processing library, you can use this instead.
            // In this case, you must modify the delete code (see below) accordingly.
            *pCreatedBuffer = new uint8_t[bufferSize];
            // The context information is never changed by the Instant Camera and can be used
            // by the buffer factory to manage the buffers.
            // The context information can be retrieved from a grab result by calling
            // ptrGrabResult->GetBufferContext();
            bufferContext = ++m_lastBufferContext;

            cout << "Created buffer " << bufferContext << ", " << *pCreatedBuffer << endl;
        }
        catch (const std::exception&)
        {
            // In case of an error you must free the memory you may have already allocated.
            if (*pCreatedBuffer != NULL)
            {
                uint8_t* p = reinterpret_cast<uint8_t*>(*pCreatedBuffer);
                delete[] p;
                *pCreatedBuffer = NULL;
            }

            // Rethrow exception.
            // AllocateBuffer can also just return with *pCreatedBuffer = NULL to indicate
            // that no buffer is available at the moment.
            throw;
        }
    }


    // Frees a previously allocated buffer.
    // Warning: This method can be called by different threads.
    virtual void FreeBuffer( void* pCreatedBuffer, intptr_t bufferContext )
    {
        uint8_t* p = reinterpret_cast<uint8_t*>(pCreatedBuffer);
        delete[] p;
        cout << "Freed buffer " << bufferContext << ", " << pCreatedBuffer << endl;
    }


    // Destroys the buffer factory.
    // This will be used when you pass the ownership of the buffer factory instance to pylon
    // by defining Cleanup_Delete. pylon will call this function to destroy the instance
    // of the buffer factory. If you don't pass the ownership to pylon (Cleanup_None),
    // this method will be ignored.
    virtual void DestroyBufferFactory()
    {
        delete this;
    }


protected:

    unsigned long m_lastBufferContext;
};


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // The buffer factory must be created first because objects on the
        // stack are destroyed in reverse order of creation.
        // The buffer factory must exist longer than the Instant Camera object
        // in this sample.
        MyBufferFactory myFactory;

        // Create an instant camera object with the camera device found first.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Use our own implementation of a buffer factory.
        // Since we control the lifetime of the factory object, we pass Cleanup_None.
        camera.SetBufferFactory( &myFactory, Cleanup_None );

        // The parameter MaxNumBuffer can be used to control the count of buffers
        // allocated for grabbing. The default value of this parameter is 10.
        camera.MaxNumBuffer = 5;

        // If the 'BufferHandlingMode_Stream' is used, make sure to set
        // camera.MaxNumQueuedBuffer to a value smaller than or equal to the value
        // of camera.MaxNumBuffer.
        // Note: The USB3 Vision and GenTL transport layers do not support the
        // 'BufferHandlingMode_Stream' mode.

        // Start the grabbing of c_countOfImagesToGrab images.
        // The camera device is parameterized with a default configuration which
        // sets up free-running continuous acquisition.
        camera.StartGrabbing( c_countOfImagesToGrab );

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
        // when c_countOfImagesToGrab images have been retrieved.
        while (camera.IsGrabbing())
        {
            // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );

            // Image grabbed successfully?
            if (ptrGrabResult->GrabSucceeded())
            {
                // Access the image data.
                cout << "Context: " << ptrGrabResult->GetBufferContext() << endl;
                cout << "SizeX: " << ptrGrabResult->GetWidth() << endl;
                cout << "SizeY: " << ptrGrabResult->GetHeight() << endl;
                const uint8_t* pImageBuffer = (uint8_t*) ptrGrabResult->GetBuffer();
                cout << "First value of pixel data: " << (uint32_t) pImageBuffer[0] << endl << endl;

#ifdef PYLON_WIN_BUILD
                // Display the grabbed image.
                Pylon::DisplayImage( 1, ptrGrabResult );
#endif
            }
            else
            {
                cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription();
            }
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
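
As a variation on MyBufferFactory, buffers can also come from a preallocated container instead of raw new[]/delete[]. The sketch below is illustrative only: PooledBufferFactory is not part of pylon, and, unlike a production implementation, it omits the locking required because AllocateBuffer and FreeBuffer can be called from different threads.

#include <pylon/PylonIncludes.h>
#include <list>
#include <vector>

// A factory that owns its buffers in std::vector storage held in a std::list,
// so buffer addresses stay stable while the pool grows. FreeBuffer does not
// release memory; everything is freed when the factory itself is destroyed.
class PooledBufferFactory : public Pylon::IBufferFactory
{
public:
    PooledBufferFactory()
        : m_nextContext( 0 )
    {
    }

    virtual void AllocateBuffer( size_t bufferSize, void** pCreatedBuffer, intptr_t& bufferContext )
    {
        // Not thread-safe: a real implementation must guard m_pool with a mutex.
        m_pool.push_back( std::vector<uint8_t>( bufferSize ) );
        *pCreatedBuffer = m_pool.back().data();
        bufferContext = m_nextContext++;
    }

    virtual void FreeBuffer( void* /*pCreatedBuffer*/, intptr_t /*bufferContext*/ )
    {
        // Intentionally empty; the vectors own the memory.
    }

    virtual void DestroyBufferFactory()
    {
        // Intended for use with Cleanup_None and a stack-allocated factory,
        // so there is nothing to delete here.
    }

private:
    std::list<std::vector<uint8_t> > m_pool;
    intptr_t m_nextContext;
};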

Grab_UsingExposureEndEvent#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_UsingExposureEndEvent.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample shows how to use the Exposure End event to speed up image acquisition.
    For example, when a sensor exposure is finished, the camera can send an Exposure End event to the computer.
    The computer can receive the event before the image data of the finished exposure has been transferred completely.
    This avoids unnecessary delays, e.g., when an image object moves before the related image data transfer is complete.

    Note: This sample shows how to match incoming images using the camera.EventExposureEndFrameID
          and the ptrGrabResult->GetBlockID() values. For ace 2 camera models,
          camera.EventExposureEndFrameID and ptrGrabResult->GetBlockID() don't contain matching values.
          The ptrGrabResult->GetBlockID() equivalent is the chunk value represented by the camera.ChunkSelector FrameID.
          Please see the Grab_ChunkImage sample for more information about how to determine the
          correct chunk value to use instead of ptrGrabResult->GetBlockID().
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include files used by samples.
#include "../include/ConfigurationEventPrinter.h"

#include <iomanip>

#ifdef PYLON_UNIX_BUILD
#    include <sys/time.h>
#endif

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using cout.
using namespace std;

// Enumeration used for distinguishing different events.
enum MyEvents
{
    eMyExposureEndEvent,      // Triggered by a camera event.
    eMyImageReceivedEvent,    // Triggered by the receipt of an image.
    eMyMoveEvent,             // Triggered when the imaged item or the sensor head can be moved.
    eMyNoEvent                // Used as default setting.
};

// Names of possible events for printed output.
const char* MyEventNames[] =
{
    "ExposureEndEvent     ",
    "ImageReceived        ",
    "Move                 ",
    "NoEvent              "
};

// Used for logging received events without outputting the information on the screen
// because outputting will change the timing.
// This class is used for demonstration purposes only.
struct LogItem
{
    LogItem()
        : eventType( eMyNoEvent )
        , frameNumber( 0 )
    {
    }

    LogItem( MyEvents event, uint16_t frameNr )
        : eventType( event )
        , frameNumber( frameNr )
    {
        // Warning: The values measured may not be correct on older computer hardware.
#if defined(PYLON_WIN_BUILD)
        QueryPerformanceCounter( &time );
#elif defined(PYLON_UNIX_BUILD)
        struct timeval tv;

        gettimeofday( &tv, NULL );
        time = static_cast<unsigned long long>(tv.tv_sec) * 1000L + static_cast<unsigned long long>(tv.tv_usec) / 1000LL;
#endif
    }


#if defined(PYLON_WIN_BUILD)
    LARGE_INTEGER time; // Timestamps recorded.
#elif defined(PYLON_UNIX_BUILD)
    unsigned long long time; // Timestamps recorded.
#endif
    MyEvents eventType; // Type of the event received.
    uint16_t frameNumber; // Frame number of the event received.
};


// Helper function for printing a log.
// This function is used for demonstration purposes only.
void PrintLog( const std::vector<LogItem>& aLog )
{
#if defined(PYLON_WIN_BUILD)
    // Get the computer timer frequency.
    LARGE_INTEGER timerFrequency;
    QueryPerformanceFrequency( &timerFrequency );
#endif

    cout << std::endl << "Warning. The time values printed may not be correct on older computer hardware." << std::endl << std::endl;
    // Print the event information header.
    cout << "Time [ms]    " << "Event                 " << "Frame Number" << std::endl;
    cout << "------------ " << "--------------------- " << "-----------" << std::endl;

    // Print the logged information.
    size_t logSize = aLog.size();
    for (size_t i = 0; i < logSize; ++i)
    {
        // Calculate the time elapsed between the events.
        double time_ms = 0;
        if (i)
        {
#if defined(PYLON_WIN_BUILD)
            __int64 oldTicks = ((__int64) aLog[i - 1].time.HighPart << 32) + (__int64) aLog[i - 1].time.LowPart;
            __int64 newTicks = ((__int64) aLog[i].time.HighPart << 32) + (__int64) aLog[i].time.LowPart;
            long double timeDifference = (long double) (newTicks - oldTicks);
            long double ticksPerSecond = (long double) (((__int64) timerFrequency.HighPart << 32) + (__int64) timerFrequency.LowPart);
            time_ms = (timeDifference / ticksPerSecond) * 1000;
#elif defined(PYLON_UNIX_BUILD)
            time_ms = aLog[i].time - aLog[i - 1].time;
#endif
        }

        // Print the event information.
        cout << setw( 12 ) << fixed << setprecision( 4 ) << time_ms << " " << MyEventNames[aLog[i].eventType] << " " << aLog[i].frameNumber << std::endl;
    }
}


// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 20;


// Example handler for GigE camera events.
// Additional handling is required for GigE camera events because the event network packets may get lost, duplicated, or delayed in the network.
class CEventHandler : public CBaslerUniversalCameraEventHandler, public CBaslerUniversalImageEventHandler
{
public:
    CEventHandler()
        : m_nextExpectedFrameNumberImage( 0 )
        , m_nextExpectedFrameNumberExposureEnd( 0 )
        , m_nextFrameNumberForMove( 0 )
        , m_isGigE( false )
    {
        // Reserve space to log camera, image, and move events.
        m_log.reserve( c_countOfImagesToGrab * 3 );
    }

    void Initialize( int value, bool isGigE )
    {
        m_nextExpectedFrameNumberImage = value;
        m_nextExpectedFrameNumberExposureEnd = value;
        m_nextFrameNumberForMove = value;
        m_isGigE = isGigE;
    }

    // This method is called when a camera event has been received.
    virtual void OnCameraEvent( CBaslerUniversalInstantCamera& camera, intptr_t userProvidedId, GenApi::INode* /* pNode */ )
    {
        if (userProvidedId == eMyExposureEndEvent)
        {
            // An Exposure End event has been received.
            uint16_t frameNumber;
            if (camera.GetSfncVersion() < Sfnc_2_0_0)
            {
                frameNumber = (uint16_t) camera.ExposureEndEventFrameID.GetValue();
            }
            else
            {
                frameNumber = (uint16_t) camera.EventExposureEndFrameID.GetValue();
            }
            m_log.push_back( LogItem( eMyExposureEndEvent, frameNumber ) );

            if (GetIncrementedFrameNumber( frameNumber ) != m_nextExpectedFrameNumberExposureEnd)
            {
                // Check whether the imaged item or the sensor head can be moved.
                if (frameNumber == m_nextFrameNumberForMove)
                {
                    MoveImagedItemOrSensorHead();
                }

                // Check for missing Exposure End events.
                if (frameNumber != m_nextExpectedFrameNumberExposureEnd)
                {
                    throw RUNTIME_EXCEPTION( "An Exposure End event has been lost. Expected frame number is %d but got frame number %d.", m_nextExpectedFrameNumberExposureEnd, frameNumber );
                }
                IncrementFrameNumber( m_nextExpectedFrameNumberExposureEnd );
            }
        }
        else
        {
            PYLON_ASSERT2( false, "The sample has been modified and a new event has been registered. Add handler code above." );
        }
    }

    // This method is called when an image has been grabbed.
    virtual void OnImageGrabbed( CBaslerUniversalInstantCamera& /*camera*/, const CBaslerUniversalGrabResultPtr& ptrGrabResult )
    {
        // An image has been received.
        uint16_t frameNumber = (uint16_t) ptrGrabResult->GetBlockID();
        m_log.push_back( LogItem( eMyImageReceivedEvent, frameNumber ) );

        // Check whether the imaged item or the sensor head can be moved.
        // This will be the case if the Exposure End has been lost or if the Exposure End is received later than the image.
        if (frameNumber == m_nextFrameNumberForMove)
        {
            MoveImagedItemOrSensorHead();
        }

        // Check for missing images.
        if (frameNumber != m_nextExpectedFrameNumberImage)
        {
            throw RUNTIME_EXCEPTION( "An image has been lost. Expected frame number is %d but got frame number %d.", m_nextExpectedFrameNumberImage, frameNumber );
        }
        IncrementFrameNumber( m_nextExpectedFrameNumberImage );
    }

    void MoveImagedItemOrSensorHead()
    {
        // The imaged item or the sensor head can be moved now...
        // The camera may not be ready yet for a trigger at this point because the sensor is still being read out.
        // See the documentation of the CInstantCamera::WaitForFrameTriggerReady() method for more information.
        m_log.push_back( LogItem( eMyMoveEvent, m_nextFrameNumberForMove ) );
        IncrementFrameNumber( m_nextFrameNumberForMove );
    }

    void PrintLog()
    {
        ::PrintLog( m_log );
    }

private:
    void IncrementFrameNumber( uint16_t& frameNumber )
    {
        frameNumber = GetIncrementedFrameNumber( frameNumber );
    }

    uint16_t GetIncrementedFrameNumber( uint16_t frameNumber )
    {
        ++frameNumber;

        if (m_isGigE)
        {
            if (frameNumber == 0)
            {
                // Zero is not a valid frame number.
                ++frameNumber;
            }
        }


        return frameNumber;
    }

    uint16_t m_nextExpectedFrameNumberImage;
    uint16_t m_nextExpectedFrameNumberExposureEnd;
    uint16_t m_nextFrameNumberForMove;

    bool m_isGigE;

    std::vector<LogItem> m_log;
};



int main( int /*argc*/, char* /*argv*/[] )
{
    // Exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create the event handler.
        CEventHandler eventHandler;

        // Create an instant camera object with the first camera device found.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Camera models behave differently regarding IDs and counters. Set initial values.
        if (camera.IsGigE())
        {
            eventHandler.Initialize( 1, true );
        }
        else
        {
            eventHandler.Initialize( 0, false );
        }


        // For demonstration purposes only, add sample configuration event handlers to print information
        // about camera use and image grabbing.
        camera.RegisterConfiguration( new CConfigurationEventPrinter, RegistrationMode_Append, Cleanup_Delete ); // Camera use.

        // Register the event handler.
        camera.RegisterImageEventHandler( &eventHandler, RegistrationMode_Append, Cleanup_None );

        // Camera event processing must be enabled first. The default is off.
        camera.GrabCameraEvents = true;

        // Open the camera to configure parameters.
        camera.Open();

        // Check whether the device supports events.
        if (!camera.EventSelector.IsWritable())
        {
            throw RUNTIME_EXCEPTION( "The device doesn't support events." );
        }

        if (camera.GetSfncVersion() < Sfnc_2_0_0)
        {
            camera.RegisterCameraEventHandler( &eventHandler, "ExposureEndEventData", eMyExposureEndEvent, RegistrationMode_ReplaceAll, Cleanup_None );
        }
        else
        {
            camera.RegisterCameraEventHandler( &eventHandler, "EventExposureEndData", eMyExposureEndEvent, RegistrationMode_ReplaceAll, Cleanup_None );
        }


        // Enable the sending of Exposure End events.
        // Select the event to be received.
        if (camera.EventSelector.TrySetValue( EventSelector_ExposureEnd ))
        {   // Enable it.
            if (!camera.EventNotification.TrySetValue( EventNotification_On ))
            {
                // scout-f, scout-g, and aviator GigE cameras use a different value.
                camera.EventNotification.SetValue( EventNotification_GenICamEvent );
            }
        }


        // Start grabbing of c_countOfImagesToGrab images.
        // The camera device is operated in a default configuration that
        // sets up free-running continuous acquisition.
        camera.StartGrabbing( c_countOfImagesToGrab );

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
        // when c_countOfImagesToGrab images have been retrieved.
        while (camera.IsGrabbing())
        {
            // Retrieve grab results and notify the camera event and image event handlers.
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );
            // Nothing to do here with the grab result. The grab results are handled by the registered event handlers.
        }

        // Disable the sending of Exposure End events.
        if (camera.EventSelector.TrySetValue( EventSelector_ExposureEnd ))
        {
            camera.EventNotification.SetValue( EventNotification_Off );
        }

        // Disable the sending of Frame Start Overtrigger events.
        if (camera.EventSelector.TrySetValue( EventSelector_FrameStartOvertrigger ))
        {
            camera.EventNotification.SetValue( EventNotification_Off );
        }

        // Disable the sending of Event Overrun events.
        if (camera.EventSelector.TrySetValue( EventSelector_EventOverrun ))
        {
            camera.EventNotification.SetValue( EventNotification_Off );
        }

        // Print the recorded log showing the timing of events and images.
        eventHandler.PrintLog();
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
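
The essential Exposure End wiring in the sample above can be condensed into a few steps. The sketch below assumes a camera using SFNC 2.x naming (for older cameras, substitute "ExposureEndEventData" as the sample does); EnableExposureEndEvents is an illustrative helper, not a pylon function.

#include <pylon/PylonIncludes.h>
#include <pylon/BaslerUniversalInstantCamera.h>

using namespace Pylon;
using namespace Basler_UniversalCameraParams;

// Wire up Exposure End events: enable camera event grabbing, register the
// handler for the event data node, and switch on event notification.
void EnableExposureEndEvents( CBaslerUniversalInstantCamera& camera,
                              CBaslerUniversalCameraEventHandler& handler )
{
    camera.GrabCameraEvents = true; // Must be enabled before grabbing starts.
    camera.Open();

    camera.RegisterCameraEventHandler( &handler, "EventExposureEndData",
                                       0 /* user-provided ID */,
                                       RegistrationMode_ReplaceAll, Cleanup_None );

    if (camera.EventSelector.TrySetValue( EventSelector_ExposureEnd ))
    {
        // Older scout, aviator, and similar GigE models may need
        // EventNotification_GenICamEvent instead (see the sample above).
        camera.EventNotification.TrySetValue( EventNotification_On );
    }
}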

Grab_UsingGrabLoopThread#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_UsingGrabLoopThread.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to grab and process images using the grab loop thread
    provided by the Instant Camera class.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

// Include files used by samples.
#include "../include/ConfigurationEventPrinter.h"
#include "../include/ImageEventPrinter.h"

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

//Example of an image event handler.
class CSampleImageEventHandler : public CImageEventHandler
{
public:
    virtual void OnImageGrabbed( CInstantCamera& /*camera*/, const CGrabResultPtr& ptrGrabResult )
    {
        cout << "CSampleImageEventHandler::OnImageGrabbed called." << std::endl;

#ifdef PYLON_WIN_BUILD
        // Display the image
        Pylon::DisplayImage( 1, ptrGrabResult );
#endif
    }
};

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object for the camera device found first.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Register the standard configuration event handler for enabling software triggering.
        // The software trigger configuration handler replaces the default configuration
        // as all currently registered configuration handlers are removed by setting the registration mode to RegistrationMode_ReplaceAll.
        camera.RegisterConfiguration( new CSoftwareTriggerConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete );

        // For demonstration purposes only, registers an event handler configuration to print out information about camera use.
        // The event handler configuration is appended to the registered software trigger configuration handler by setting 
        // registration mode to RegistrationMode_Append.
        camera.RegisterConfiguration( new CConfigurationEventPrinter, RegistrationMode_Append, Cleanup_Delete );

        // The image event printer serves as sample image processing.
        // When using the grab loop thread provided by the Instant Camera object, an image event handler processing the grab
        // results must be created and registered.
        camera.RegisterImageEventHandler( new CImageEventPrinter, RegistrationMode_Append, Cleanup_Delete );

        // For demonstration purposes only, register another image event handler.
        camera.RegisterImageEventHandler( new CSampleImageEventHandler, RegistrationMode_Append, Cleanup_Delete );

        // Open the camera device.
        camera.Open();

        // Can the camera device be queried whether it is ready to accept the next frame trigger?
        if (camera.CanWaitForFrameTriggerReady())
        {
            // Start the grabbing using the grab loop thread, by setting the grabLoopType parameter
            // to GrabLoop_ProvidedByInstantCamera. The grab results are delivered to the image event handlers.
            // The GrabStrategy_OneByOne default grab strategy is used.
            camera.StartGrabbing( GrabStrategy_OneByOne, GrabLoop_ProvidedByInstantCamera );

            // Wait for user input to trigger the camera or exit the program.
            // The grabbing is stopped, the device is closed and destroyed automatically when the camera object goes out of scope.

            bool runLoop = true;
            while (runLoop)
            {
                cout << endl << "Enter \"t\" to trigger the camera or \"e\" to exit and press enter? (t/e) "; cout.flush();

                string userInput;
                getline(cin, userInput);

                for (size_t i = 0; i < userInput.size(); ++i)
                {
                    char key = userInput[i];
                    if ((key == 't' || key == 'T'))
                    {
                        // Execute the software trigger. Wait up to 1000 ms for the camera to be ready for trigger.
                        if (camera.WaitForFrameTriggerReady( 1000, TimeoutHandling_ThrowException ))
                        {
                            camera.ExecuteSoftwareTrigger();
                        }
                    }
                    else if ((key == 'e') || (key == 'E'))
                    {
                        runLoop = false;
                        break;
                    }
                }

                // Wait some time to allow the OnImageGrabbed handler to print its output,
                // so that the text printed on the console appears in the expected order.
                WaitObject::Sleep( 250 );
            }
        }
        else
        {
            // See the documentation of CInstantCamera::CanWaitForFrameTriggerReady() for more information.
            cout << endl << "This sample can only be used with cameras that can be queried whether they are ready to accept the next frame trigger." << endl;
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}

Grab_UsingSequencer#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Grab_UsingSequencer.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample shows how to grab images using the sequencer feature of a camera.
    Three sequence sets are used for image acquisition. Each sequence set
    uses a different image height.
*/

// Include files to use the pylon API
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

using namespace Pylon;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using cout
using namespace std;

// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 10;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the first found camera device.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Register the standard configuration event handler for enabling software triggering.
        // The software trigger configuration handler replaces the default configuration
        // as all currently registered configuration handlers are removed by setting the registration mode to RegistrationMode_ReplaceAll.
        camera.RegisterConfiguration( new CSoftwareTriggerConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete );

        // Open the camera.
        camera.Open();

        if (camera.SequencerMode.IsWritable() || camera.SequenceEnable.IsWritable())
        {
            if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
            {
                // Disable the sequencer before changing parameters.
                // The parameters under control of the sequencer are locked
                // when the sequencer is enabled. For a list of parameters
                // controlled by the sequencer, see the Basler Product Documentation or the camera user's manual.
                camera.SequencerMode.SetValue( SequencerMode_Off );
                camera.SequencerConfigurationMode.SetValue( SequencerConfigurationMode_Off );

                // Maximize the grabbed image area of interest (Image AOI).
                camera.OffsetX.TrySetToMinimum();
                camera.OffsetY.TrySetToMinimum();
                camera.Width.SetToMaximum();
                camera.Height.SetToMaximum();

                // Set the pixel data format.
                // This parameter may be locked when the sequencer is enabled.
                camera.PixelFormat.SetValue( PixelFormat_Mono8 );

                // Set up the sequence sets and turn sequencer configuration mode on.
                camera.SequencerConfigurationMode.SetValue( SequencerConfigurationMode_On );

                const int64_t initialSet = camera.SequencerSetSelector.GetMin();
                const int64_t incSet = camera.SequencerSetSelector.GetInc();
                int64_t curSet = initialSet;

                // Set the parameters for step 0; quarter height image.
                camera.SequencerSetSelector.SetValue( initialSet );
                { // valid for all sets
                    // reset on software signal 1;
                    camera.SequencerPathSelector.SetValue( 0 );
                    camera.SequencerSetNext.SetValue( initialSet );
                    camera.SequencerTriggerSource.SetValue( SequencerTriggerSource_SoftwareSignal1 );
                    // advance on Frame Start or Exposure Start (depends on camera family)
                    camera.SequencerPathSelector.SetValue( 1 );
                    const char* sequencerTrigger[] = { "FrameStart", "ExposureStart", NULL };
                    camera.SequencerTriggerSource.SetValue( sequencerTrigger );
                }
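                // On path 1 (selected above), advance to the next sequence set.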
                camera.SequencerSetNext.SetValue( curSet + incSet );
                // quarter height
                camera.Height.SetValuePercentOfRange( 25.0 );
                camera.SequencerSetSave.Execute();

                // Set the parameters for step 1; half height image.
                curSet += incSet;
                camera.SequencerSetSelector.SetValue( curSet );
                // advance on Frame Start to next set
                camera.SequencerSetNext.SetValue( curSet + incSet );
                // half height
                camera.Height.SetValuePercentOfRange( 50.0 );
                camera.SequencerSetSave.Execute();

                // Set the parameters for step 2; full height image.
                curSet += incSet;
                camera.SequencerSetSelector.SetValue( curSet );
                // advance on Frame End to the initial set
                camera.SequencerSetNext.SetValue( initialSet ); // terminates sequence definition
                // full height
                camera.Height.SetValuePercentOfRange( 100.0 );
                camera.SequencerSetSave.Execute();
                // Enable the sequencer feature.
                // From here on you can't change the sequencer settings anymore.
                camera.SequencerConfigurationMode.SetValue( SequencerConfigurationMode_Off );
                camera.SequencerMode.SetValue( SequencerMode_On );
            }
            else
            {
                // Disable the sequencer before changing parameters.
                // The parameters under control of the sequencer are locked
                // when the sequencer is enabled. For a list of parameters
                // controlled by the sequencer, see the Basler Product Documentation or the camera user's manual.
                camera.SequenceEnable.SetValue( false );
                camera.SequenceConfigurationMode.TrySetValue( SequenceConfigurationMode_Off );

                // Maximize the grabbed image area of interest (Image AOI).
                camera.OffsetX.TrySetToMinimum();
                camera.OffsetY.TrySetToMinimum();
                camera.Width.SetToMaximum();
                camera.Height.SetToMaximum();

                // Set the pixel data format.
                // This parameter may be locked when the sequencer is enabled.
                camera.PixelFormat.SetValue( PixelFormat_Mono8 );

                // Set up sequence sets.

                // Turn configuration mode on if available.
                // Not supported by all cameras.
                camera.SequenceConfigurationMode.TrySetValue( SequenceConfigurationMode_On );

                // Configure how the sequence will advance.
                // 'Auto' refers to the auto sequence advance mode.
                // The advance from one sequence set to the next will occur automatically with each image acquired.
                // After the end of the sequence set cycle has been reached, a new sequence set cycle will start.
                camera.SequenceAdvanceMode = SequenceAdvanceMode_Auto;

                // Our sequence sets relate to three steps (0..2).
                // In each step we will increase the height of the Image AOI by one increment.
                camera.SequenceSetTotalNumber = 3;

                // Set the parameters for step 0; quarter height image.
                camera.SequenceSetIndex = 0;
                camera.Height.SetValuePercentOfRange( 25.0 );
                camera.SequenceSetStore.Execute();

                // Set the parameters for step 1; half height image.
                camera.SequenceSetIndex = 1;
                camera.Height.SetValuePercentOfRange( 50.0 );
                camera.SequenceSetStore.Execute();

                // Set the parameters for step 2; full height image.
                camera.SequenceSetIndex = 2;
                camera.Height.SetValuePercentOfRange( 100.0 );
                camera.SequenceSetStore.Execute();

                // Turn configuration mode off if available.
                // Not supported by all cameras.
                camera.SequenceConfigurationMode.TrySetValue( SequenceConfigurationMode_Off );

                // Enable the sequencer feature.
                // From here on you can't change the sequencer settings anymore.
                camera.SequenceEnable.SetValue( true );
            }


            // Start the grabbing of c_countOfImagesToGrab images.
            camera.StartGrabbing( c_countOfImagesToGrab );

            // This smart pointer will receive the grab result data.
            CGrabResultPtr grabResult;

            // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
            // when c_countOfImagesToGrab images have been retrieved.
            while (camera.IsGrabbing())
            {
                // Execute the software trigger. Wait up to 1000 ms for the camera to be ready for trigger.
                if (camera.WaitForFrameTriggerReady( 1000, TimeoutHandling_ThrowException ))
                {
                    camera.ExecuteSoftwareTrigger();

                    // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
                    camera.RetrieveResult( 5000, grabResult, TimeoutHandling_ThrowException );

                    // Image grabbed successfully?
                    if (grabResult->GrabSucceeded())
                    {
#ifdef PYLON_WIN_BUILD
                        // Display the grabbed image.
                        Pylon::DisplayImage( 1, grabResult );
#endif

                        // Access the image data.
                        cout << "SizeX: " << grabResult->GetWidth() << endl;
                        cout << "SizeY: " << grabResult->GetHeight() << endl;
                        const uint8_t* pImageBuffer = (uint8_t*) grabResult->GetBuffer();
                        cout << "Gray value of first pixel: " << (uint32_t) pImageBuffer[0] << endl << endl;
                    }
                    else
                    {
                        cout << "Error: " << std::hex << grabResult->GetErrorCode() << std::dec << " " << grabResult->GetErrorDescription() << endl;
                    }
                }

                // Wait for user input.
                cerr << endl << "Press enter to continue." << endl << endl;
                while (camera.IsGrabbing() && cin.get() != '\n');
            }

            // Disable the sequencer.
            if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
            {
                camera.SequencerMode.SetValue( SequencerMode_Off );
            }
            else
            {
                camera.SequenceEnable.SetValue( false );
            }
            camera.SequenceConfigurationMode.TrySetValue( SequenceConfigurationMode_Off );
        }
        else
        {
            cout << "The sequencer feature is not available for this camera." << endl;
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}

GUI_ImageWindow#

Note

This sample is only available on Windows.

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// GUI_ImageWindow.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to show images using the
    CPylonImageWindow class. Here, images are grabbed, split into
    multiple tiles, and each tile is shown in a separate image window.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#include <pylon/PylonGUI.h>
#include "../include/SampleImageCreator.h"
#include <conio.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 1000;

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Define constants.
        static const uint32_t cNumTilesX = 3;
        static const uint32_t cNumTilesY = 2;
        static const uint32_t cWindowBorderSizeX = 25;
        static const uint32_t cWindowBorderSizeY = 125;
        static const uint32_t cScreenStartX = 40;
        static const uint32_t cScreenStartY = 40;
        static const uint32_t cMaxIndex = 31;
        static const size_t   cNumWindows = cNumTilesY * cNumTilesX;
        static const uint32_t cMaxWidth = 640;
        static const uint32_t cMaxHeight = 480;

        // Create an array of image windows.
        CPylonImageWindow imageWindows[cNumWindows];

        // Create an Instant Camera object.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Start the grab. Only display the latest image.
        camera.StartGrabbing( c_countOfImagesToGrab, GrabStrategy_LatestImageOnly );

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Grab images and show the tiles of each image in separate image windows.
        while (camera.IsGrabbing())
        {
            // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );

            // If the image was grabbed successfully.
            if (ptrGrabResult->GrabSucceeded())
            {
                // This image object is used for splitting the grabbed image into tiles.
                CPylonImage image;

                // Attach the grab result to a pylon image.
                image.AttachGrabResultBuffer( ptrGrabResult );

                // Compute tile sizes.
                uint32_t imageTileWidth = min( image.GetWidth(), cMaxWidth ) / cNumTilesX;
                uint32_t imageTileHeight = min( image.GetHeight(), cMaxHeight ) / cNumTilesY;
                // Align the tile sizes to the pixel increments of the image format.
                imageTileWidth -= imageTileWidth % GetPixelIncrementX( image.GetPixelType() );
                imageTileHeight -= imageTileHeight % GetPixelIncrementY( image.GetPixelType() );

                uint32_t windowTileWidth = imageTileWidth + cWindowBorderSizeX;
                uint32_t windowTileHeight = imageTileHeight + cWindowBorderSizeY;

                // Create and display the tiles of the grabbed image.
                for (uint32_t indexTileX = 0; indexTileX < cNumTilesX; ++indexTileX)
                {
                    for (uint32_t indexTileY = 0; indexTileY < cNumTilesY; ++indexTileY)
                    {
                        size_t arrayIndex = indexTileY * cNumTilesX + indexTileX;
                        bool windowCreated = false;

                        if (!imageWindows[arrayIndex].IsValid())
                        {
                            // Create the image window and position the image window as a tile on the screen.
                            // The Image Window stores the last size and position.
                            // The last Image Window indices are used here to avoid changing
                            // the settings of the windows used for other samples.
                            size_t windowIndex = cMaxIndex - arrayIndex;
                            imageWindows[arrayIndex].Create( windowIndex,
                                                             cScreenStartX + indexTileX * windowTileWidth,
                                                             cScreenStartY + indexTileY * windowTileHeight,
                                                             windowTileWidth,
                                                             windowTileHeight
                                                             );

                            windowCreated = true;
                        }

                        // Get the image area of interest (Image AOI) that includes the tile. This is a zero copy operation.
                        CPylonImage tile = image.GetAoi( indexTileX * imageTileWidth, indexTileY * imageTileHeight, imageTileWidth, imageTileHeight );

                        // Set the tile image.
                        imageWindows[arrayIndex].SetImage( tile );

                        // Show the image.
                        imageWindows[arrayIndex].Show();

                        if (windowCreated)
                        {
                            // Wait a little to show how the windows appear on the screen.
                            ::Sleep( 200 );
                        }
                    }
                }
            }
            else
            {
                throw RUNTIME_EXCEPTION( "Error: Image grab failed: %hs", ptrGrabResult->GetErrorDescription().c_str() );
            }
        }

        // Destroy the windows.
        for (size_t arrayIndex = 0; arrayIndex < cNumWindows; ++arrayIndex)
        {
            // Close() closes and destroys the window.
            imageWindows[arrayIndex].Close();

            // Wait a little to show how the windows are removed from the screen.
            ::Sleep( 200 );
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}

GUI_MFC#

Note

This sample is only available on Windows.

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

This sample illustrates the use of an MFC GUI with the pylon C++ API to enumerate the attached cameras, to configure a camera, to start and stop the grab, and to display and store grabbed images. The sample shows how to use GUI controls to display and modify camera parameters.
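The GUI code itself is not listed here. As a rough orientation only, the following minimal console sketch, which is not part of the MFC sample, shows the plain pylon calls behind the enumeration step that the dialog wraps:

// Minimal sketch of device enumeration, independent of any GUI framework.
#include <pylon/PylonIncludes.h>
#include <iostream>

int main()
{
    // Before using any pylon methods, the pylon runtime must be initialized.
    Pylon::PylonInitialize();
    {
        // Ask the transport layer factory for all camera devices currently attached.
        Pylon::DeviceInfoList_t devices;
        Pylon::CTlFactory::GetInstance().EnumerateDevices( devices );

        // Print the model name and serial number of each device found.
        for (size_t i = 0; i < devices.size(); ++i)
        {
            std::cout << devices[i].GetModelName() << " ("
                      << devices[i].GetSerialNumber() << ")" << std::endl;
        }
    }
    // Releases all pylon resources.
    Pylon::PylonTerminate();
    return 0;
}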

GUI_MFCMultiCam#

Note

This sample is only available on Windows.

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

This sample illustrates the use of an MFC dialog application using two cameras with the pylon C++ API. It shows how to enumerate the attached cameras and how to open a specific camera by its serial number or its user-defined name. It also shows how to configure the cameras, how to start and stop the grab, and how to display the grabbed images side by side. In addition, the sample demonstrates how to use GUI controls to display and modify camera parameters.
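The dialog code is not listed here. The following fragment is only a sketch of how a specific camera can be created by its serial number or its user-defined name with plain pylon calls; the values shown are placeholders, not part of the sample:

// Sketch: create a camera object for a device selected by its serial number.
// "21694497" is a placeholder; use the serial number of your own camera.
Pylon::CDeviceInfo info;
info.SetSerialNumber( "21694497" );

// Alternatively, select the device by its user-defined name:
// info.SetUserDefinedName( "MyCamera1" );

Pylon::CInstantCamera camera( Pylon::CTlFactory::GetInstance().CreateDevice( info ) );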

ParametrizeCamera_AutoFunctions#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// ParametrizeCamera_AutoFunctions.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    Note: Different camera families implement different versions of the Standard Feature Naming Convention (SFNC).
    That's why the name and the type of the parameters used can be different.

    This sample illustrates how to use the Auto Functions feature of Basler cameras.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Forward declarations for helper functions
bool IsColorCamera( CBaslerUniversalInstantCamera& camera );
void AutoGainOnce( CBaslerUniversalInstantCamera& camera );
void AutoGainContinuous( CBaslerUniversalInstantCamera& camera );
void AutoExposureOnce( CBaslerUniversalInstantCamera& camera );
void AutoExposureContinuous( CBaslerUniversalInstantCamera& camera );
void AutoWhiteBalance( CBaslerUniversalInstantCamera& camera );


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the first found camera device.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the name of the used camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Register the standard event handler for configuring single frame acquisition.
        // This overrides the default configuration as all event handlers are removed by setting the registration mode to RegistrationMode_ReplaceAll.
        // Please note that the camera device auto functions do not require grabbing by single frame acquisition.
        // All available acquisition modes can be used.
        camera.RegisterConfiguration( new CAcquireSingleFrameConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete );

        // Open the camera.
        camera.Open();

        // Turn test image off.
        camera.TestImageSelector.TrySetValue( TestImageSelector_Off );
        camera.TestPattern.TrySetValue( TestPattern_Off );

        // Only area scan cameras support auto functions.
        if (camera.DeviceScanType.GetValue() == DeviceScanType_Areascan)
        {
            if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
            {
                // All area scan cameras support luminance control.
                // Carry out luminance control by using the "once" gain auto function.
                // For demonstration purposes only, set the gain to an initial value.
                camera.Gain.SetToMaximum();
                AutoGainOnce( camera );
                cerr << endl << "Press enter to continue." << endl;
                while (cin.get() != '\n');


                // Carry out luminance control by using the "continuous" gain auto function.
                // For demonstration purposes only, set the gain to an initial value.
                camera.Gain.SetToMaximum();
                AutoGainContinuous( camera );
                cerr << endl << "Press enter to continue." << endl;
                while (cin.get() != '\n');

                // For demonstration purposes only, set the exposure time to an initial value.
                camera.ExposureTime.SetToMinimum();

                // Carry out luminance control by using the "once" exposure auto function.
                AutoExposureOnce( camera );
                cerr << endl << "Press enter to continue." << endl;
                while (cin.get() != '\n');

                // For demonstration purposes only, set the exposure time to an initial value.
                camera.ExposureTime.SetToMinimum();

                // Carry out luminance control by using the "continuous" exposure auto function.
                AutoExposureContinuous( camera );
            }
            else
            {
                // All area scan cameras support luminance control.
                // Carry out luminance control by using the "once" gain auto function.
                // For demonstration purposes only, set the gain to an initial value.
                camera.GainRaw.SetToMaximum();
                AutoGainOnce( camera );
                cerr << endl << "Press enter to continue." << endl;
                while (cin.get() != '\n');


                // Carry out luminance control by using the "continuous" gain auto function.
                // For demonstration purposes only, set the gain to an initial value.
                camera.GainRaw.SetToMaximum();
                AutoGainContinuous( camera );
                cerr << endl << "Press enter to continue." << endl;
                while (cin.get() != '\n');

                // For demonstration purposes only, set the exposure time to an initial value.
                camera.ExposureTimeRaw.SetToMinimum();

                // Carry out luminance control by using the "once" exposure auto function.
                AutoExposureOnce( camera );
                cerr << endl << "Press enter to continue." << endl;
                while (cin.get() != '\n');

                // For demonstration purposes only, set the exposure time to an initial value.
                camera.ExposureTimeRaw.SetToMinimum();

                // Carry out luminance control by using the "continuous" exposure auto function.
                AutoExposureContinuous( camera );
            }

            // Only color cameras support the balance white auto function.
            if (IsColorCamera( camera ))
            {
                cerr << endl << "Press enter to continue." << endl;
                while (cin.get() != '\n');

                if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
                {
                    // For demonstration purposes only, set the initial balance ratio values:
                    camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Red );
                    camera.BalanceRatio.SetToMaximum();
                    camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Green );
                    camera.BalanceRatio.TrySetValuePercentOfRange( 50.0 );
                    camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Blue );
                    camera.BalanceRatio.SetToMinimum();
                }
                else
                {
                    // For demonstration purposes only, set the initial balance ratio values:
                    camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Red );
                    camera.BalanceRatioAbs.SetToMaximum();
                    camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Green );
                    camera.BalanceRatioAbs.TrySetValuePercentOfRange( 50.0 );
                    camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Blue );
                    camera.BalanceRatioAbs.SetToMinimum();
                }

                // Carry out white balance using the balance white auto function.
                AutoWhiteBalance( camera );
            }
        }
        else
        {
            cerr << "Only area scan cameras support auto functions." << endl;
        }

        // Close camera.
        camera.Close();

    }
    catch (const TimeoutException& e)
    {
        // Auto functions did not finish in time.
        // Maybe the cap on the lens is still on or there is not enough light.
        cerr << "A timeout has occurred." << endl
            << e.GetDescription() << endl;
        cerr << "Please make sure you remove the cap from the camera lens before running this sample." << endl;
        exitCode = 0;
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}


void AutoGainOnce( CBaslerUniversalInstantCamera& camera )
{
    // Check whether the gain auto function is available.
    if (!camera.GainAuto.IsWritable())
    {
        cout << "The camera does not support Gain Auto." << endl << endl;
        return;
    }

    // Maximize the grabbed image area of interest (Image AOI).
    camera.OffsetX.TrySetToMinimum();
    camera.OffsetY.TrySetToMinimum();
    camera.Width.SetToMaximum();
    camera.Height.SetToMaximum();

    if (camera.AutoFunctionROISelector.IsWritable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        // Set the Auto Function ROI for luminance statistics.
        // We want to use ROI1 for gathering the statistics

        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI1 );
        camera.AutoFunctionROIUseBrightness.TrySetValue( true );   // ROI 1 is used for brightness control
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI2 );
        camera.AutoFunctionROIUseBrightness.TrySetValue( false );   // ROI 2 is not used for brightness control

        // Set the ROI (in this example the complete sensor is used)
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI1 );  // configure ROI 1
        camera.AutoFunctionROIOffsetX.SetToMinimum();
        camera.AutoFunctionROIOffsetY.SetToMinimum();
        camera.AutoFunctionROIWidth.SetToMaximum();
        camera.AutoFunctionROIHeight.SetToMaximum();
    }
    else if (camera.AutoFunctionAOISelector.IsWritable())
    {
        // Set the Auto Function AOI for luminance statistics.
        // Currently, AutoFunctionAOISelector_AOI1 is predefined to gather
        // luminance statistics.
        camera.AutoFunctionAOISelector.SetValue( AutoFunctionAOISelector_AOI1 );
        camera.AutoFunctionAOIOffsetX.SetToMinimum();
        camera.AutoFunctionAOIOffsetY.SetToMinimum();
        camera.AutoFunctionAOIWidth.SetToMaximum();
        camera.AutoFunctionAOIHeight.SetToMaximum();
    }

    if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        // Set the target value for luminance control.
        // A value of 0.3 means that the target brightness is 30 % of the maximum brightness of the raw pixel value read out from the sensor.
        // A value of 0.4 means 40 % and so forth.
        camera.AutoTargetBrightness.SetValue( 0.3 );

        // We are going to try GainAuto = Once.

        cout << "Trying 'GainAuto = Once'." << endl;
        cout << "Initial Gain = " << camera.Gain.GetValue() << endl;

        // Set the gain ranges for luminance control.
        camera.AutoGainLowerLimit.SetToMinimum();
        camera.AutoGainUpperLimit.SetToMaximum();
    }
    else
    {
        // Set the target value for luminance control. The value is always expressed
        // as an 8 bit value regardless of the current pixel data output format,
        // i.e., 0 -> black, 255 -> white.
        camera.AutoTargetValue.TrySetValue( 80 );

        // We are going to try GainAuto = Once.

        cout << "Trying 'GainAuto = Once'." << endl;
        cout << "Initial Gain = " << camera.GainRaw.GetValue() << endl;

        // Set the gain ranges for luminance control.
        camera.AutoGainRawLowerLimit.SetToMinimum();
        camera.AutoGainRawUpperLimit.SetToMaximum();
    }

    camera.GainAuto.SetValue( GainAuto_Once );

    // When the "once" mode of operation is selected,
    // the parameter values are automatically adjusted until the related image property
    // reaches the target value. After the automatic parameter value adjustment is complete, the auto
    // function will automatically be set to "off" and the new parameter value will be applied to the
    // subsequently grabbed images.

    int n = 0;
    while (camera.GainAuto.GetValue() != GainAuto_Off)
    {
        CBaslerUniversalGrabResultPtr ptrGrabResult;
        camera.GrabOne( 5000, ptrGrabResult );
#ifdef PYLON_WIN_BUILD
        Pylon::DisplayImage( 1, ptrGrabResult );

        //For demonstration purposes only. Wait until the image is shown.
        WaitObject::Sleep( 100 );
#endif

        //Make sure the loop is exited.
        if (++n > 100)
        {
            throw TIMEOUT_EXCEPTION( "The adjustment of auto gain did not finish." );
        }
    }

    cout << "GainAuto went back to 'Off' after " << n << " frames." << endl;
    if (camera.Gain.IsReadable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        cout << "Final Gain = " << camera.Gain.GetValue() << endl << endl;
    }
    else
    {
        cout << "Final Gain = " << camera.GainRaw.GetValue() << endl << endl;
    }
}


void AutoGainContinuous( CBaslerUniversalInstantCamera& camera )
{
    // Check whether the Gain Auto feature is available.
    if (!camera.GainAuto.IsWritable())
    {
        cout << "The camera does not support Gain Auto." << endl << endl;
        return;
    }

    // Maximize the grabbed image area of interest (Image AOI).
    camera.OffsetX.TrySetToMinimum();
    camera.OffsetY.TrySetToMinimum();
    camera.Width.SetToMaximum();
    camera.Height.SetToMaximum();

    if (camera.AutoFunctionROISelector.IsWritable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        // Set the Auto Function ROI for luminance statistics.
        // We want to use ROI1 for gathering the statistics

        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI1 );
        camera.AutoFunctionROIUseBrightness.TrySetValue( true );   // ROI 1 is used for brightness control
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI2 );
        camera.AutoFunctionROIUseBrightness.TrySetValue( false );   // ROI 2 is not used for brightness control


        // Set the ROI (in this example the complete sensor is used)
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI1 );  // configure ROI 1
        camera.AutoFunctionROIOffsetX.SetToMinimum();
        camera.AutoFunctionROIOffsetY.SetToMinimum();
        camera.AutoFunctionROIWidth.SetToMaximum();
        camera.AutoFunctionROIHeight.SetToMaximum();
    }
    else if (camera.AutoFunctionAOISelector.IsWritable())
    {
        // Set the Auto Function AOI for luminance statistics.
        // Currently, AutoFunctionAOISelector_AOI1 is predefined to gather
        // luminance statistics.
        camera.AutoFunctionAOISelector.SetValue( AutoFunctionAOISelector_AOI1 );
        camera.AutoFunctionAOIOffsetX.SetToMinimum();
        camera.AutoFunctionAOIOffsetY.SetToMinimum();
        camera.AutoFunctionAOIWidth.SetToMaximum();
        camera.AutoFunctionAOIHeight.SetToMaximum();
    }

    if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        // Set the target value for luminance control.
        // A value of 0.3 means that the target brightness is 30 % of the maximum brightness of the raw pixel value read out from the sensor.
        // A value of 0.4 means 40 % and so forth.
        camera.AutoTargetBrightness.SetValue( 0.3 );

        // We are trying GainAuto = Continuous.
        cout << "Trying 'GainAuto = Continuous'." << endl;
        cout << "Initial Gain = " << camera.Gain.GetValue() << endl;

        camera.GainAuto.SetValue( GainAuto_Continuous );
    }
    else
    {
        // Set the target value for luminance control. The value is always expressed
        // as an 8 bit value regardless of the current pixel data output format,
        // i.e., 0 -> black, 255 -> white.
        camera.AutoTargetValue.TrySetValue( 80 );

        // We are trying GainAuto = Continuous.
        cout << "Trying 'GainAuto = Continuous'." << endl;
        cout << "Initial Gain = " << camera.GainRaw.GetValue() << endl;

        camera.GainAuto.SetValue( GainAuto_Continuous );
    }

    // When "continuous" mode is selected, the parameter value is adjusted repeatedly while images are acquired.
    // Depending on the current frame rate, the automatic adjustments will usually be carried out for
    // every or every other image unless the camera's microcontroller is kept busy by other tasks.
    // The repeated automatic adjustment will proceed until the "once" mode of operation is used or
    // until the auto function is set to "off", in which case the parameter value resulting from the latest
    // automatic adjustment will operate unless the value is manually adjusted.
    for (int n = 0; n < 20; ++n)            // For demonstration purposes, we will grab "only" 20 images.
    {
        CBaslerUniversalGrabResultPtr ptrGrabResult;
        camera.GrabOne( 5000, ptrGrabResult );
#ifdef PYLON_WIN_BUILD
        Pylon::DisplayImage( 1, ptrGrabResult );

        //For demonstration purposes only. Wait until the image is shown.
        WaitObject::Sleep( 100 );
#endif
    }
    camera.GainAuto.SetValue( GainAuto_Off ); // Switch off GainAuto.

    if (camera.Gain.IsReadable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        cout << "Final Gain = " << camera.Gain.GetValue() << endl << endl;
    }
    else
    {
        cout << "Final Gain = " << camera.GainRaw.GetValue() << endl << endl;
    }
}


void AutoExposureOnce( CBaslerUniversalInstantCamera& camera )
{
    // Check whether auto exposure is available
    if (!camera.ExposureAuto.IsWritable())
    {
        cout << "The camera does not support Exposure Auto." << endl << endl;
        return;
    }

    // Maximize the grabbed area of interest (Image AOI).
    camera.OffsetX.TrySetToMinimum();
    camera.OffsetY.TrySetToMinimum();
    camera.Width.SetToMaximum();
    camera.Height.SetToMaximum();

    if (camera.AutoFunctionROISelector.IsWritable())
    {
        // Set the Auto Function ROI for luminance statistics.
        // We want to use ROI1 for gathering the statistics
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI1 );
        camera.AutoFunctionROIUseBrightness.TrySetValue( true );   // ROI 1 is used for brightness control
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI2 );
        camera.AutoFunctionROIUseBrightness.TrySetValue( false );   // ROI 2 is not used for brightness control

        // Set the ROI (in this example the complete sensor is used)
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI1 );  // configure ROI 1
        camera.AutoFunctionROIOffsetX.SetToMinimum();
        camera.AutoFunctionROIOffsetY.SetToMinimum();
        camera.AutoFunctionROIWidth.SetToMaximum();
        camera.AutoFunctionROIHeight.SetToMaximum();
    }
    else if (camera.AutoFunctionAOISelector.IsWritable())
    {
        // Set the Auto Function AOI for luminance statistics.
        // Currently, AutoFunctionAOISelector_AOI1 is predefined to gather
        // luminance statistics.
        camera.AutoFunctionAOISelector.SetValue( AutoFunctionAOISelector_AOI1 );
        camera.AutoFunctionAOIOffsetX.SetToMinimum();
        camera.AutoFunctionAOIOffsetY.SetToMinimum();
        camera.AutoFunctionAOIWidth.SetToMaximum();
        camera.AutoFunctionAOIHeight.SetToMaximum();
    }

    if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        // Set the target value for luminance control.
        // A value of 0.3 means that the target brightness is 30 % of the maximum brightness of the raw pixel value read out from the sensor.
        // A value of 0.4 means 40 % and so forth.
        camera.AutoTargetBrightness.SetValue( 0.3 );

        // Try ExposureAuto = Once.
        cout << "Trying 'ExposureAuto = Once'." << endl;
        cout << "Initial exposure time = ";
        cout << camera.ExposureTime.GetValue() << " us" << endl;

        // Set the exposure time ranges for luminance control.
        camera.AutoExposureTimeLowerLimit.SetToMinimum();
        // Some cameras have a very high upper limit.
        // To avoid excessive execution times of the sample, we use 1000000 us (1 s) as the upper limit.
        // If you need longer exposure times, you can set this to the maximum value.
        camera.AutoExposureTimeUpperLimit.SetValue( 1 * 1000 * 1000, FloatValueCorrection_ClipToRange );

        camera.ExposureAuto.SetValue( ExposureAuto_Once );
    }
    else
    {
        // Set the target value for luminance control. The value is always expressed
        // as an 8 bit value regardless of the current pixel data output format,
        // i.e., 0 -> black, 255 -> white.
        camera.AutoTargetValue.SetValue( 80 );

        // Try ExposureAuto = Once.
        cout << "Trying 'ExposureAuto = Once'." << endl;
        cout << "Initial exposure time = ";
        cout << camera.ExposureTimeAbs.GetValue() << " us" << endl;

        // Set the exposure time ranges for luminance control.
        camera.AutoExposureTimeAbsLowerLimit.SetToMinimum();
        // Some cameras have a very high upper limit.
        // To avoid excessive execution times of the sample, we use 1000000 us (1 s) as the upper limit.
        // If you need longer exposure times, you can set this to the maximum value.
        camera.AutoExposureTimeAbsUpperLimit.SetValue( 1 * 1000 * 1000, FloatValueCorrection_ClipToRange );

        camera.ExposureAuto.SetValue( ExposureAuto_Once );
    }

    // When the "once" mode of operation is selected,
    // the parameter values are automatically adjusted until the related image property
    // reaches the target value. After the automatic parameter value adjustment is complete, the auto
    // function will automatically be set to "off", and the new parameter value will be applied to the
    // subsequently grabbed images.
    int n = 0;
    while (camera.ExposureAuto.GetValue() != ExposureAuto_Off)
    {
        CBaslerUniversalGrabResultPtr ptrGrabResult;
        camera.GrabOne( 5000, ptrGrabResult );
#ifdef PYLON_WIN_BUILD
        Pylon::DisplayImage( 1, ptrGrabResult );

        //For demonstration purposes only. Wait until the image is shown.
        WaitObject::Sleep( 100 );
#endif
        //Make sure the loop is exited.
        if (++n > 100)
        {
            throw TIMEOUT_EXCEPTION( "The adjustment of auto exposure did not finish." );
        }
    }

    cout << "ExposureAuto went back to 'Off' after " << n << " frames." << endl;
    cout << "Final exposure time = ";
    if (camera.ExposureTime.IsReadable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        cout << camera.ExposureTime.GetValue() << " us" << endl << endl;
    }
    else
    {
        cout << camera.ExposureTimeAbs.GetValue() << " us" << endl << endl;
    }
}


void AutoExposureContinuous( CBaslerUniversalInstantCamera& camera )
{
    // Check whether the Exposure Auto feature is available.
    if (!camera.ExposureAuto.IsWritable())
    {
        cout << "The camera does not support Exposure Auto." << endl << endl;
        return;
    }

    // Maximize the grabbed area of interest (Image AOI).
    camera.OffsetX.TrySetToMinimum();
    camera.OffsetY.TrySetToMinimum();
    camera.Width.SetToMaximum();
    camera.Height.SetToMaximum();

    if (camera.AutoFunctionROISelector.IsWritable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        // Set the Auto Function ROI for luminance statistics.
        // We want to use ROI1 for gathering the statistics

        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI1 );
        camera.AutoFunctionROIUseBrightness.TrySetValue( true );   // ROI 1 is used for brightness control
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI2 );
        camera.AutoFunctionROIUseBrightness.TrySetValue( false );   // ROI 2 is not used for brightness control

        // Set the ROI (in this example the complete sensor is used)
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI1 );  // configure ROI 1
        camera.AutoFunctionROIOffsetX.SetToMinimum();
        camera.AutoFunctionROIOffsetY.SetToMinimum();
        camera.AutoFunctionROIWidth.SetToMaximum();
        camera.AutoFunctionROIHeight.SetToMaximum();
    }
    else if (camera.AutoFunctionAOISelector.IsWritable())
    {
        // Set the Auto Function AOI for luminance statistics.
        // Currently, AutoFunctionAOISelector_AOI1 is predefined to gather
        // luminance statistics.
        camera.AutoFunctionAOISelector.SetValue( AutoFunctionAOISelector_AOI1 );
        camera.AutoFunctionAOIOffsetX.SetToMinimum();
        camera.AutoFunctionAOIOffsetY.SetToMinimum();
        camera.AutoFunctionAOIWidth.SetToMaximum();
        camera.AutoFunctionAOIHeight.SetToMaximum();
    }

    if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        // Set the target value for luminance control.
        // A value of 0.3 means that the target brightness is 30 % of the maximum brightness of the raw pixel value read out from the sensor.
        // A value of 0.4 means 40 % and so forth.
        camera.AutoTargetBrightness.SetValue( 0.3 );

        cout << "Trying 'ExposureAuto = Continuous'." << endl;
        cout << "Initial exposure time = ";
        cout << camera.ExposureTime.GetValue() << " us" << endl;

        camera.ExposureAuto.SetValue( ExposureAuto_Continuous );
    }
    else
    {
        // Set the target value for luminance control. The value is always expressed
        // as an 8 bit value regardless of the current pixel data output format,
        // i.e., 0 -> black, 255 -> white.
        camera.AutoTargetValue.SetValue( 80 );

        cout << "Trying 'ExposureAuto = Continuous'." << endl;
        cout << "Initial exposure time = ";
        cout << camera.ExposureTimeAbs.GetValue() << " us" << endl;

        camera.ExposureAuto.SetValue( ExposureAuto_Continuous );
    }

    // When "continuous" mode is selected, the parameter value is adjusted repeatedly while images are acquired.
    // Depending on the current frame rate, the automatic adjustments will usually be carried out for
    // every or every other image, unless the camera's microcontroller is kept busy by other tasks.
    // The repeated automatic adjustment will proceed until the "once" mode of operation is used or
    // until the auto function is set to "off", in which case the parameter value resulting from the latest
    // automatic adjustment will operate unless the value is manually adjusted.
    for (int n = 0; n < 20; ++n)    // For demonstration purposes, we will use only 20 images.
    {
        CBaslerUniversalGrabResultPtr ptrGrabResult;
        camera.GrabOne( 5000, ptrGrabResult );
#ifdef PYLON_WIN_BUILD
        Pylon::DisplayImage( 1, ptrGrabResult );

        //For demonstration purposes only. Wait until the image is shown.
        WaitObject::Sleep( 100 );
#endif
    }
    camera.ExposureAuto.SetValue( ExposureAuto_Off ); // Switch off Exposure Auto.

    cout << "Final exposure time = ";
    if (camera.ExposureTime.IsReadable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        cout << camera.ExposureTime.GetValue() << " us" << endl << endl;
    }
    else
    {
        cout << camera.ExposureTimeAbs.GetValue() << " us" << endl << endl;
    }
}


void AutoWhiteBalance( CBaslerUniversalInstantCamera& camera )
{
    // Check whether the Balance White Auto feature is available.
    if (!camera.BalanceWhiteAuto.IsWritable())
    {
        cout << "The camera does not support Balance White Auto." << endl << endl;
        return;
    }

    // Maximize the grabbed area of interest (Image AOI).
    camera.OffsetX.TrySetToMinimum();
    camera.OffsetY.TrySetToMinimum();
    camera.Width.SetToMaximum();
    camera.Height.SetToMaximum();

    if (camera.AutoFunctionROISelector.IsWritable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        // Set the Auto Function ROI for white balance.
        // We want to use ROI2
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI1 );
        camera.AutoFunctionROIUseWhiteBalance.SetValue( false );   // ROI 1 is not used for white balance
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI2 );
        camera.AutoFunctionROIUseWhiteBalance.SetValue( true );   // ROI 2 is used for white balance

        // Set the Auto Function ROI for white balance statistics.
        // Currently, AutoFunctionROISelector_ROI2 is predefined to gather
        // white balance statistics.
        camera.AutoFunctionROISelector.SetValue( AutoFunctionROISelector_ROI2 );
        camera.AutoFunctionROIOffsetX.SetToMinimum();
        camera.AutoFunctionROIOffsetY.SetToMinimum();
        camera.AutoFunctionROIWidth.SetToMaximum();
        camera.AutoFunctionROIHeight.SetToMaximum();
    }
    else if (camera.AutoFunctionAOISelector.IsWritable())
    {
        // Set the Auto Function AOI for luminance statistics.
        // Currently, AutoFunctionAOISelector_AOI1 is predefined to gather
        // luminance statistics.
        camera.AutoFunctionAOISelector.SetValue( AutoFunctionAOISelector_AOI1 );
        camera.AutoFunctionAOIOffsetX.SetToMinimum();
        camera.AutoFunctionAOIOffsetY.SetToMinimum();
        camera.AutoFunctionAOIWidth.SetToMaximum();
        camera.AutoFunctionAOIHeight.SetToMaximum();
    }

    cout << "Trying 'BalanceWhiteAuto = Once'." << endl;
    cout << "Initial balance ratio: ";

    if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Red );
        cout << "R = " << camera.BalanceRatio.GetValue() << "   ";
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Green );
        cout << "G = " << camera.BalanceRatio.GetValue() << "   ";
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Blue );
        cout << "B = " << camera.BalanceRatio.GetValue() << endl;
    }
    else
    {
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Red );
        cout << "R = " << camera.BalanceRatioAbs.GetValue() << "   ";
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Green );
        cout << "G = " << camera.BalanceRatioAbs.GetValue() << "   ";
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Blue );
        cout << "B = " << camera.BalanceRatioAbs.GetValue() << endl;
    }

    camera.BalanceWhiteAuto.SetValue( BalanceWhiteAuto_Once );

    // When the "once" mode of operation is selected,
    // the parameter values are automatically adjusted until the related image property
    // reaches the target value. After the automatic parameter value adjustment is complete, the auto
    // function will automatically be set to "off" and the new parameter value will be applied to the
    // subsequently grabbed images.
    int n = 0;
    while (camera.BalanceWhiteAuto.GetValue() != BalanceWhiteAuto_Off)
    {
        CBaslerUniversalGrabResultPtr ptrGrabResult;
        camera.GrabOne( 5000, ptrGrabResult );
#ifdef PYLON_WIN_BUILD
        Pylon::DisplayImage( 1, ptrGrabResult );

        //For demonstration purposes only. Wait until the image is shown.
        WaitObject::Sleep( 100 );
#endif

        //Make sure the loop is exited.
        if (++n > 100)
        {
            throw TIMEOUT_EXCEPTION( "The adjustment of auto white balance did not finish." );
        }
    }

    cout << "BalanceWhiteAuto went back to 'Off' after ";
    cout << n << " frames." << endl;
    cout << "Final balance ratio: ";

    if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
    {
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Red );
        cout << "R = " << camera.BalanceRatio.GetValue() << "   ";
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Green );
        cout << "G = " << camera.BalanceRatio.GetValue() << "   ";
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Blue );
        cout << "B = " << camera.BalanceRatio.GetValue() << endl;
    }
    else
    {
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Red );
        cout << "R = " << camera.BalanceRatioAbs.GetValue() << "   ";
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Green );
        cout << "G = " << camera.BalanceRatioAbs.GetValue() << "   ";
        camera.BalanceRatioSelector.SetValue( BalanceRatioSelector_Blue );
        cout << "B = " << camera.BalanceRatioAbs.GetValue() << endl;
    }
}


bool IsColorCamera( CBaslerUniversalInstantCamera& camera )
{
    StringList_t settableValues;
    camera.PixelFormat.GetSettableValues( settableValues );
    bool result = false;

    for (size_t i = 0; i < settableValues.size(); i++)
    {
        if (settableValues[i].find( String_t( "Bayer" ) ) != String_t::_npos())
        {
            result = true;
            break;
        }
    }
    return result;
}
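
In addition to the "Once" mode shown above, the auto functions can also be operated in continuous mode, in which the camera keeps adjusting the parameter value while grabbing. The following minimal sketch is not part of the sample; it assumes an opened CBaslerUniversalInstantCamera named camera, as in the functions above.

// Sketch only: enable the continuous mode of the Exposure Auto function.
// Assumes an opened CBaslerUniversalInstantCamera named 'camera'.
if (camera.ExposureAuto.IsWritable())
{
    camera.ExposureAuto.SetValue( ExposureAuto_Continuous );

    // Grab and process images here; the camera keeps adjusting the exposure time.

    camera.ExposureAuto.SetValue( ExposureAuto_Off ); // Switch off Exposure Auto again.
}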

ParametrizeCamera_Configurations#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// ParametrizeCamera_Configurations.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    The instant camera allows you to install event handlers for configuration
    purposes and for handling the grab results. This is very useful for handling standard
    camera setups and image processing tasks.

    This sample shows how to use configuration event handlers by applying the standard
    configurations and registering sample configuration event handlers.

    Configuration event handlers are derived from the CConfigurationEventHandler base class.
    CConfigurationEventHandler provides virtual methods that can be overridden. If the
    configuration event handler is registered, these methods are called when the state of the
    instant camera object changes, e.g., when the camera object is opened or closed.

    The standard configuration event handlers override the OnOpened method. The overridden method
    parametrizes the camera.

    Device-specific camera classes, e.g., for GigE cameras, provide specialized
    event handler base classes, e.g., CBaslerGigEConfigurationEventHandler.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include files used by samples.
#include "../include/ImageEventPrinter.h"
#include "../include/ConfigurationEventPrinter.h"
#include "../include/PixelFormatAndAoiConfiguration.h"

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

// Number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 3;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the first camera device found.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // For demonstration purposes only, register an image event handler
        // that prints out information about the grabbed images.
        camera.RegisterImageEventHandler( new CImageEventPrinter, RegistrationMode_Append, Cleanup_Delete );

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;



        cout << "Grab using continuous acquisition:" << endl << endl;

        // Register the standard configuration event handler for setting up the camera for continuous acquisition.
        // By setting the registration mode to RegistrationMode_ReplaceAll, the new configuration handler replaces the
        // default configuration handler that has been automatically registered when creating the
        // instant camera object.
        // The handler is automatically deleted when deregistered or when the registry is cleared if Cleanup_Delete is specified.
        camera.RegisterConfiguration( new CAcquireContinuousConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete );

        // The camera's Open() method calls the configuration handler's OnOpened() method that
        // applies the required parameter modifications.
        camera.Open();

        // The registered configuration event handler has done its parametrization now.
        // Additional parameters could be set here.

        // Grab some images for demonstration.
        camera.StartGrabbing( c_countOfImagesToGrab );
        while (camera.IsGrabbing())
        {
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );
        }

        // Close the camera.
        camera.Close();



        cout << "Grab using software trigger mode:" << endl << endl;

        // Register the standard configuration event handler for setting up the camera for software
        // triggering.
        // The current configuration is replaced by the software trigger configuration by setting the
        // registration mode to RegistrationMode_ReplaceAll.
        camera.RegisterConfiguration( new CSoftwareTriggerConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete );

        // StartGrabbing() calls the camera's Open() automatically if the camera is not open yet.
        // The Open method calls the configuration handler's OnOpened() method that
        // sets the required parameters for enabling software triggering.

        // Grab some images for demonstration.
        camera.StartGrabbing( c_countOfImagesToGrab );
        while (camera.IsGrabbing())
        {
            // Execute the software trigger. The call waits up to 1000 ms for the camera
            // to be ready to be triggered.
            camera.WaitForFrameTriggerReady( 1000, TimeoutHandling_ThrowException );
            camera.ExecuteSoftwareTrigger();
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );
        }
        // StopGrabbing() is called from RetrieveResult if the number of images
        // to grab has been reached. Since the camera was opened by StartGrabbing(),
        // it is closed by StopGrabbing().

        // The CSoftwareTriggerConfiguration, like all standard configurations, is provided as a header file.
        // The source code can be copied and modified to meet application specific needs, e.g.
        // the CSoftwareTriggerConfiguration class could easily be changed into a hardware trigger configuration.



        cout << "Grab using single frame acquisition:" << endl << endl;

        // Register the standard configuration event handler for configuring single frame acquisition.
        // The previous configuration is removed by setting the registration mode to RegistrationMode_ReplaceAll.
        camera.RegisterConfiguration( new CAcquireSingleFrameConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete );

        // GrabOne calls StartGrabbing and StopGrabbing internally.
        // As seen above Open() is called by StartGrabbing and
        // the OnOpened() method of the CAcquireSingleFrameConfiguration handler is called.
        camera.GrabOne( 5000, ptrGrabResult );

        // To continuously grab single images, it is much more efficient to open the camera before grabbing.
        // Note: The software trigger mode (see above) should be used for grabbing single images if you want to maximize frame rate.

        // Now, the camera parameters are applied in the OnOpened method of the configuration object.
        camera.Open();

        // Additional parameters could be set here.

        // Grab some images for demonstration.
        camera.GrabOne( 5000, ptrGrabResult );
        camera.GrabOne( 5000, ptrGrabResult );
        camera.GrabOne( 5000, ptrGrabResult );

        // Close the camera.
        camera.Close();



        cout << "Grab using multiple configuration objects:" << endl << endl;

        // Register the standard event handler for configuring single frame acquisition.
        // The current configuration is replaced by setting the registration mode to RegistrationMode_ReplaceAll.
        camera.RegisterConfiguration( new CAcquireSingleFrameConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete );

        // Register an additional configuration handler to set the image format and adjust the AOI.
        // By setting the registration mode to RegistrationMode_Append, the configuration handler is added instead of replacing
        // the already registered configuration handler.
        camera.RegisterConfiguration( new CPixelFormatAndAoiConfiguration, RegistrationMode_Append, Cleanup_Delete );

        // Create an event printer on the heap.
        CConfigurationEventPrinter* pEventPrinterObject = new CConfigurationEventPrinter;
        // Register the handler object and define Cleanup_None so that it is not deleted by the camera object.
        // It must be ensured that the configuration handler "lives" at least until it is deregistered!
        camera.RegisterConfiguration( pEventPrinterObject, RegistrationMode_Append, Cleanup_None );

        // Grab an image for demonstration. Configuration events are printed.
        cout << endl << "Grab, configuration events are printed:" << endl << endl;
        camera.GrabOne( 5000, ptrGrabResult );

        // Deregister the event handler.
        camera.DeregisterConfiguration( pEventPrinterObject );
        // The event handler can now be deleted.
        delete pEventPrinterObject;
        pEventPrinterObject = NULL;

        // Grab an image for demonstration. Configuration events are not printed.
        cout << endl << "Grab, configuration events are not printed:" << endl << endl;
        camera.GrabOne( 5000, ptrGrabResult );
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
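
As the comments in the sample note, the standard configurations are provided as header files that can be copied and adapted, e.g., to create a hardware trigger configuration. The following minimal sketch shows the idea; the class name and the use of Line1 as the trigger source are illustrative assumptions, not part of the sample.

// Sketch only: a custom hardware trigger configuration handler.
// The class name and the Line1 trigger source are illustrative assumptions.
#include <pylon/ConfigurationEventHandler.h>
#include <pylon/ParameterIncludes.h>

class CHardwareTriggerConfiguration : public Pylon::CConfigurationEventHandler
{
public:
    void OnOpened( Pylon::CInstantCamera& camera )
    {
        GenApi::INodeMap& nodemap = camera.GetNodeMap();

        // Select the frame start trigger, switch it on, and use Line1 as the source.
        Pylon::CEnumParameter( nodemap, "TriggerSelector" ).SetValue( "FrameStart" );
        Pylon::CEnumParameter( nodemap, "TriggerMode" ).SetValue( "On" );
        Pylon::CEnumParameter( nodemap, "TriggerSource" ).SetValue( "Line1" );
    }
};

Such a handler is registered like any standard configuration, e.g., camera.RegisterConfiguration( new CHardwareTriggerConfiguration, RegistrationMode_ReplaceAll, Cleanup_Delete ).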

ParametrizeCamera_GenericParameterAccess#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// ParametrizeCamera_GenericParameterAccess.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    For camera configuration and for accessing other parameters, the pylon API
    uses the technologies defined by the GenICam standard hosted by the
    European Machine Vision Association (EMVA). The GenICam specification
    (http://www.GenICam.org) defines a format for camera description files.
    These files describe the configuration interface of GenICam compliant cameras.
    The description files are written in XML (eXtensible Markup Language) and
    describe camera registers, their interdependencies, and all other
    information needed to access high-level features such as Gain,
    Exposure Time, or Image Format by means of low-level register read and
    write operations.

    The elements of a camera description file are represented as software
    objects called Nodes. For example, a node can represent a single camera
    register, a camera parameter such as Gain, a set of available parameter
    values, etc. Each node implements the GenApi::INode interface.

    The nodes are linked together by different relationships as explained in the
    GenICam standard document available at www.GenICam.org. The complete set of
    nodes is stored in a data structure called Node Map.
    At runtime, a Node Map is instantiated from an XML description.

    This sample shows the 'generic' approach for configuring a camera
    using the GenApi nodemaps represented by the GenApi::INodeMap interface.

    The names and types of the parameter nodes can be found in the Basler pylon Programmer's Guide
    and API Reference, in the camera User's Manual, in the camera's document about
    Register Structure and Access Methods (if applicable), and by using the pylon Viewer tool.

    See also the ParametrizeCamera_NativeParameterAccess sample for the 'native'
    approach for configuring a camera.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using GenApi objects.
using namespace GenApi;

// Namespace for using cout.
using namespace std;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the camera found first.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        INodeMap& nodemap = camera.GetNodeMap();

        // Open the camera for accessing the parameters.
        camera.Open();

        // Get camera device information.
        cout << "Camera Device Information" << endl
            << "=========================" << endl;
        cout << "Vendor           : "
            << CStringParameter( nodemap, "DeviceVendorName" ).GetValue() << endl;
        cout << "Model            : "
            << CStringParameter( nodemap, "DeviceModelName" ).GetValue() << endl;
        cout << "Firmware version : "
            << CStringParameter( nodemap, "DeviceFirmwareVersion" ).GetValue() << endl << endl;

        // Camera settings.
        cout << "Camera Device Settings" << endl
            << "======================" << endl;


        // Set the AOI:

        // Get the integer nodes describing the AOI.
        CIntegerParameter offsetX( nodemap, "OffsetX" );
        CIntegerParameter offsetY( nodemap, "OffsetY" );
        CIntegerParameter width( nodemap, "Width" );
        CIntegerParameter height( nodemap, "Height" );

        // On some cameras, the offsets are read-only.
        // Therefore, we must use "Try" functions that only perform the action
        // when parameters are writable. Otherwise, we would get an exception.
        offsetX.TrySetToMinimum();
        offsetY.TrySetToMinimum();

        // Some properties have restrictions.
        // We use API functions that automatically perform value corrections.
        // Alternatively, you can use GetInc() / GetMin() / GetMax() to make sure you set a valid value.
        width.SetValue( 202, IntegerValueCorrection_Nearest );
        height.SetValue( 101, IntegerValueCorrection_Nearest );

        cout << "OffsetX          : " << offsetX.GetValue() << endl;
        cout << "OffsetY          : " << offsetY.GetValue() << endl;
        cout << "Width            : " << width.GetValue() << endl;
        cout << "Height           : " << height.GetValue() << endl;



        // Access the PixelFormat enumeration type node.
        CEnumParameter pixelFormat( nodemap, "PixelFormat" );

        // Remember the current pixel format.
        String_t oldPixelFormat = pixelFormat.GetValue();
        cout << "Old PixelFormat  : " << oldPixelFormat << endl;

        // Set the pixel format to Mono8 if available.
        if (pixelFormat.CanSetValue( "Mono8" ))
        {
            pixelFormat.SetValue( "Mono8" );
            cout << "New PixelFormat  : " << pixelFormat.GetValue() << endl;
        }


        // Set the new gain to 50% ->  Min + ((Max-Min) / 2).
        //
        // Note: Some newer camera models may have auto functions enabled.
        //       To be able to set the gain value to a specific value
        //       the Gain Auto function must be disabled first.
        // Access the enumeration type node GainAuto.
        // We use a "Try" function that only performs the action if the parameter is writable.
        CEnumParameter gainAuto( nodemap, "GainAuto" );
        gainAuto.TrySetValue( "Off" );


        // Check to see which Standard Feature Naming Convention (SFNC) is used by the camera device.
        if (camera.GetSfncVersion() >= Sfnc_2_0_0)
        {
            // Access the Gain float type node. This node is available for USB camera devices.
            // USB camera devices are compliant to SFNC version 2.0.
            CFloatParameter gain( nodemap, "Gain" );
            if (gain.TrySetValuePercentOfRange( 50.0 ))
            {
                cout << "Gain (50%)       : " << gain.GetValue() << " (Min: " << gain.GetMin() << "; Max: " << gain.GetMax() << ")" << endl;
            }
        }
        else
        {
            // Access the GainRaw integer type node. This node is available for GigE camera devices.
            CIntegerParameter gainRaw( nodemap, "GainRaw" );
            if (gainRaw.TrySetValuePercentOfRange( 50.0 ))
            {
                cout << "Gain (50%)       : " << gainRaw.GetValue() << " (Min: " << gainRaw.GetMin() << "; Max: " << gainRaw.GetMax() << "; Inc: " << gainRaw.GetInc() << ")" << endl;
            }
        }


        // Restore the old pixel format.
        pixelFormat.SetValue( oldPixelFormat );

        // Close the camera.
        camera.Close();
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
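
The generic parameter classes can also be used to find out which values a device currently offers. The following minimal sketch is not part of the sample; it assumes the opened camera and the nodemap variable from above.

// Sketch only: list all currently settable pixel formats via generic access.
CEnumParameter pixelFormat( nodemap, "PixelFormat" );
StringList_t settableValues;
pixelFormat.GetSettableValues( settableValues );
for (size_t i = 0; i < settableValues.size(); ++i)
{
    cout << settableValues[i] << endl;
}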

ParametrizeCamera_LoadAndSave#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// ParametrizeCamera_LoadAndSave.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample application demonstrates how to save or load the features of a camera
    to or from a file.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;

// The name of the pylon feature stream file.
const char Filename[] = "NodeMap.pfs";


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the camera device found first.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Open the camera.
        camera.Open();

        cout << "Saving camera's node map to file..." << endl;
        // Save the content of the camera's node map into the file.
        CFeaturePersistence::Save( Filename, &camera.GetNodeMap() );

        cout << "Reading file back to camera's node map..." << endl;
        // Just for demonstration, read the content of the file back to the camera's node map with validation enabled.
        CFeaturePersistence::Load( Filename, &camera.GetNodeMap(), true );

        // Close the camera.
        camera.Close();
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
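
Besides files, CFeaturePersistence can also write the features to and read them from a string, which is convenient when camera settings are stored in an application's own configuration data. A minimal sketch, assuming the same opened camera as in the sample:

// Sketch only: persist the node map in memory instead of in a file.
String_t features;
CFeaturePersistence::SaveToString( features, &camera.GetNodeMap() );
// ... store 'features' wherever the application keeps its settings ...
CFeaturePersistence::LoadFromString( features, &camera.GetNodeMap(), true );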

ParametrizeCamera_LookupTable#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// ParametrizeCamera_LookupTable.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample program demonstrates the use of the Luminance Lookup Table feature.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using cout.
using namespace std;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the first found camera device.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        cout << "Opening camera...";

        // Open the camera.
        camera.Open();

        cout << "done" << endl;

        cout << "Writing LUT....";

        // Select the lookup table using the LUTSelector.
        camera.LUTSelector.SetValue( LUTSelector_Luminance );

        // Some cameras have 10 bit and others have 12 bit lookup tables, so determine
        // the type of the lookup table for the current device.
        const int nValues = (int) camera.LUTIndex.GetMax() + 1;
        int inc;
        if (nValues == 4096) // 12 bit LUT.
            inc = 8;
        else if (nValues == 1024) // 10 bit LUT.
            inc = 2;
        else
        {
            throw RUNTIME_EXCEPTION( "Type of LUT is not supported by this sample." );
        }

        // Use the LUTIndex and LUTValue parameters to access the lookup table values.
        // The following lookup table causes an inversion of the sensor values.

        for (int i = 0; i < nValues; i += inc)
        {
            camera.LUTIndex.SetValue( i );
            camera.LUTValue.SetValue( nValues - 1 - i );
        }

        cout << "done" << endl;

        // Enable the lookup table.
        camera.LUTEnable.SetValue( true );

        // Grab and process images here.
        // ...

        // Disable the lookup table.
        camera.LUTEnable.SetValue( false );

        // Close the camera.
        camera.Close();
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
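
The inversion written by the sample is only one possible mapping. Any transfer curve can be written using the same LUTIndex/LUTValue access pattern, for example, a gamma-style curve. The following sketch is not part of the sample; it assumes the opened camera and the nValues and inc values determined above, and the gamma value is an arbitrary illustration.

// Sketch only: write a gamma-style transfer curve instead of an inversion.
// Requires <cmath> for pow(); 'gamma' is an arbitrary illustrative value.
const double gamma = 0.5;
for (int i = 0; i < nValues; i += inc)
{
    int value = (int) (pow( (double) i / (nValues - 1), gamma ) * (nValues - 1) + 0.5);
    camera.LUTIndex.SetValue( i );
    camera.LUTValue.SetValue( value );
}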

ParametrizeCamera_NativeParameterAccess#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// ParametrizeCamera_NativeParameterAccess.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    For camera configuration and for accessing other parameters, the pylon API
    uses the technologies defined by the GenICam standard hosted by the
    European Machine Vision Association (EMVA). The GenICam specification
    (http://www.GenICam.org) defines a format for camera description files.
    These files describe the configuration interface of GenICam compliant cameras.
    The description files are written in XML (eXtensible Markup Language) and
    describe camera registers, their interdependencies, and all other
    information needed to access high-level features such as Gain,
    Exposure Time, or Image Format by means of low-level register read and
    write operations.

    The elements of a camera description file are represented as software
    objects called Nodes. For example, a node can represent a single camera
    register, a camera parameter such as Gain, a set of available parameter
    values, etc. Each node implements the GenApi::INode interface.

    Using the code generators provided by GenICam's GenApi module,
    a programming interface is created from a camera description file.
    Thereby, a member is provided for each parameter that is available for the camera device.
    The programming interface is exported by the device-specific Instant Camera classes.
    This is the easiest way to access parameters.

    This sample shows the 'native' approach for configuring a camera
    using device-specific instant camera classes.

    See also the ParametrizeCamera_GenericParameterAccess sample for the 'generic'
    approach for configuring a camera.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using cout.
using namespace std;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the first found camera device.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Open the camera for accessing the parameters.
        camera.Open();

        // Get camera device information.
        cout << "Camera Device Information" << endl
            << "=========================" << endl;
        cout << "Vendor           : "
            << camera.DeviceVendorName.GetValue() << endl;
        cout << "Model            : "
            << camera.DeviceModelName.GetValue() << endl;
        cout << "Firmware version : "
            << camera.DeviceFirmwareVersion.GetValue() << endl << endl;

        // Camera settings.
        cout << "Camera Device Settings" << endl
            << "======================" << endl;



        // Set the AOI:

        // On some cameras, the offsets are read-only.
        // Therefore, we must use "Try" functions that only perform the action
        // when parameters are writable. Otherwise, we would get an exception.
        camera.OffsetX.TrySetToMinimum();
        camera.OffsetY.TrySetToMinimum();

        // Some properties have restrictions.
        // We use API functions that automatically perform value corrections.
        // Alternatively, you can use GetInc() / GetMin() / GetMax() to make sure you set a valid value.
        camera.Width.SetValue( 202, IntegerValueCorrection_Nearest );
        camera.Height.SetValue( 101, IntegerValueCorrection_Nearest );

        cout << "OffsetX          : " << camera.OffsetX.GetValue() << endl;
        cout << "OffsetY          : " << camera.OffsetY.GetValue() << endl;
        cout << "Width            : " << camera.Width.GetValue() << endl;
        cout << "Height           : " << camera.Height.GetValue() << endl;


        // Remember the current pixel format.
        PixelFormatEnums oldPixelFormat = camera.PixelFormat.GetValue();
        cout << "Old PixelFormat  : " << camera.PixelFormat.ToString() << " (" << oldPixelFormat << ")" << endl;

        // Set pixel format to Mono8 if available.
        if (camera.PixelFormat.CanSetValue( PixelFormat_Mono8 ))
        {
            camera.PixelFormat.SetValue( PixelFormat_Mono8 );
            cout << "New PixelFormat  : " << camera.PixelFormat.ToString() << " (" << camera.PixelFormat.GetValue() << ")" << endl;
        }

        // Set the new gain to 50% ->  Min + ((Max-Min) / 2).
        //
        // Note: Some newer camera models may have auto functions enabled.
        //       To be able to set the gain value to a specific value
        //       the Gain Auto function must be disabled first.
        // Access the enumeration type node GainAuto.
        // We use a "Try" function that only performs the action if the parameter is writable.
        camera.GainAuto.TrySetValue( GainAuto_Off );

        if (camera.GetSfncVersion() >= Sfnc_2_0_0) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
        {
            if (camera.Gain.TrySetValuePercentOfRange( 50.0 ))
            {
                cout << "Gain (50%)       : " << camera.Gain.GetValue() << " (Min: " << camera.Gain.GetMin() << "; Max: " << camera.Gain.GetMax() << ")" << endl;
            }
        }
        else
        {
            if (camera.GainRaw.TrySetValuePercentOfRange( 50.0 ))
            {
                cout << "Gain (50%)       : " << camera.GainRaw.GetValue() << " (Min: " << camera.GainRaw.GetMin() << "; Max: " << camera.GainRaw.GetMax() << "; Inc: " << camera.GainRaw.GetInc() << ")" << endl;
            }
        }


        // Restore the old pixel format.
        camera.PixelFormat.SetValue( oldPixelFormat );

        // Close the camera.
        camera.Close();
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
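
For comparison, the AOI adjustment shown above maps one-to-one to the generic node map access demonstrated in ParametrizeCamera_GenericParameterAccess. A minimal sketch, assuming the same opened camera:

// Sketch only: the same width adjustment using generic parameter access.
CIntegerParameter width( camera.GetNodeMap(), "Width" );
width.SetValue( 202, IntegerValueCorrection_Nearest );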

ParametrizeCamera_SerialCommunication#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// ParametrizeCamera_SerialCommunication.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample program demonstrates the use of the UART (asynchronous serial communication) feature
    that is available on some camera models, e.g., ace 2 Pro cameras. This allows you to establish 
    serial communication between a host and an external device through the camera.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Include vector for transmitting/receiving data buffers.
#include <vector>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using GenApi objects.
using namespace GenApi;

// Namespace for using cout.
using namespace std;

// Data type for storing bytes for serial transmission.
typedef vector<uint8_t> SerialDataBuffer_t;


// Transmit data:
// -----------------
//
// 1. Write the data to BslSerialTransferBuffer (for GigE, you need to write multiples of 4 bytes).
// 2. Write the real (not the padded) length (in bytes) of data to BslSerialTransferLength.
// 3. Execute BslSerialTransmit.
// 4. Wait for BslSerialTxFifoEmpty to become true before further transmissions.
// 5. Repeat until all data has been transmitted.

void SerialTransmit( CBaslerUniversalInstantCamera &camera, const SerialDataBuffer_t &transmitData )
{
    // Get the max buffer size for transmission.
    const size_t max_tx_size = static_cast<size_t>(camera.BslSerialTransferBuffer.GetLength());

    SerialDataBuffer_t::const_iterator currentPosition = transmitData.begin();
    while (currentPosition != transmitData.end())
    {
        // Calculate the number of bytes that can be transferred at once.
        size_t bytesToSend = static_cast<size_t>(transmitData.end() - currentPosition);
        size_t transferLength = min( bytesToSend, max_tx_size );

        // Create and fill transfer buffer with the calculated length.
        SerialDataBuffer_t dataBuffer( currentPosition, currentPosition + transferLength );

        // As GigE devices only allow multiples of 4 bytes for data transfer, add padding to the buffer if necessary.
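        // For example, a 5-byte payload is padded to 8 bytes: 5 + (4 - (5 % 4)) % 4 = 8.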
        size_t paddedLength = transferLength + (4 - (transferLength % 4)) % 4;
        dataBuffer.resize( paddedLength );

        // Write padded data to the camera buffer and set transfer length to the unpadded value.
        camera.BslSerialTransferBuffer.Set( dataBuffer.data(), paddedLength );
        camera.BslSerialTransferLength.SetValue( transferLength );

        // Start the transmission.
        camera.BslSerialTransmit.Execute();
        currentPosition += transferLength;

        // Poll once per millisecond (up to 100 times) until the FIFO is empty.
        size_t count = 0;
        while (!camera.BslSerialTxFifoEmpty.GetValue() && (count++ < 100))
        {
            WaitObject::Sleep( 1 );
        }

        // Check for overflow status (updated by the transmit command).
        if (camera.BslSerialTxFifoOverflow.GetValue())
        {
            cerr << "WARNING: Serial transmit overflow!" << endl;
        }
    }
}


// Receive data:
// -------------
//
// 1. Execute BslSerialReceive.
// 2. Check for flags in BslSerialRxFifoOverflow, BslSerialRxParityError, and BslSerialRxStopBitError.
// 3. Read BslSerialTransferLength to obtain the length of received data.
// 4. Read BslSerialTransferLength bytes from BslSerialTransferBuffer (for GigE, you need to read multiples of 4 bytes).
// 5. Repeat if BslSerialTransferLength was not 0.

SerialDataBuffer_t SerialReceive( CBaslerUniversalInstantCamera &camera )
{
    SerialDataBuffer_t receiveData; // Buffer for receiving data.
    size_t bytesReceived = 0;       // Number of bytes used by the complete transmission.
    size_t transferLength = 0;      // Number of bytes received at once.
    do
    {
        // Receive data from FIFO.
        camera.BslSerialReceive.Execute();

        // Check for overflow of receive FIFO. If this is set, data was lost!
        if (camera.BslSerialRxFifoOverflow.GetValue())
        {
            cerr << "WARNING: Receive overflow detected!" << endl;
        }

        // Check for a receive parity error. If this is set, data may be incorrect!
        if (camera.BslSerialRxParityError.GetValue())
        {
            cerr << "WARNING: Parity error in received data stream detected!" << endl;
        }

        // Check for a stop bit error. If this is set, data may be incorrect!
        // Also, this bit is normally set when a break condition occurred.
        if (camera.BslSerialRxStopBitError.GetValue())
        {
            cerr << "WARNING: Stop bit error in received data stream detected!" << endl;
        }

        // Check how many bytes were received and fetch the data from the transfer buffer.
        transferLength = static_cast<size_t>(camera.BslSerialTransferLength.GetValue());
        if (transferLength)
        {
            // GigE devices only allow multiples of 4 bytes for data transfer. Add padding to the buffer if necessary.
            size_t paddedLength = transferLength + (4 - (transferLength % 4)) % 4;
            receiveData.resize( bytesReceived + paddedLength );

            // Read padded data but only count unpadded length.
            camera.BslSerialTransferBuffer.Get( &receiveData[bytesReceived], paddedLength );
            bytesReceived += transferLength;
        }
    } while (transferLength);

    // Finally, remove padding from received data.
    receiveData.resize( bytesReceived );

    return receiveData;
}


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized. 
    PylonInitialize();

    try
    {
        // Create an instant camera object with the first camera device found.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        cout << "Opening camera...";

        // Open the camera.
        camera.Open();

        cout << "done" << endl;

        // Check whether the device supports asynchronous serial communication.
        if (!camera.BslSerialReceive.IsWritable() || !camera.BslSerialTransmit.IsWritable())
        {
            throw RUNTIME_EXCEPTION( "The device doesn't support asynchronous serial communication." );
        }

        // === Configure I/O ===

        // Change this to 'false' to use the camera's digital I/O lines for communication.
        // This requires a UART device to be attached to the camera. Leave the switch
        // 'true' to demonstrate the UART in loopback mode.
        const bool loopback = true;
        if (loopback)
        {
            // Loopback: Simply use SerialTx as source for receive.
            cout << "Configure loopback for serial communication...";

            camera.BslSerialRxSource = BslSerialRxSource_SerialTx;

            cout << "done" << endl;
        }
        else
        {
            // On ace 2 cameras, lines 2 and 3 are GPIO lines.
            // Do not use the opto-coupled input for UART communications!
            cout << "Configure GPIO lines for serial communication..." << endl;

            // Use line 2 as TX (Output).
            camera.LineSelector = LineSelector_Line2;
            camera.LineMode = LineMode_Output;
            camera.LineSource = LineSource_SerialTx;

            // Use line 3 as RX (Input).
            camera.LineSelector = LineSelector_Line3;
            camera.LineMode = LineMode_Input;

            camera.BslSerialRxSource = BslSerialRxSource_Line3;

            cout << "done" << endl;
        }

        // === Configure the serial communication module (115200 baud - 8n1) ===
        cout << "Configure UART to 115200 8N1...";
        camera.BslSerialBaudRate = BslSerialBaudRate_Baud115200;
        camera.BslSerialNumberOfDataBits = BslSerialNumberOfDataBits_Bits8;
        camera.BslSerialParity = BslSerialParity_None;
        camera.BslSerialNumberOfStopBits = BslSerialNumberOfStopBits_Bits1;
        cout << "done" << endl;

        // === Transmit data ===
        const std::string message( "For documentation, see: https://docs.baslerweb.com/serial-communication" );
        const SerialDataBuffer_t transmitData( message.begin(), message.end() );

        cout << "Transmit: " << "'" << message << "'" << endl;
        SerialTransmit( camera, transmitData );
        cout << "Transmit: done!" << endl;

        // === Receive data ===
        // Note: In loopback mode, the transmitted message is too long and the RX FIFO will be in an overflow condition!
        cout << "Receive: Starting..." << endl;
        if (loopback)
        {
            cout << "Note: In loopback mode, the message is too long for the receive FIFO and an overflow message will appear! \n";
            cout << "Note: The received message seen here will be truncated!" << endl;
        }
        SerialDataBuffer_t receivedData = SerialReceive( camera );
        cout << "Receive: " << "'" << std::string( receivedData.begin(), receivedData.end() ) << "'" << endl;

        // === Transmit & check break condition ===
        cout << "Receive break: " << camera.BslSerialRxBreak.GetValue() << endl;
        camera.BslSerialRxBreakReset.Execute();

        cout << "Set break condition...";
        camera.BslSerialTxBreak.SetValue( true );
        WaitObject::Sleep( 10 );
        camera.BslSerialTxBreak.SetValue( false );
        cout << "done!" << endl;

        cout << "Receive break: " << camera.BslSerialRxBreak.GetValue() << endl;
        camera.BslSerialRxBreakReset.Execute();

        // After a break, the receive FIFO contains errors, so flush the FIFO.
        cout << "Note: After a break condition framing error flags will probably be set!" << endl;
        SerialReceive( camera );

        // Close the camera.
        camera.Close();
    }
    catch (const GenericException &e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources. 
    PylonTerminate();

    return exitCode;
}
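
With the SerialTransmit and SerialReceive helpers from this sample, a simple request/response exchange with an external UART device (loopback disabled) could look like the following sketch. The command string and the 50 ms wait are illustrative assumptions.

// Sketch only: send a command to an external device and fetch the reply.
const std::string command( "STATUS?\r\n" ); // Illustrative command.
SerialTransmit( camera, SerialDataBuffer_t( command.begin(), command.end() ) );

// Give the external device some time to answer, then read the reply.
WaitObject::Sleep( 50 );
SerialDataBuffer_t reply = SerialReceive( camera );
cout << "Reply: '" << std::string( reply.begin(), reply.end() ) << "'" << endl;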

ParametrizeCamera_Shading#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// ParametrizeCamera_Shading.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample demonstrates how to calculate and upload a gain shading
    set to a Basler runner line scan camera.

    This sample only applies to Basler runner cameras.
*/

// For use with Visual Studio >= 2005, disable deprecate warnings caused by the fopen function.
#define _CRT_SECURE_NO_WARNINGS

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// For DBL_MAX.
#include <float.h>
#include <errno.h>

#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable: 4244)
#endif

// For file upload.
#include <GenApi/Filestream.h>

#ifdef _MSC_VER
#pragma warning(pop)
#endif

// Namespace for using pylon objects.
using namespace Pylon;

// Namespaces for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;
using namespace Basler_UniversalStreamParams;

// Namespace for using cout.
using namespace std;

////////////////////////////////////////////////////////////////////////////////

// Prototypes for functions used in 'main'.

void CreateShadingData( CBaslerUniversalInstantCamera& camera,
                        const char* pLocalFilename );
void UploadFile( CBaslerUniversalInstantCamera& camera,
                 const char* pCameraFilename,
                 const char* pLocalFilename );
void CheckShadingData( CBaslerUniversalInstantCamera& camera );

////////////////////////////////////////////////////////////////////////////////

// Name of the file where we will store the shading data on the local disk.
static const char LocalFilename[] = "ShadingData.bin";


#define USE_SHADING_SET_1   // Define which shading set we are going to use.

#if defined (USE_SHADING_SET_1)

// Name of the file in the camera where the shading data will be stored.
static const char CameraFilename[] = "UserGainShading1";

// Name of the shading set that corresponds to 'CameraFilename'.
static ShadingSetSelectorEnums ShadingSet = ShadingSetSelector_UserShadingSet1;

#elif defined (USE_SHADING_SET_2)

// Name of the file in the camera where shading data will be stored.
static const char CameraFilename[] = "UserGainShading2";

// Name of the shading set that corresponds to 'CameraFilename'.
static ShadingSetSelectorEnums ShadingSet = ShadingSetSelector_UserShadingSet2;

#else
#error No shading set defined!
#endif

////////////////////////////////////////////////////////////////////////////////

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Only look for GigE cameras.
        CDeviceInfo info;
        info.SetDeviceClass( Pylon::BaslerGigEDeviceClass );

        // Create an instant camera object for the GigE camera found first.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice( info ) );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Register the standard configuration event handler for configuring single frame acquisition.
        // Setting the registration mode to RegistrationMode_ReplaceAll removes all previously registered handlers,
        // thereby replacing the default configuration.
        camera.RegisterConfiguration( new CAcquireSingleFrameConfiguration(), RegistrationMode_ReplaceAll, Cleanup_Delete );

        // Open the camera.
        camera.Open();

        // Only line scan cameras support gain shading.
        if (camera.DeviceScanType.GetValue() == DeviceScanType_Linescan)
        {
            // Here, we assume that the conditions for exposure (illumination,
            // exposure time, etc.) have been set up to deliver images of
            // uniform intensity (gray value), but that the acquired images are not uniform.
            // We calculate the gain shading data so that the observed non-uniformity
            // will be compensated when the data are applied.
            // These data are saved to a local file.
            CreateShadingData( camera, LocalFilename );

            // Transfer calculated gain shading data from the local file to the camera.
            UploadFile( camera, CameraFilename, LocalFilename );

            // Test to what extent the non-uniformity has been compensated.
            CheckShadingData( camera );
        }
        else
        {
            cerr << "Only line scan cameras support gain shading." << endl;
        }

        // Close the camera.
        camera.Close();
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}

////////////////////////////////////////////////////////////////////////////////
//
// In the following code, the format of the arrays containing intensities
// or coefficients is as follows:
//
//      If the pixel format is PixelFormat_Mono8:
//      ArraySize == Width
//      [Value_x0, Value_x1, Value_x2, ... , Value_x(Width - 1)]
//
//      If the pixel format is PixelFormat_RGB8Packed:
//      ArraySize ==  3 * Width
//      [ValueRed_x0,   ValueRed_x1,   ... , ValueRed_x(Width - 1),
//       ValueGreen_x0, ValueGreen_x1, ... , ValueGreen_x(Width - 1),
//       ValueBlue_x0,  ValueBlue_x1,  ... , ValueBlue_x(Width - 1)]
//
////////////////////////////////////////////////////////////////////////////////


//
// Grab a frame and store the average intensity of the pixels in each column
// in 'Intensities'.
//
void AverageLines( CBaslerUniversalInstantCamera& camera,
                   uint32_t Width,         // Width of frame (number of pixels in each line).
                   uint32_t Height,        // Height of frame (number of lines in each frame).
                   uint32_t NumCoeffs,     // Number of coefficients.
                   double* Intensities )    // Destination array.
{
    for (uint32_t x = 0; x < NumCoeffs; x++)
    {
        Intensities[x] = 0.0;
    }

    cout << "Grab frame for averaging." << endl;

    CGrabResultPtr ptrGrabResult;
    camera.GrabOne( 5000, ptrGrabResult );
    uint8_t* Buffer = static_cast<uint8_t*>(ptrGrabResult->GetBuffer());

    if (NumCoeffs == 3 * Width)
    {
        //
        // RGB mode.
        //
        for (uint32_t x = 0; x < Width; x++)
        {
            for (uint32_t y = 0; y < Height; y++)
            {
                // Add intensities.
                uint32_t idx = 3 * (y * Width + x);
                Intensities[x] += Buffer[idx];
                Intensities[x + Width] += Buffer[idx + 1];
                Intensities[x + 2 * Width] += Buffer[idx + 2];
            }
        }
    }
    else
    {
        //
        // Mono mode.
        //
        for (uint32_t x = 0; x < Width; x++)
        {
            for (uint32_t y = 0; y < Height; y++)
            {
                // Add intensities.
                Intensities[x] += Buffer[y * Width + x];
            }
        }
    }
    double scale = 1.0 / double( Height );
    for (uint32_t x = 0; x < NumCoeffs; x++)
    {
        // Calculate average intensities.
        Intensities[x] *= scale;
    }
}

////////////////////////////////////////////////////////////////////////////////
//
// Take the average intensities from 'pDblCoeff'. Identify the minimum and maximum
// average intensity. For each intensity, calculate a multiplier so that
// the product of the multiplier and the intensity equals the maximum intensity (the
// multiplier for the maximum intensity is 1). Store the multipliers in 'pDblCoeff'.
//
void CalculateCoeffs( uint32_t  Width,         // Width of image (number of pixels in each line).
                      uint32_t  /*Height*/,    // Height of image (number of lines in each frame).
                      uint32_t  NumCoeffs,     // Number of shading coefficients.
                      double* pDblCoeff )      // In: averaged intensities.
                                               // Out: multiplier values.
{
    if (NumCoeffs == 3 * Width)
    {
        //
        // RGB mode.
        //
        double MinR = DBL_MAX;
        double MinG = DBL_MAX;
        double MinB = DBL_MAX;
        double MaxR = -DBL_MAX;
        double MaxG = -DBL_MAX;
        double MaxB = -DBL_MAX;

        for (uint32_t x = 0; x < Width; x++)
        {
            // Determine min and max intensity.
            if (pDblCoeff[x] < MinR)
            {
                MinR = pDblCoeff[x];
            }

            if (pDblCoeff[x] > MaxR)
            {
                MaxR = pDblCoeff[x];
            }

            if (pDblCoeff[x + Width] < MinG)
            {
                MinG = pDblCoeff[x + Width];
            }

            if (pDblCoeff[x + Width] > MaxG)
            {
                MaxG = pDblCoeff[x + Width];
            }

            if (pDblCoeff[x + 2 * Width] < MinB)
            {
                MinB = pDblCoeff[x + 2 * Width];
            }

            if (pDblCoeff[x + 2 * Width] > MaxB)
            {
                MaxB = pDblCoeff[x + 2 * Width];
            }
        }
        cout << "MaxR = " << (MaxR / MinR) << " * MinR" << endl;
        cout << "MaxG = " << (MaxG / MinG) << " * MinG" << endl;
        cout << "MaxB = " << (MaxB / MinB) << " * MinB" << endl;

        // Scale to maximum intensity.
        for (uint32_t x = 0; x < Width; x++)
        {
            pDblCoeff[x] = MaxR / pDblCoeff[x];
            pDblCoeff[x + Width] = MaxG / pDblCoeff[x + Width];
            pDblCoeff[x + 2 * Width] = MaxB / pDblCoeff[x + 2 * Width];
        }
    }
    else
    {
        //
        // Mono mode.
        //

        double Min = DBL_MAX;
        double Max = -DBL_MAX;
        for (uint32_t x = 0; x < Width; x++)
        {
            // Determine min and max intensity.
            if (pDblCoeff[x] < Min)
            {
                Min = pDblCoeff[x];
            }

            if (pDblCoeff[x] > Max)
            {
                Max = pDblCoeff[x];
            }
        }

        cout << "Max = " << (Max / Min) << " * Min" << endl;

        // Scale to maximum intensity.
        for (uint32_t x = 0; x < Width; x++)
        {
            pDblCoeff[x] = Max / pDblCoeff[x];
        }
    }
}

////////////////////////////////////////////////////////////////////////////////

bool SupportsRGB( CBaslerUniversalInstantCamera& camera );


// 'CreateShadingData' assumes that the conditions for exposure (illumination,
// exposure time, etc.) have been set up to deliver images of
// uniform intensity (gray value), but that the acquired images are not uniform.
// We calculate the gain shading data so that the observed non-uniformity
// will be compensated when the data are applied.
// These data are saved to a local file.

void CreateShadingData( CBaslerUniversalInstantCamera& camera, const char* pLocalFilename )
{
    //
    // Prepare camera for grab.
    //

    uint32_t Width = (uint32_t) camera.Width.GetValue();
    uint32_t Height = (uint32_t) camera.Height.GetValue();
    int32_t BytesPerPixel = 1;
    if (SupportsRGB( camera ))
    {
        camera.PixelFormat.SetValue( PixelFormat_RGB8Packed );
        BytesPerPixel = 3;
    }
    else
    {
        camera.PixelFormat.SetValue( PixelFormat_Mono8 );
    }

    // Disable gain shading for calculation.
    camera.ShadingSelector.SetValue( ShadingSelector_GainShading );
    camera.ShadingEnable.SetValue( false );

    //
    // Grab and average images into 'pDblCoeff'.
    //

    uint32_t NumCoeffs = BytesPerPixel * Width;
    double* pDblCoeff = new double[NumCoeffs];
    AverageLines( camera, Width, Height, NumCoeffs, pDblCoeff );

    //
    // Calculate gain shading data.
    //

    // Convert averaged intensities to multipliers.
    CalculateCoeffs( Width, Height, NumCoeffs, pDblCoeff );

    // Convert multipliers to camera format.
    uint32_t* pCoeffs = new uint32_t[NumCoeffs];
    for (uint32_t x = 0; x < NumCoeffs; x++)
    {
        // The multipliers are expressed as 32-bit fixed-point
        // numbers with 16 bits before and 16 bits after
        // the binary point.
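        // Example: a multiplier of 1.5 is encoded as
        // 1.5 * 65536 = 98304 = 0x00018000.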
        uint32_t coeff = uint32_t( pDblCoeff[x] * (1 << 16) );

        // Currently, the maximum multiplier is limited to 3.99998
        // (max register value == 0x0003FFFF).

        if (coeff > 0x0003FFFF)
        {
            static bool PrintMessage = true;
            if (PrintMessage)
            {
                PrintMessage = false;
                cout << "Gain shading had to be clipped." << endl;
            }
            coeff = 0x0003FFFF;
        }

        pCoeffs[x] = coeff;
    }

    delete[] pDblCoeff;

    //
    // Write data to file.
    //
    FILE* fp = fopen( pLocalFilename, "wb" );
    if (fp == NULL)
    {
        RUNTIME_EXCEPTION( "Can not open file '%s'\n", pLocalFilename );
    }

    // Header for gain shading file.
    struct ShadingHeader_t
    {
        unsigned char  version;
        unsigned char  type;
        unsigned char  sensorType;
        unsigned char  lineType;
        unsigned short width;
        unsigned short reserved;
    };

    // Constants used in header.
    static const unsigned char ShadingVersion_1 = 0x5a;
    static const unsigned char ShadingType_Gain = 0xc3;
    static const unsigned char ShadingSensorType_Line = 0x02;
    static const unsigned char ShadingLineType_Single = 0x01;
    static const unsigned char ShadingLineType_Tri = 0x03;

    // Construct header.
    ShadingHeader_t h;
    h.version = ShadingVersion_1;
    h.type = ShadingType_Gain;
    h.sensorType = ShadingSensorType_Line;
    h.lineType = BytesPerPixel == 3 ? ShadingLineType_Tri : ShadingLineType_Single;
    h.width = uint16_t( Width );
    h.reserved = 0;

    // Write shading data to local file.
    fwrite( &h, sizeof( h ), 1, fp );
    fwrite( pCoeffs, sizeof( uint32_t ), NumCoeffs, fp );
    fclose( fp );
    delete[] pCoeffs;
}

////////////////////////////////////////////////////////////////////////////////

// Copy data from a local file to a file in the camera.
void UploadFile( CBaslerUniversalInstantCamera& camera,
                 const char* pCameraFilename,
                 const char* pLocalFilename )
{

    // Open local file.
    FILE* fp = fopen( pLocalFilename, "rb" );
    if (fp == NULL)
    {
        RUNTIME_EXCEPTION( "Can not open file '%s'\n", pLocalFilename );
    }

    // Determine file size.
    fseek( fp, 0, SEEK_END );
    size_t Size = ftell( fp );
    rewind( fp );

    if (Size == 0)
    {
        fclose( fp );
        return;
    }

    // Read data from local file into pBuf.
    char* pBuf = new char[Size];
    size_t read = fread( pBuf, 1, Size, fp );
    fclose( fp );
    if (read != Size)
    {
        RUNTIME_EXCEPTION( "Failed to read from file '%s'\n", pLocalFilename );
    }

    // Transfer data to camera.
    GenApi::ODevFileStream stream( &camera.GetNodeMap(), pCameraFilename );
    stream.write( pBuf, streamsize( Size ) );
    stream.close();

    delete[] pBuf;
}



////////////////////////////////////////////////////////////////////////////////
// Check the success of 'CreateShadingData' and 'UploadFile' by
//     - activating and enabling the uploaded shading data file
//     - grabbing one image
//     - calculating the multipliers again, expecting them to be close to 1.0
void CheckShadingData( CBaslerUniversalInstantCamera& camera )
{
    uint32_t Width = (uint32_t) camera.Width.GetValue();
    uint32_t Height = (uint32_t) camera.Height.GetValue();
    int32_t BytesPerPixel = 1;
    if (SupportsRGB( camera ))
    {
        BytesPerPixel = 3;
    }

    //
    // Activate and enable the gain shading set that was just uploaded.
    //

    camera.ShadingSelector.SetValue( ShadingSelector_GainShading );
    camera.ShadingSetSelector.SetValue( ShadingSet );
    camera.ShadingSetActivate.Execute();
    camera.ShadingEnable.SetValue( true );

    //
    // Grab image and calculate multipliers just to print the new Max/Min ratio.
    //

    uint32_t NumCoeffs = BytesPerPixel * Width;
    double* pDblCoeff = new double[NumCoeffs];
    AverageLines( camera,
                  Width,
                  Height,
                  NumCoeffs,
                  pDblCoeff );
    cout << endl << "After applying shading correction:" << endl;
    CalculateCoeffs( Width, Height, NumCoeffs, pDblCoeff );
    delete[] pDblCoeff;
}

////////////////////////////////////////////////////////////////////////////////
// Check whether camera supports RGB pixel formats.
bool SupportsRGB( CBaslerUniversalInstantCamera& camera )
{
    GenApi::NodeList_t Entries;
    camera.PixelFormat.GetEntries( Entries );
    bool Result = false;

    for (size_t i = 0; i < Entries.size(); i++)
    {
        GenApi::INode* pNode = Entries[i];
        if (IsAvailable( pNode->GetAccessMode() ))
        {
            GenApi::IEnumEntry* pEnum = dynamic_cast<GenApi::IEnumEntry*>(pNode);
            const GenICam::gcstring sym( pEnum->GetSymbolic() );
            if (sym.find( GenICam::gcstring( "RGB" ) ) != string::npos)
            {
                Result = true;
                break;
            }
        }
    }
    return Result;
}
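
A minimal calling sequence for the helpers above, shown as a sketch: the file names here are placeholders, not the sample's actual values, and the camera-side file name depends on the camera model (see the camera documentation).

// Sketch only: placeholder file names.
CreateShadingData( camera, "ShadingData.bin" );                    // Grab, average, and compute coefficients.
UploadFile( camera, "<camera shading file>", "ShadingData.bin" );  // Copy the local file to the camera.
CheckShadingData( camera );                                        // Verify that the new multipliers are close to 1.0.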

ParametrizeCamera_UserSets#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// ParametrizeCamera_UserSets.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    Demonstrates how to use user configuration sets (user sets) and how to configure the camera
    to start up with the user-defined settings of user set 1.

    You can also configure your camera using the pylon Viewer and
    store your custom settings in a user set of your choice.

    Note: Different camera families implement different versions of the Standard Feature Naming Convention (SFNC).
    As a result, the names and types of the parameters used can differ.

    ATTENTION:
    Executing this sample will overwrite all current settings in user set 1.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using cout.
using namespace std;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the first found camera device.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Open the camera.
        camera.Open();

        // Check if the device supports user sets.
        if (!camera.UserSetSelector.IsWritable())
        {
            throw RUNTIME_EXCEPTION( "The device doesn't support user sets." );
        }

        // Used for USB cameras
        UserSetDefaultEnums oldDefaultUserSet = UserSetDefault_Default;
        // Used for GigE cameras
        UserSetDefaultSelectorEnums oldDefaultUserSetSelector = UserSetDefaultSelector_Default;

        // Remember the current default user set selector so we can restore it later when cleaning up.
        if (camera.UserSetDefault.IsReadable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
        {
            oldDefaultUserSet = camera.UserSetDefault.GetValue();
        }
        else
        {
            oldDefaultUserSetSelector = camera.UserSetDefaultSelector.GetValue();
        }

        // Load default settings.
        cout << "Loading default settings" << endl;
        camera.UserSetSelector.SetValue( UserSetSelector_Default );
        camera.UserSetLoad.Execute();

        // Set gain and exposure time values.
        // The camera won't let you set specific values when related auto functions are active.
        // So we need to disable the related auto functions before setting the values.
        cout << "Turning off Gain Auto and Exposure Auto." << endl;

        if (camera.Gain.IsWritable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
        {
            camera.GainAuto.TrySetValue( GainAuto_Off );
            camera.Gain.SetValue( camera.Gain.GetMin() );
            camera.ExposureAuto.TrySetValue( ExposureAuto_Off );
            camera.ExposureTime.SetValue( camera.ExposureTime.GetMin() );
        }
        else
        {
            camera.GainAuto.TrySetValue( GainAuto_Off );
            camera.GainRaw.SetValue( camera.GainRaw.GetMin() );
            camera.ExposureAuto.TrySetValue( ExposureAuto_Off );
            camera.ExposureTimeRaw.SetValue( camera.ExposureTimeRaw.GetMin() );
        }

        // Save to user set 1.
        //
        // ATTENTION:
        // This will overwrite all settings previously saved in user set 1.
        cout << "Saving currently active settings to user set 1." << endl;
        camera.UserSetSelector.SetValue( UserSetSelector_UserSet1 );
        camera.UserSetSave.Execute();

        // Show default settings.
        cout << endl << "Loading default settings." << endl;
        camera.UserSetSelector.SetValue( UserSetSelector_Default );
        camera.UserSetLoad.Execute();
        cout << "Default settings" << endl;
        cout << "================" << endl;
        if (camera.Gain.IsReadable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
        {
            cout << "Gain          : " << camera.Gain.GetValue() << endl;
            cout << "Exposure time : " << camera.ExposureTime.GetValue() << endl;
        }
        else
        {
            cout << "Gain          : " << camera.GainRaw.GetValue() << endl;
            cout << "Exposure time : " << camera.ExposureTimeRaw.GetValue() << endl;
        }

        // Show user set 1 settings.
        cout << endl << "Loading user set 1 settings." << endl;
        camera.UserSetSelector.SetValue( UserSetSelector_UserSet1 );
        camera.UserSetLoad.Execute();
        cout << "User set 1 settings" << endl;
        cout << "===================" << endl;

        if (camera.Gain.IsReadable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
        {
            cout << "Gain          : " << camera.Gain.GetValue() << endl;
            cout << "Exposure time : " << camera.ExposureTime.GetValue() << endl;
        }
        else
        {
            cout << "Gain          : " << camera.GainRaw.GetValue() << endl;
            cout << "Exposure time : " << camera.ExposureTimeRaw.GetValue() << endl;
        }

        // Set user set 1 as default user set:
        // When the camera wakes up it will be configured
        // with the settings from user set 1.
        if (camera.UserSetDefault.IsWritable()) // Cameras based on SFNC 2.0 or later, e.g., USB cameras
        {
            camera.UserSetDefault.SetValue( UserSetDefault_UserSet1 );

            // Restore the default user set selector.
            camera.UserSetDefault.SetValue( oldDefaultUserSet );
        }
        else
        {
            // Set user set 1 as default user set:
            // When the camera wakes up it will be configured
            // with the settings from user set 1.
            camera.UserSetDefaultSelector.SetValue( UserSetDefaultSelector_UserSet1 );

            // Restore the default user set selector.
            camera.UserSetDefaultSelector.SetValue( oldDefaultUserSetSelector );
        }
        // Close the camera.
        camera.Close();
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
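
If the camera should merely start up with previously stored settings, a minimal sketch (assuming the same camera object and namespaces as in the sample above) is to load the user set once after opening the camera:

// Minimal sketch: load the settings stored in user set 1.
camera.Open();
if (camera.UserSetSelector.IsWritable())
{
    camera.UserSetSelector.SetValue( UserSetSelector_UserSet1 );
    camera.UserSetLoad.Execute();
}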

Utility_FFC#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Utility_FFC.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to use the Flat-Field Correction (FFC) with Basler boost V cameras.
    The goal of FFC is to create a more accurate and evenly illuminated representation of the original image.
    This sample requires a lot of processing power and execution may take a long time in debug configurations.
*/

#pragma warning( disable : 4996 ) // Function call with parameters that may be unsafe. This call relies on the caller to check that the values passed are correct. To disable this warning, use -D_SCL_SECURE_NO_WARNINGS. See the Visual Studio documentation for how to use C++ Checked Iterators.

#define NOMINMAX

#include <algorithm>
#include <cmath>
#include <numeric>

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using GenApi objects.
using namespace GenApi;

// Namespace for using cout.
using namespace std;

// The maximum number of images to be grabbed.
static const uint8_t numberOfMaxImagesToGrab = 5;

// Creates an image buffer based on the arithmetic mean of all grabbed images and builds a row of correction values for calculating the DSNU and PRNU coefficients.
void processImages( CInstantCamera& camera, float& meanPixelVal, vector<float>& meanOfColumns, const size_t width, const size_t height )
{
    // Start the grabbing of numberOfMaxImagesToGrab images.
    camera.StartGrabbing( numberOfMaxImagesToGrab );

    // This smart pointer will receive the grab result data.
    CGrabResultPtr ptrGrabResult;

    cout << "Please wait. Images are being grabbed." << endl;
    uint8_t succeededGrabs = 0;

    CIntegerParameter bufferSize( camera.GetNodeMap(), "PayloadSize" );
    vector<float> bufferArray( static_cast<size_t>(bufferSize.GetValue()), 0.0f);

    // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
    // when numberOfMaxImagesToGrab images have been retrieved.
    while (camera.IsGrabbing())
    {
        // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
        camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );

        // Image grabbed successfully, and does the actual buffer size match the expected size?
        if (ptrGrabResult->GrabSucceeded() && (bufferArray.size() == ptrGrabResult->GetPayloadSize()))
        {
            // Adds current buffer to all previously acquired (and summed) buffers.
            std::transform( bufferArray.begin(),
                            bufferArray.end(),
                            static_cast<const uint8_t*>(ptrGrabResult->GetBuffer()),
                            bufferArray.begin(),
                            plus<float>() );

            succeededGrabs++;
        }
        else
        {
            cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << endl;
        }
    }
    if (succeededGrabs > 0)
    {
        const float heightFloat = static_cast<float>(height);
        const float widthFloat = static_cast<float>(width);

        // Calculates a row (width) of sums of every column (height).
        for (unsigned int y = 0; y < height; ++y)
        {
            for (unsigned int x = 0; x < width; ++x)
            {
                size_t idx = (width * y) + x;
                meanOfColumns[x] += bufferArray[idx];
            }
        }

        // Division by succeededGrabs is necessary because the pixel values were summed.
        // Division by height is necessary to get the mean pixel value per column.
        const float divisor = static_cast<float>(succeededGrabs) * heightFloat;
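        // Example: with 5 successful grabs and a height of 480 rows,
        // each column sum is divided by 5 * 480 = 2400.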
        std::for_each( meanOfColumns.begin(), 
                       meanOfColumns.end(), 
                       [&divisor] ( float& n )
                       {
                           n /= divisor;
                       } );

        // Calculating mean pixel value of correction value row.
        meanPixelVal = std::accumulate( meanOfColumns.begin(), meanOfColumns.end(), 0.0f);
        meanPixelVal /= widthFloat;
    }
}

// Searches for FFC-compatible boost cameras in the CXP transport layer (TL).
bool findBoostCam( CInstantCamera& camera )
{
    // Build a filter list containing each model name supported.
    const char* supportedModelNames[] = { "boA9344-70cc", 
                                          "boA9344-70cm", 
                                          "boA5120-150cc", 
                                          "boA5120-150cm", 
                                          "boA5120-230cc", 
                                          "boA5120-230cm" };

    DeviceInfoList filter;
    CDeviceInfo deviceInfo;

    // Find only CXP devices.
    deviceInfo.SetDeviceClass( Pylon::BaslerGenTlCxpDeviceClass );

    // Add each model name to the filter.
    for (const char* supportedModelName : supportedModelNames)
    {
        deviceInfo.SetModelName( supportedModelName );
        filter.push_back( deviceInfo );
    }

    // List of devices matching the filter.
    Pylon::DeviceInfoList_t deviceInfoList;

    CTlFactory::GetInstance().EnumerateDevices( deviceInfoList, filter );
    cout << "Devices found: " << deviceInfoList.size() << endl;

    if (!deviceInfoList.empty())
    {
        // The first FFC-compatible CXP device will be chosen.
        camera.Attach( CTlFactory::GetInstance().CreateDevice( deviceInfoList[0] ) );
        if (camera.IsPylonDeviceAttached())
        {
            cout << "Starting FFC with device: " << deviceInfoList[0].GetModelName() << " (" << deviceInfoList[0].GetSerialNumber() << ")" << endl;
            return true;
        }
    }
    else
    {
        cout << "Couldn't find any FFC-compatible CXP device." << endl;
    }

    return false;
}

int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an instant camera object with the first camera device found.
        CInstantCamera camera;

        // This sample only works with these cameras.
        if (!findBoostCam( camera ))
        {
            cerr << "Couldn't find supported device." << endl;
            PylonTerminate();

            exitCode = 1;
            return exitCode;
        }

        // Open the camera.
        camera.Open();

        INodeMap& nodemap = camera.GetNodeMap();

        //  Prepare camera settings to get the optimum images.
        CEnumParameter pixelFormat( nodemap, "PixelFormat" );
        CIntegerParameter width( nodemap, "Width" );
        CIntegerParameter height( nodemap, "Height" );
        CFloatParameter exposureTime( nodemap, "ExposureTime" );

        pixelFormat.TrySetValue( "Mono8" ); // FFC formula works only with Mono8 images.
        width.SetToMaximum();
        height.SetToMaximum();

        cout << "Pixel format: " << pixelFormat.GetValue() << endl;
        cout << "Image width: " << width.GetValue() << endl;
        cout << "Image height: " << height.GetValue() << endl;

        unsigned int inputExposureTime = 0;
        cout << "Enter a valid exposure time between " << exposureTime.GetMin() 
             << " and " << exposureTime.GetMax() << " [us] for a dark image." << endl;

        cin >> inputExposureTime;

        cout << "Exposure time for dark image is: " << inputExposureTime << " us" << endl;
        exposureTime.SetValue( inputExposureTime, FloatValueCorrection_ClipToRange );

        // Mean values over every column (height).
        vector<float> dx( static_cast<size_t>(width.GetValue()), 0.0f );
        // Mean pixel value over all dark images.
        float dMeanFloat = 0.0f;
        // Using the exposure time for dark images, process the mean image buffer to calculate dx and dmean.
        processImages( camera, dMeanFloat, dx, static_cast<size_t>(width.GetValue()), static_cast<size_t>(height.GetValue()) );

        cout << "Enter a valid exposure time between " << exposureTime.GetMin() 
             << " and " << exposureTime.GetMax() << " [us] for a bright image." << endl;

        cin >> inputExposureTime;

        cout << "Exposure time for bright image is: " << inputExposureTime << " us" << endl;
        exposureTime.SetValue( inputExposureTime, FloatValueCorrection_ClipToRange );

        // Mean values over every column (height).
        vector<float> gx( static_cast<size_t>(width.GetValue()), 0.0f );
        // Mean pixel value over all bright images.
        float gMeanFloat = 0.0f;
        // Using the exposure time for bright images, process the mean image buffer to calculate gx and gmean.
        processImages( camera, gMeanFloat, gx, static_cast<size_t>(width.GetValue()), static_cast<size_t>(height.GetValue()) );

        // Calculate Dark Signal Non-Uniformity (DSNU) coefficients.
        vector<uint8_t> dsnuCoefficients( dx.size(), 0 );
        for (unsigned i = 0; i < dsnuCoefficients.size(); ++i)
        {
            uint8_t ui8ValOverHeight = static_cast<uint8_t>(round( dx.at( i ) ));
            ui8ValOverHeight = std::max( static_cast<uint8_t>(0), ui8ValOverHeight );
            ui8ValOverHeight = std::min( ui8ValOverHeight, static_cast<uint8_t>(127) );
            dsnuCoefficients[i] = ui8ValOverHeight;
        }

        // Calculate Photo Response Non-Uniformity (PRNU) coefficients.
        vector<uint16_t> prnuCoefficients( gx.size(), 0 );
        for (unsigned i = 0; i < prnuCoefficients.size(); ++i)
        {
            float floatTmpVal = 128.0f * gMeanFloat / (gx.at( i ) - dx.at( i ) + 1.0f);
            uint16_t ui16TmpVal = static_cast<uint16_t>(round( floatTmpVal ));
            ui16TmpVal = std::max( static_cast<uint16_t>(0), ui16TmpVal );
            ui16TmpVal = std::min( ui16TmpVal, static_cast<uint16_t>(511) );
            prnuCoefficients[i] = ui16TmpVal;
        }
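
        // Example: with gMeanFloat = 180, gx[i] = 200, and dx[i] = 10, the
        // PRNU coefficient is round(128 * 180 / 191) = round(120.6) = 121.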

        if (dMeanFloat > 40.0f)
        {
            cout << ("It looks like the mean dark image isn't dark enough because the dmean value is a little bit high.") << endl;
        }

        if (gMeanFloat < 150.0f)
        {
            cout << ("It looks like the mean bright image is too dark because the gmean value is a little bit low.") << endl;

        }

        if (gMeanFloat > 210.0f)
        {
            cout << ("It looks like the mean bright image is too bright because the gmean value is a little bit high.") << endl;
        }

        CIntegerParameter FFCCoeffX( nodemap, "BslFlatFieldCorrectionCoeffX" );
        CIntegerParameter FFCCoeffDSNU( nodemap, "BslFlatFieldCorrectionCoeffDSNU" );
        CIntegerParameter FFCCoeffPRNU( nodemap, "BslFlatFieldCorrectionCoeffPRNU" );

        // Write the DSNU and PRNU coefficients to the camera, column by column.
        for (unsigned i = 0; i < gx.size(); ++i)
        {
            FFCCoeffX.SetValue( i, IntegerValueCorrection_Nearest );
            FFCCoeffDSNU.SetValue( dsnuCoefficients[i], IntegerValueCorrection_Nearest );
            FFCCoeffPRNU.SetValue( prnuCoefficients[i], IntegerValueCorrection_Nearest );
        }

        int64_t dMean = static_cast<int64_t>(round( dMeanFloat ));
        dMean = std::max( static_cast<int64_t>(0), dMean );
        dMean = std::min( dMean, static_cast<int64_t>(127) );

        // Saves mean dark image pixel value.
        CIntegerParameter FFCDMean( nodemap, "BslFlatFieldCorrectionDMean" ); 
        FFCDMean.SetValue( dMean );

        // Saves current flat-field correction values to flash memory.
        CCommandParameter FFCSaveToFlash( nodemap, "BslFlatFieldCorrectionSaveToFlash" );
        FFCSaveToFlash.Execute();
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cout << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
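
To see the coefficient arithmetic in isolation, the following self-contained sketch applies the same DSNU and PRNU formulas as the sample to made-up column means (plain C++, no pylon calls; all values are illustrative):

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <iostream>
#include <vector>

int main()
{
    // Made-up per-column mean values: dark image (dx) and bright image (gx).
    std::vector<float> dx = { 8.0f, 10.0f, 12.0f };
    std::vector<float> gx = { 190.0f, 200.0f, 210.0f };
    const float gMean = 200.0f; // Mean pixel value over all bright images.

    for (size_t i = 0; i < dx.size(); ++i)
    {
        // Same DSNU/PRNU formulas as in the sample above, with the upper limits applied.
        uint8_t dsnu = static_cast<uint8_t>(std::min( 127.0f, std::round( dx[i] ) ));
        float prnuF = 128.0f * gMean / (gx[i] - dx[i] + 1.0f);
        uint16_t prnu = static_cast<uint16_t>(std::min( 511.0f, std::round( prnuF ) ));
        std::cout << "column " << i << ": DSNU = " << static_cast<int>(dsnu)
                  << ", PRNU = " << prnu << std::endl;
    }
    return 0;
}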

Utility_GrabAvi#

Note

This sample is only available on Windows.

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Utility_GrabAvi.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to create a video file in Audio Video Interleave (AVI) format.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#include <pylon/AviCompressionOptions.h>
#include <pylon/PylonGUI.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using GenApi objects.
using namespace GenApi;

// Namespace for using cout.
using namespace std;

// The maximum number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 100;

// When this amount of image data has been written, the grabbing is stopped.
static const size_t c_maxImageDataBytesThreshold = 50 * 1024 * 1024;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create an AVI writer object.
        CAviWriter aviWriter;

        // The AVI writer supports the output formats PixelType_Mono8,
        // PixelType_BGR8packed, and PixelType_BGRA8packed.
        EPixelType aviPixelType = PixelType_BGR8packed;
        // The frame rate used for playing the video (playback frame rate).
        const int cFramesPerSecond = 20;

        // Create an instant camera object with the camera device found first.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Open the camera.
        camera.Open();

        // Get the required camera settings.
        CIntegerParameter width( camera.GetNodeMap(), "Width" );
        CIntegerParameter height( camera.GetNodeMap(), "Height" );
        CEnumParameter pixelFormat( camera.GetNodeMap(), "PixelFormat" );

        // Optional: Depending on your camera or computer, you may not be able to save
        // a video without losing frames. Therefore, we limit the resolution:
        width.TrySetValue( 640, IntegerValueCorrection_Nearest );
        height.TrySetValue( 480, IntegerValueCorrection_Nearest );

        if (pixelFormat.IsReadable())
        {
            // If the camera produces Mono8 images use Mono8 for the AVI file.
            if (pixelFormat.GetValue() == "Mono8")
            {
                aviPixelType = PixelType_Mono8;
            }
        }

        // Optionally set up compression options.
        SAviCompressionOptions* pCompressionOptions = NULL;
        // Uncomment the two code lines below to enable AVI compression.
        // A dialog will be shown for selecting the codec.
        //SAviCompressionOptions compressionOptions( "MSVC", true);
        //pCompressionOptions = &compressionOptions;

        // Open the AVI writer.
        aviWriter.Open(
            "_TestAvi.avi",
            cFramesPerSecond,
            aviPixelType,
            (uint32_t) width.GetValue(),
            (uint32_t) height.GetValue(),
            ImageOrientation_BottomUp, // Some compression codecs will not work with top down oriented images.
            pCompressionOptions );

        // Start the grabbing of c_countOfImagesToGrab images.
        // The camera device is parameterized with a default configuration which
        // sets up free running continuous acquisition.
        camera.StartGrabbing( c_countOfImagesToGrab, GrabStrategy_LatestImages );


        cout << "Please wait. Images are grabbed." << endl;

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
        // when c_countOfImagesToGrab images have been retrieved.
        while (camera.IsGrabbing())
        {
            // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );

            // Image grabbed successfully?
            if (ptrGrabResult->GrabSucceeded())
            {
                // Display the image. Remove the following line of code to maximize frame rate.
                Pylon::DisplayImage( 1, ptrGrabResult );

                // If required, the grabbed image is converted to the correct format and is then added to the AVI file.
                // The camera delivers images in top-down orientation, but bottom-up orientation
                // was specified when opening the AVI writer. That is why the image is
                // always converted before it is added to the AVI file.
                // To maximize the frame rate, try to avoid image conversion (see the CanAddWithoutConversion() method).
                aviWriter.Add( ptrGrabResult );

                // If images are skipped, writing AVI frames takes too much processing time.
                cout << "Images Skipped = " << ptrGrabResult->GetNumberOfSkippedImages() << boolalpha
                    << "; Image has been converted = " << !aviWriter.CanAddWithoutConversion( ptrGrabResult )
                    << endl;

                // Check whether the image data size limit has been reached to prevent the AVI file from becoming too large.
                // The size returned by GetImageDataBytesWritten() does not include the sizes of the AVI file header and AVI file index.
                // See the documentation for GetImageDataBytesWritten() for more information.
                if (c_maxImageDataBytesThreshold < aviWriter.GetImageDataBytesWritten())
                {
                    cout << "The image data size limit has been reached." << endl;
                    break;
                }
            }
            else
            {
                cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << endl;
            }
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
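
To enable AVI compression, the commented-out lines in the sample can be activated as shown in the sketch below. It reuses the variables from the sample above and, as noted in the sample's comments, shows a dialog for selecting the codec:

// Sketch: enable AVI compression via the codec selection dialog.
SAviCompressionOptions compressionOptions( "MSVC", true );
SAviCompressionOptions* pCompressionOptions = &compressionOptions;

aviWriter.Open(
    "_TestAvi.avi",
    cFramesPerSecond,
    aviPixelType,
    (uint32_t) width.GetValue(),
    (uint32_t) height.GetValue(),
    ImageOrientation_BottomUp,
    pCompressionOptions );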

Utility_GrabVideo#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Utility_GrabVideo.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to create a video file in MP4 format.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using GenApi objects.
using namespace GenApi;

// Namespace for using cout.
using namespace std;

// The maximum number of images to be grabbed.
static const uint32_t c_countOfImagesToGrab = 100;
// When this amount of image data has been written, the grabbing is stopped.
static const int64_t c_maxImageDataBytesThreshold = 50 * 1024 * 1024;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Check if CVideoWriter is supported and all DLLs are available.
        if (!CVideoWriter::IsSupported())
        {
            cout << "VideoWriter is not supported at the moment. Please install the pylon Supplementary Package for MPEG-4 which is available on the Basler website." << endl;
            // Releases all pylon resources.
            PylonTerminate();
            // Return with error code 1.
            return 1;
        }

        // Create a video writer object.
        CVideoWriter videoWriter;

        // The frame rate used for playing the video (playback frame rate).
        const int cFramesPerSecond = 20;
        // The quality used for compressing the video.
        const uint32_t cQuality = 90;

        // Create an instant camera object with the first camera device found.
        CInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Open the camera.
        camera.Open();

        // Get the required camera settings.
        CIntegerParameter width( camera.GetNodeMap(), "Width" );
        CIntegerParameter height( camera.GetNodeMap(), "Height" );
        CEnumParameter pixelFormat( camera.GetNodeMap(), "PixelFormat" );

        // Optional: Depending on your camera or computer, you may not be able to save
        // a video without losing frames. Therefore, we limit the resolution:
        width.TrySetValue( 640, IntegerValueCorrection_Nearest );
        height.TrySetValue( 480, IntegerValueCorrection_Nearest );

        // Map the pixel format to a pylon pixel type.
        CPixelTypeMapper pixelTypeMapper( &pixelFormat );
        EPixelType pixelType = pixelTypeMapper.GetPylonPixelTypeFromNodeValue( pixelFormat.GetIntValue() );

        // Set parameters before opening the video writer.
        videoWriter.SetParameter(
            (uint32_t) width.GetValue(),
            (uint32_t) height.GetValue(),
            pixelType,
            cFramesPerSecond,
            cQuality );

        // Open the video writer.
        videoWriter.Open( "_TestVideo.mp4" );

        // Start the grabbing of c_countOfImagesToGrab images.
        // The camera device is parameterized with a default configuration which
        // sets up free running continuous acquisition.
        camera.StartGrabbing( c_countOfImagesToGrab, GrabStrategy_LatestImages );


        cout << "Please wait. Images are being grabbed." << endl;

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Camera.StopGrabbing() is called automatically by the RetrieveResult() method
        // when c_countOfImagesToGrab images have been retrieved.
        while (camera.IsGrabbing())
        {
            // Wait for an image and then retrieve it. A timeout of 5000 ms is used.
            camera.RetrieveResult( 5000, ptrGrabResult, TimeoutHandling_ThrowException );

            // Image grabbed successfully?
            if (ptrGrabResult->GrabSucceeded())
            {
                // Access the image data.
                cout << "SizeX: " << ptrGrabResult->GetWidth() << endl;
                cout << "SizeY: " << ptrGrabResult->GetHeight() << endl;
                const uint8_t* pImageBuffer = (uint8_t*) ptrGrabResult->GetBuffer();
                cout << "Gray value of first pixel: " << (uint32_t) pImageBuffer[0] << endl << endl;

    #ifdef PYLON_WIN_BUILD
                // Display the grabbed image.
                Pylon::DisplayImage( 1, ptrGrabResult );
    #endif

                // If required, the grabbed image is converted to the correct format and is then added to the video file.
                // If the orientation of the image does not match the orientation required for video compression, the
                // image will be flipped automatically to ImageOrientation_TopDown, unless the input pixel type is Yuv420p.
                videoWriter.Add( ptrGrabResult );

                // If images are skipped, writing video frames takes too much processing time.
                cout << "Images Skipped = " << ptrGrabResult->GetNumberOfSkippedImages() << boolalpha
                    << "; Image has been converted = " << !videoWriter.CanAddWithoutConversion( ptrGrabResult )
                    << endl;

                // Check whether the image data size limit has been reached to avoid the video file becoming too large.
                if (c_maxImageDataBytesThreshold < videoWriter.BytesWritten.GetValue())
                {
                    cout << "The image data size limit has been reached." << endl;
                    break;
                }
            }
            else
            {
                cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << endl;
            }
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}

Utility_Image#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Utility_Image.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to use the pylon image classes CPylonImage and CPylonBitmapImage.
    CPylonImage supports handling image buffers of the various existing pixel types.
    CPylonBitmapImage can be used to easily create Windows bitmaps for displaying images.

    Additionally, there are two image class related interfaces in pylon IImage and IReusableImage.
    IImage can be used to access image properties and image buffer.
    The IReusableImage interface extends the IImage interface to be able to reuse the
    resources of the image to represent a different image.
    Both CPylonImage and CPylonBitmapImage implement the IReusableImage interface.

    The pylon grab result class CGrabResultPtr provides a cast operator to the IImage
    interface. This eases the use of the grab result together with the image classes.
*/

#include <iomanip>
// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

#include "../include/SampleImageCreator.h"

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using GenApi objects.
using namespace GenApi;

// Namespace for using cout.
using namespace std;


// This is a helper function for printing image properties.
void PrintImageProperties( IImage& image )
{
    cout
        << "Buffer: " << image.GetBuffer()
        << " Image Size: " << image.GetImageSize()
        << " Width: " << image.GetWidth()
        << " Height: " << image.GetHeight()
        << " Unique: " << image.IsUnique()
        << endl;
}


// This is a helper function for showing an image on the screen if Windows is used,
// and for printing the first bytes of the image.
void ShowImage( IImage& image, const char* message = NULL )
{
#ifdef PYLON_WIN_BUILD
    // Display the image.
    Pylon::DisplayImage( 1, image );
#endif

    if (message)
    {
        cout << endl << message << " ";
    }

    // store state of cout
    std::ios state( NULL );
    state.copyfmt( cout );

    const uint8_t* pBytes = reinterpret_cast<const uint8_t*>(image.GetBuffer());
    cout << endl << "First six bytes of the image: " << endl;
    for (unsigned int i = 0; i < 6; ++i)
    {
        cout << "0x" << hex << setfill( '0' ) << setw( 2 ) << unsigned( pBytes[i] ) << " ";
    }
    cout << endl;

    // restore state of cout
    cout.copyfmt( state );

    cerr << "Press enter to continue." << endl;
    while (cin.get() != '\n');
}


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Define some constants.
        const uint32_t cWidth = 640;
        const uint32_t cHeight = 480;
        const uint32_t cPadding = 10;
        const uint8_t cSampleGrayValue = 160;

        // The CPylonImage basics.
        {
            // Create a pylon image with the given properties.
            CPylonImage imageMono8( CPylonImage::Create( PixelType_Mono8, cWidth, cHeight ) );
            cout << "The properties of the newly created image." << endl;
            PrintImageProperties( imageMono8 );

            // The image class allocates a buffer large enough to hold the image.
            // We can use it for example to fill it with a test pattern.
            uint32_t width = imageMono8.GetWidth();
            uint32_t height = imageMono8.GetHeight();
            uint8_t* buffer = static_cast<uint8_t*>(imageMono8.GetBuffer());
            uint8_t* p = buffer;
            for (uint32_t y = 0; y < height; ++y)
            {
                for (uint32_t x = 0; x < width; ++x, ++p)
                {
                    *p = (uint8_t) ((x + y) % 256);
                }
            }

            // Show the image on the screen in a separate window.
            ShowImage( imageMono8, "Created image." );

            // If the pylon image object is copied or assigned then no image data copy is made.
            // All objects reference the same buffer now. The image properties have been copied though.
            // The IsUnique() method can be used to check whether a buffer is
            // referenced by multiple pylon image objects.
            CPylonImage sameImageMono8A( imageMono8 );
            CPylonImage sameImageMono8B = imageMono8;
            cout << endl << "After assignment multiple images reference the same data." << endl;
            PrintImageProperties( imageMono8 );
            PrintImageProperties( sameImageMono8A );
            PrintImageProperties( sameImageMono8B );

            // The CopyImage method can be used to create a full copy of an image.
            CPylonImage copiedImage;
            copiedImage.CopyImage( imageMono8 );
            cout << endl << "The properties of a full copy of the test image." << endl;
            PrintImageProperties( copiedImage );

            // The Release() method can be used to release any data.
            // The object sameImageMono8B is now empty.
            // No buffer is allocated.
            sameImageMono8B.Release();
            cout << endl << "Assigned to image object after releasing the image data." << endl;
            PrintImageProperties( sameImageMono8B );

            // A newly created image object is empty.
            CPylonImage reusedImage;
            cout << endl << "A newly created image object." << endl;
            PrintImageProperties( reusedImage );

            // The Reset() method can be used to reset the image properties
            // and allocate a new buffer if required.
            reusedImage.Reset( PixelType_Mono8, cWidth, cHeight );
            cout << "After resetting the image properties. A new Buffer is allocated." << endl;
            PrintImageProperties( reusedImage );

            // Reset() never decreases the allocated buffer size if the
            // new image fits into the current buffer.
            // The new image is smaller and therefore the buffer is reused.
            reusedImage.Reset( PixelType_Mono8, cWidth / 2, cHeight );
            cout << "After resetting the image properties to a smaller image. The buffer is reused." << endl;
            PrintImageProperties( reusedImage );

            // A new buffer is allocated because the old buffer is
            // too small for the new image.
            reusedImage.Reset( PixelType_Mono8, cWidth * 2, cHeight );
            cout << "After resetting the image properties to a larger image." << endl << "A new Buffer is allocated." << endl;
            PrintImageProperties( reusedImage );

            // The imageMono8 and sameImageMono8A objects still reference the
            // same image. Because of this the buffer referenced by sameImageMono8A
            // can't be reused. A new buffer is allocated.
            sameImageMono8A.Reset( PixelType_Mono8, cWidth, cHeight );
            cout << endl << "After resetting the image properties while the image data is referenced by another image. A new Buffer is allocated." << endl;
            PrintImageProperties( sameImageMono8A );

            // For advanced use cases additional line padding and the image orientation can be defined, too.
            sameImageMono8A.Reset( PixelType_Mono8, cWidth, cHeight, cPadding, ImageOrientation_TopDown );
            cout << endl << "After resetting the image properties with additional padding." << endl;
            PrintImageProperties( sameImageMono8A );

            // The image objects are destroyed here and the buffers are deleted.
            // An allocated image buffer is deleted if it is not referenced
            // anymore by a pylon image object.
        }


        // The CPylonImage and user buffers.
        {
            // Create pylon images.
            CPylonImage imageA;
            CPylonImage imageB;

            // Create a buffer for demonstration purposes. This could be a buffer of a 3rd party
            // image library.
            // This example uses a C++ library vector class for buffer allocation for automatic
            // deletion of the buffer.
            vector<uint8_t> buffer( (cWidth + cPadding) * cHeight, cSampleGrayValue );
            size_t bufferSize = buffer.size() * sizeof( buffer[0] );

            // Initializes the image object with the user buffer. Now the image object could be used to
            // interface with other pylon objects more easily, e.g. the image format converter.
            // The user buffer must not be deleted while it is attached to the pylon image object.
            imageA.AttachUserBuffer( &buffer[0], bufferSize, PixelType_Mono8, cWidth, cHeight, cPadding );
            cout << endl << "The properties of an image with an attached user buffer." << endl;
            PrintImageProperties( imageA );

            // The image can be assigned new properties as long as the image fits into the user buffer.
            imageA.Reset( PixelType_Mono8, cWidth / 2, cHeight );
            cout << "After resetting the image properties to a smaller image. The buffer is reused." << endl;
            PrintImageProperties( imageA );

            // This causes an exception because the attached user buffer is too small for the image.
            try
            {
                cout << "Try calling the Reset method when the user buffer is too small for the new image." << endl;
                imageA.Reset( PixelType_Mono8, cWidth * 2, cHeight );
            }
            catch (const GenericException& e)
            {
                cerr << "Expected exception: " << e.GetDescription() << endl;
            }

            // The CopyImage method can be used to create a full copy of the provided image.
            imageB.CopyImage( &buffer[0], bufferSize, PixelType_Mono8, cWidth, cHeight, cPadding );
            cout << endl << "The properties of an image after a full copy of a user buffer." << endl;
            PrintImageProperties( imageB );

            // The image objects are destroyed. The user must take care of the deletion of the user buffer.
        }


        // The CPylonImage and grab results.
        {
            // This smart pointer will receive the grab result data.
            CGrabResultPtr ptrGrabResult;

            // Try to get a grab result.
            cout << endl << "Waiting for an image to be grabbed." << endl;
            try
            {
                CInstantCamera Camera( CTlFactory::GetInstance().CreateFirstDevice() );
                Camera.GrabOne( 1000, ptrGrabResult );
            }
            catch (const GenericException& e)
            {

                cerr << "Could not grab an image: " << endl
                    << e.GetDescription() << endl;
            }

            if (ptrGrabResult && ptrGrabResult->GrabSucceeded())
            {
                // Create a pylon image.
                CPylonImage image;

                // The pylon grab result class CGrabResultPtr provides a cast operator to IImage.
                // That's why it can be used like an image, e.g. to print its properties or
                // to show it on the screen.
                cout << endl << "The properties of the grabbed image." << endl;
                PrintImageProperties( ptrGrabResult );
                ShowImage( ptrGrabResult, "Grabbed image." );

                // Initializes the image object with the buffer from the grab result.
                // This prevents the reuse of the buffer for grabbing as long as it is
                // not released.
                // Please note that this is not relevant for this example because the
                // camera object has been destroyed already.
                image.AttachGrabResultBuffer( ptrGrabResult );
                cout << endl << "The properties of an image with an attached grab result." << endl;
                PrintImageProperties( image );

                // Get the grab result image properties for later use.
                EPixelType pixelType = ptrGrabResult->GetPixelType();
                uint32_t width = ptrGrabResult->GetWidth();
                uint32_t height = ptrGrabResult->GetHeight();

                // Now the grab result can be released. The grab result buffer is now
                // only held by the pylon image.
                ptrGrabResult.Release();
                cout << "After the grab result has been released." << endl;
                PrintImageProperties( image );

                // If a grab result is referenced, a new buffer is always allocated on reset.
                image.Reset( pixelType, width / 2, height );
                cout << endl << "After resetting the image properties while a grab result is referenced. A new Buffer is allocated." << endl;
                PrintImageProperties( image );
            }
        }


        // Loading and saving.
        // Please note that this is only a brief overview. Please look at the
        // Utility_ImageLoadAndSave sample for more information.
        {
            // Create pylon images.
            CPylonImage imageSaved;
            CPylonImage imageLoaded;

            // Create a sample image.
            imageSaved = SampleImageCreator::CreateJuliaFractal( PixelType_RGB8packed, cWidth, cHeight );

#ifdef PYLON_WIN_BUILD
            // Save the image. The image is automatically converted to
            // a format that can be saved if needed.
            imageSaved.Save( ImageFileFormat_Bmp, "JuliaFractal.bmp" );
#endif

#ifdef PYLON_WIN_BUILD
            // Load the image.
            imageLoaded.Load( "JuliaFractal.bmp" );
            cout << endl << "The properties of the loaded sample image." << endl;
            PrintImageProperties( imageLoaded );
            ShowImage( imageLoaded, "The loaded sample image is shown." );
#endif
        }


        // The GetAOI method.
        // This method can be used to create partial images derived from an image, e.g. thumbnail images for displaying
        // defects.
        {
            // Create pylon images.
            CPylonImage sampleImage;
            CPylonImage aoi;
            CPylonImage aoiFromAoi;

            // Create a sample image.
            sampleImage = SampleImageCreator::CreateJuliaFractal( PixelType_RGB8packed, cWidth, cHeight );
            cout << endl << "The properties of the sample image." << endl;
            PrintImageProperties( sampleImage );

            // Compute the coordinates of the area of interest.
            uint32_t topLeftX = cWidth / 4;
            uint32_t topLeftY = cHeight / 2;
            uint32_t width = cWidth / 4;
            uint32_t height = cHeight / 4;

            // Create a new pylon image containing the AOI.
            // No image data is copied. The same image buffer is referenced.
            // The buffer start is now the first pixel of the AOI, and the
            // padding property of the pylon image object is used to skip over the
            // part of a line outside of the AOI.
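            // Example: for this RGB8packed sample image, each AOI line skips
            // (cWidth - width) * 3 = (640 - 160) * 3 = 1440 padding bytes.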
            aoi = sampleImage.GetAoi( topLeftX, topLeftY, width, height );
            cout << "After creating an AOI." << endl;
            PrintImageProperties( aoi );
            ShowImage( aoi, "AOI of the sample image." );

            // CopyImage( const IImage& image, size_t newPaddingX) can be used to create a
            // full copy and to remove the additional padding.
            CPylonImage copiedAoi;
            copiedAoi.CopyImage( aoi, 0 );
            cout << "The properties of a full copy of the AOI image." << endl;
            PrintImageProperties( copiedAoi );

            // GetAOI can be applied again for the AOI image.
            topLeftX = width / 4;
            topLeftY = height / 4;
            width = width / 2;
            height = height / 2;
            aoiFromAoi = aoi.GetAoi( topLeftX, topLeftY, width, height );

            // An AOI image is still valid if the source image object has been destroyed
            // or the image data has been released.
            aoi.Release();
            sampleImage.Release();

            // Show the image.
            cout << "After creating an AOI of an AOI." << endl;
            PrintImageProperties( aoiFromAoi );
            ShowImage( aoiFromAoi, "AOI of the AOI of the sample image." );

            // The AOI image still references the buffer of the source image.
            // It is the only object that references this buffer.
            // That's why the full buffer can be reused if needed.
            aoiFromAoi.Reset( PixelType_Mono8, cWidth, cHeight );
            cout << "After reusing the buffer of the sample image." << endl;
            PrintImageProperties( aoiFromAoi );
        }


        // The GetPlane method.
        // This method can be used to work with the planes of
        // a planar image.
        {
            // Create an image object.
            CPylonImage imageRGB8planar;

            // Create a sample image.
            imageRGB8planar = SampleImageCreator::CreateMandelbrotFractal( PixelType_RGB8planar, cWidth, cHeight );
            ShowImage( imageRGB8planar, "Sample image." );

            // Create images to access the planes of the planar image.
            // No image data is copied. The same image buffer is referenced.
            // The buffer start is the start of the plane, and the pixel type is
            // set to the corresponding pixel type of a plane.
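            // For PixelType_RGB8planar with zero padding, each plane holds one
            // byte per pixel: the green plane starts width * height bytes into
            // the buffer, and the blue plane starts at twice that offset.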
            CPylonImage redPlane = imageRGB8planar.GetPlane( 0 );
            CPylonImage greenPlane = imageRGB8planar.GetPlane( 1 );
            CPylonImage bluePlane = imageRGB8planar.GetPlane( 2 );

            // Show the planes.
            ShowImage( redPlane, "Red plane of the sample image." );
            ShowImage( greenPlane, "Green plane of the sample image." );
            ShowImage( bluePlane, "Blue plane of the sample image." );

            // Now a plane can be modified. Here the red plane is set to zero.
            memset( redPlane.GetBuffer(), 0, redPlane.GetImageSize() );

            // Show the image.
            ShowImage( imageRGB8planar, "Sample image with red set to zero." );
        }


        // The CPylonBitmapImage class.
        // This class can be used to easily create Windows bitmaps, e.g., for display.
        {
#ifdef PYLON_WIN_BUILD
            // Create a bitmap image
            CPylonBitmapImage bitmapImage;

            // Create a sample image.
            CPylonImage sampleImage;
            sampleImage = SampleImageCreator::CreateJuliaFractal( PixelType_RGB8packed, cWidth, cHeight );

            // The bitmap image class automatically converts input images to the
            // corresponding bitmap format.
            bitmapImage.CopyImage( sampleImage );
            cout << endl << "The properties of the bitmap image." << endl;
            PrintImageProperties( bitmapImage );
            ShowImage( bitmapImage, "The sample image is shown." );

            // If the pylon bitmap image object is copied or assigned, no image data is copied.
            // All objects then reference the same Windows bitmap.
            // The IsUnique() method can be used to check whether the Windows bitmap is
            // referenced by multiple pylon image objects.
            CPylonBitmapImage sameBitmapImageA( bitmapImage );
            CPylonBitmapImage sameBitmapImageB = bitmapImage;
            cout << endl << "After assignment multiple images reference the same data." << endl;
            PrintImageProperties( bitmapImage );
            PrintImageProperties( sameBitmapImageA );
            PrintImageProperties( sameBitmapImageB );

            // The Release() method can be used to release any data.
            // The object sameBitmapImageB is now empty.
            // No bitmap is allocated.
            sameBitmapImageB.Release();
            cout << endl << "Assigned to image object after releasing the image data." << endl;
            PrintImageProperties( sameBitmapImageB );

            // The image format converter can be used to have more control over the conversion.
            // In this example a monochrome version of a sample image is created.
            // See the Utility_ImageFormatConverter sample for more details.
            CImageFormatConverter converter;
            converter.OutputPixelFormat = PixelType_Mono8;
            converter.Convert( bitmapImage, sampleImage );

            // Show the image.
            cout << endl << "The properties of the converted bitmap image." << endl;
            PrintImageProperties( bitmapImage );
            ShowImage( bitmapImage, "The to monochrome converted sample image is shown." );

            // Reset() can be used to reuse the underlying Windows bitmap if
            // the new image properties are equal to the old ones.
            // No additional program logic is needed to reuse the bitmap
            // as long as the image properties stay the same.
            bitmapImage.Reset( PixelType_Mono8, cWidth, cHeight );
            cout << endl << "The properties of the reused bitmap image with equal properties." << endl;
            PrintImageProperties( bitmapImage );

            // Now the new image properties are different. A new Windows
            // bitmap is created.
            bitmapImage.Reset( PixelType_Mono8, cWidth / 2, cHeight );
            cout << endl << "The properties of the newly allocated bitmap image with different properties." << endl;
            PrintImageProperties( bitmapImage );

            // The bitmap image class provides a cast operator for HBITMAP.
            // The cast operator can be used for instance to provide the handle to Windows API functions.
            HBITMAP bitmap = bitmapImage;

            // The bitmap can also be detached to use it without the pylon image object.
            bitmap = bitmapImage.Detach();

            // The pylon bitmap image is now empty.
            cout << endl << "The image object after detaching the image data." << endl;
            PrintImageProperties( bitmapImage );

            // After detaching, the bitmap must be deleted by the user.
            ::DeleteObject( bitmap );
#endif
        }

    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment out the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
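
For quick reference, the GetAoi() section of the sample above can be condensed into a few lines. The following minimal sketch is not part of the sample solution; it uses only calls shown in the sample and assumes the SampleImageCreator helper from the samples' include directory.

// Minimal sketch (not part of the sample solution): create a compact,
// padding-free copy of an AOI, e.g., a thumbnail of a defect region.
#include <pylon/PylonIncludes.h>
#include "../include/SampleImageCreator.h"

using namespace Pylon;

int main()
{
    PylonInitialize();
    {
        // Create a sample image to work on.
        CPylonImage source = SampleImageCreator::CreateJuliaFractal( PixelType_RGB8packed, 640, 480 );

        // Reference the AOI; no image data is copied.
        CPylonImage aoi = source.GetAoi( 160, 240, 160, 120 );

        // Create a compact copy without padding that stays valid
        // independently of the source image.
        CPylonImage thumbnail;
        thumbnail.CopyImage( aoi, 0 );
    }
    PylonTerminate();
    return 0;
}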

Utility_ImageDecompressor#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Utility_ImageDecompressor.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to enable the Compression Beyond feature in Basler cameras and
    how to decompress images using the CImageDecompressor class.
*/

#include <iomanip>
#include <iostream>
// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

// Include file to use pylon universal instant camera parameters.
#include <pylon/BaslerUniversalInstantCamera.h>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using GenApi objects.
using namespace GenApi;

// Namespace for using pylon universal instant camera parameters.
using namespace Basler_UniversalCameraParams;

// Namespace for using cout.
using namespace std;


// This is a helper function for showing an image on the screen if Windows is used,
// and for printing the first bytes of the image.
void ShowImage( IImage& image, const char* message = NULL )
{
#ifdef PYLON_WIN_BUILD
    // Display the image.
    Pylon::DisplayImage( 1, image );
#endif

    if (message)
    {
        cout << endl << message << " ";
    }

    // Store state of cout.
    std::ios state( NULL );
    state.copyfmt( cout );

    const uint8_t* pBytes = reinterpret_cast<const uint8_t*>(image.GetBuffer());
    cout << endl << "First six bytes of the image: " << endl;
    for (unsigned int i = 0; i < 6; ++i)
    {
        cout << "0x" << hex << setfill( '0' ) << setw( 2 ) << unsigned( pBytes[i] ) << " ";
    }
    cout << endl;

    // Restore state of cout.
    cout.copyfmt( state );

    // Wait for key.
    cerr << endl << "Press enter to continue." << endl;
    while (cin.get() != '\n');
}


// Helper function for printing the data stored in the CompressionInfo_t structure.
void printCompressionInfo( const CompressionInfo_t& info )
{
    string status;

    switch (info.compressionStatus)
    {
        case CompressionStatus_Ok:
            status = "Ok";
            break;

        case CompressionStatus_BufferOverflow:
            status = "Buffer overflow";
            break;

        case CompressionStatus_Error:
            status = "Error";
            break;
    }

    cout << endl << "Compression info:" << endl;
    cout << "hasCompressedImage      :" << (info.hasCompressedImage == true ? "Yes" : "No") << endl;
    cout << "compressionStatus       :" << status << endl;
    cout << "lossy                   :" << (info.lossy == true ? "Yes" : "No") << endl;
    cout << "width                   :" << info.width << endl;
    cout << "height                  :" << info.height << endl;
    cout << "pixelType               :" << CPixelTypeMapper::GetNameByPixelType( info.pixelType ) << endl;
    cout << "decompressedImageSize   :" << info.decompressedImageSize << endl;
    cout << "decompressedPayloadSize :" << info.decompressedPayloadSize << endl;
}


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Create a target image.
        CPylonImage targetImage;

        // This smart pointer will receive the grab result data.
        CGrabResultPtr ptrGrabResult;

        // Create an instant camera object with the camera device found first.
        CBaslerUniversalInstantCamera camera( CTlFactory::GetInstance().CreateFirstDevice() );

        // Open the camera.
        camera.Open();

        // Print the model name of the camera.
        cout << "Using device " << camera.GetDeviceInfo().GetModelName() << endl;

        // Fetch the nodemap.
        INodeMap& nodemap = camera.GetNodeMap();

        // Check if the camera supports compression.
        if (camera.ImageCompressionMode.IsWritable())
        {
            // Remember the original compression mode.
            String_t oldCompressionMode = camera.ImageCompressionMode.ToString();
            cout << "Old compression mode: " << oldCompressionMode << endl;

            // Set the compression mode to BaslerCompressionBeyond if available.
            if (camera.ImageCompressionMode.CanSetValue( ImageCompressionMode_BaslerCompressionBeyond ))
            {
                camera.ImageCompressionMode.SetValue( ImageCompressionMode_BaslerCompressionBeyond );
                cout << "New compression mode: " << camera.ImageCompressionMode.ToString() << endl;
            }

            // After enabling the compression, we can read the compression rate option.
            String_t oldCompressionRateOption = camera.ImageCompressionRateOption.ToString();
            cout << "Old compression rate option: " << oldCompressionRateOption << endl;

            // Configure lossless compression.
            if (camera.ImageCompressionRateOption.CanSetValue( ImageCompressionRateOption_Lossless ))
            {
                camera.ImageCompressionRateOption.SetValue( ImageCompressionRateOption_Lossless );
                cout << "New compression rate option: " << camera.ImageCompressionRateOption.ToString() << endl;
            }

            // Create the decompressor and initialize it with the nodemap of the camera.
            CImageDecompressor decompressor( nodemap );

            // Wait for a new image.
            if (camera.GrabOne( 1000, ptrGrabResult ))
            {
                if (ptrGrabResult->GrabSucceeded())
                {
                    // Fetch compression info and check whether the image was compressed by the camera.
                    CompressionInfo_t info;
                    if (decompressor.GetCompressionInfo( info, ptrGrabResult ))
                    {
                        // Print content of CompressionInfo_t.
                        printCompressionInfo( info );

                        // Check if image is still compressed (could have been decompressed by a transport layer).
                        if (info.hasCompressedImage)
                        {
                            if (info.compressionStatus == CompressionStatus_Ok)
                            {
                                // Show compression ratio.
                                cout << endl << "Transferred payload \t:" << ptrGrabResult->GetPayloadSize() << endl;
                                cout << "Compression ratio \t:" << (static_cast<float>(ptrGrabResult->GetPayloadSize()) / static_cast<float>(info.decompressedPayloadSize) * 100.0f) << "%" << endl;

                                // Decompress the image.
                                decompressor.DecompressImage( targetImage, ptrGrabResult );

                                // Show the image.
                                ShowImage( targetImage, "Decompressed image." );
                            }
                            else
                            {
                                cout << "There was an error while the camera was compressing the image." << endl;
                            }
                        }
                        else
                        {
                            // No decompression is needed because it is already an uncompressed image.
                            // (This can happen if the transport layer supports transparent decompression.)
                            ShowImage( ptrGrabResult, "Grabbed image." );
                        }
                    }
                    else
                    {
                        // No decompression is required because the image has never been compressed.
                        // (This can happen if compression was accidentally turned off after initializing the decompressor class.)
                        ShowImage( ptrGrabResult, "Grabbed image." );
                    }
                }
                else
                {
                    cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << endl;
                }
            }
            else
            {
                cout << "Error: Could not grab an image." << endl;
            }

            // Take another picture with lossy compression (if available).

            if (camera.ImageCompressionRateOption.IsWritable())
            {
                cout << endl << "--- Switching to Fix Ratio compression ---" << endl << endl << endl;

                if (camera.ImageCompressionRateOption.CanSetValue( ImageCompressionRateOption_FixRatio ))
                {
                    camera.ImageCompressionRateOption.SetValue( ImageCompressionRateOption_FixRatio );
                    cout << "New compression rate option: " << camera.ImageCompressionRateOption.ToString() << endl;
                }

                // After changing the compression parameters, the decompressor MUST be reconfigured.
                decompressor.SetCompressionDescriptor( nodemap );

                // Wait for a new image.
                if (camera.GrabOne( 1000, ptrGrabResult ))
                {
                    if (ptrGrabResult->GrabSucceeded())
                    {
                        // Fetch compression info and check whether the image was compressed by the camera.
                        CompressionInfo_t info;
                        if (decompressor.GetCompressionInfo( info, ptrGrabResult ))
                        {
                            // Print content of CompressionInfo_t.
                            printCompressionInfo( info );

                            // Check if image is still compressed (could have been decompressed by a transport layer).
                            if (info.hasCompressedImage)
                            {
                                if (info.compressionStatus == CompressionStatus_Ok)
                                {
                                    // Show compression ratio.
                                    cout << endl << "Transferred payload \t:" << ptrGrabResult->GetPayloadSize() << endl;
                                    cout << "Compression ratio \t:" << (static_cast<float>(ptrGrabResult->GetPayloadSize()) / static_cast<float>(info.decompressedPayloadSize) * 100.0f) << "%" << endl;

                                    // Decompress the image.
                                    decompressor.DecompressImage( targetImage, ptrGrabResult );

                                    // Show the image.
                                    ShowImage( targetImage, "Decompressed image." );
                                }
                                else
                                {
                                    cout << "There was an error while the camera was compressing the image." << endl;
                                }
                            }
                            else
                            {
                                // No decompression is needed because it is already an uncompressed image.
                                // (This can happen if the transport layer supports transparent decompression.)
                                ShowImage( ptrGrabResult, "Grabbed image." );
                            }
                        }
                        else
                        {
                            // No decompression is required because the image has never been compressed.
                            // (This can happen if compression was accidentally turned off after initializing the decompressor class.)
                            ShowImage( ptrGrabResult, "Grabbed image." );
                        }
                    }
                    else
                    {
                        cout << "Error: " << std::hex << ptrGrabResult->GetErrorCode() << std::dec << " " << ptrGrabResult->GetErrorDescription() << endl;
                    }
                }
                else
                {
                    cout << "Error: Could not grab an image." << endl;
                }
            }

            // Restore the original compression mode. The compression rate option must be restored
            // first because the rate can't be changed when compression itself is turned off.
            camera.ImageCompressionRateOption.SetValue( oldCompressionRateOption );
            camera.ImageCompressionMode.SetValue( oldCompressionMode );
        }
        else
        {
            cout << "This camera does not support compression." << endl;
        }

        camera.Close();
    }
    catch (const GenericException& e)
    {
        cerr << "Could not grab an image: " << endl
            << e.GetDescription() << endl;

        exitCode = 1;
    }

    // Release all pylon resources.
    PylonTerminate();

    // Wait for key.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    return exitCode;
}
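
The decompression flow appears twice in the sample above. Reduced to its essential calls, it looks like the following minimal sketch, which is not part of the sample solution. It assumes an open CBaslerUniversalInstantCamera named camera with Compression Beyond already enabled, and it omits the error reporting shown in the sample.

// Minimal sketch of the decompression flow (assumes 'camera' is open and
// Compression Beyond is enabled; error reporting omitted).
CImageDecompressor decompressor( camera.GetNodeMap() );

CGrabResultPtr ptrGrabResult;
if (camera.GrabOne( 1000, ptrGrabResult ) && ptrGrabResult->GrabSucceeded())
{
    CompressionInfo_t info;
    if (decompressor.GetCompressionInfo( info, ptrGrabResult )
        && info.hasCompressedImage
        && info.compressionStatus == CompressionStatus_Ok)
    {
        // The image is still compressed; decompress it.
        CPylonImage targetImage;
        decompressor.DecompressImage( targetImage, ptrGrabResult );
    }
    else
    {
        // The grab result already contains an uncompressed image,
        // or the compression status indicates an error.
    }
}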

Utility_ImageFormatConverter#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Utility_ImageFormatConverter.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to use the image format
    converter class CImageFormatConverter.

    The image format converter accepts all image formats
    produced by Basler camera devices and it is able to
    convert these to a number of output formats.
    The conversion can be controlled by several parameters.
    See the converter class documentation for more details.
*/

#include <iomanip>
#include <iostream>
// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#ifdef PYLON_WIN_BUILD
#    include <pylon/PylonGUI.h>
#endif

#include "../include/SampleImageCreator.h"

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using GenApi objects.
using namespace GenApi;

// Namespace for using cout.
using namespace std;


// This is a helper function for showing an image on the screen if Windows is used,
// and for printing the first bytes of the image.
void ShowImage( IImage& image, const char* message = NULL )
{
#ifdef PYLON_WIN_BUILD
    // Display the image.
    Pylon::DisplayImage( 1, image );
#endif

    if (message)
    {
        cout << endl << message << " ";
    }

    // store state of cout
    std::ios state( NULL );
    state.copyfmt( cout );

    const uint8_t* pBytes = reinterpret_cast<const uint8_t*>(image.GetBuffer());
    cout << endl << "First six bytes of the image: " << endl;
    for (unsigned int i = 0; i < 6; ++i)
    {
        cout << "0x" << hex << setfill( '0' ) << setw( 2 ) << unsigned( pBytes[i] ) << " ";
    }
    cout << endl;

    // restore state of cout
    cout.copyfmt( state );

    cerr << "Press enter to continue." << endl;
    while (cin.get() != '\n');
}


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Define some constants.
        const uint32_t cWidth = 640;
        const uint32_t cHeight = 480;

        // The image format converter basics.
        {
            // First the image format converter class must be created.
            CImageFormatConverter converter;

            // Second the converter must be parameterized.
            converter.OutputPixelFormat = PixelType_Mono16;
            converter.OutputBitAlignment = OutputBitAlignment_MsbAligned;

            // Try to set MaxNumThreads to maximum supported value.
            // Values larger than 1 will enable multithreaded image format conversion.

            // Set to 1 to disable multithreaded image format conversion.
            //converter.MaxNumThreads = 1;

            // Set to maximum value to enable all available cores for multithreaded image format conversion.
            converter.MaxNumThreads.TrySetToMaximum();

            // Then it can be used to convert input images to
            // the target image format.

            // Create a sample image.
            CPylonImage imageRGB8packed = SampleImageCreator::CreateMandelbrotFractal( PixelType_RGB8packed, cWidth, cHeight );
            ShowImage( imageRGB8packed, "Source image." );

            // Create a target image.
            CPylonImage targetImage;

            // Convert the image. Note that there are more overloaded Convert methods available, e.g.
            // for converting the image from or to a user buffer.
            converter.Convert( targetImage, imageRGB8packed );
            ShowImage( targetImage, "Converted image." );
        }


        // Checking if conversion is needed.
        {
            // Create a target image.
            CPylonImage targetImage;

            // Create the converter and set parameters.
            CImageFormatConverter converter;
            converter.OutputPixelFormat = PixelType_Mono8;

            // Try to get a grab result for demonstration purposes.
            cout << endl << "Waiting for an image to be grabbed." << endl;
            try
            {
                // This smart pointer will receive the grab result data.
                CGrabResultPtr ptrGrabResult;
                CInstantCamera Camera( CTlFactory::GetInstance().CreateFirstDevice() );

                if (Camera.GrabOne( 1000, ptrGrabResult ))
                {
                    // Now we can check if conversion is required.
                    if (converter.ImageHasDestinationFormat( ptrGrabResult ))
                    {
                        // No conversion is needed. It can be skipped to save
                        // processing time.
                        ShowImage( ptrGrabResult, "Grabbed image." );
                    }
                    else
                    {
                        // Conversion is needed.
                        ShowImage( ptrGrabResult, "Grabbed image." );
                        converter.Convert( targetImage, ptrGrabResult );
                        ShowImage( targetImage, "Converted image." );
                    }
                }
            }
            catch (const GenericException& e)
            {
                cerr << "Could not grab an image: " << endl
                    << e.GetDescription() << endl;
            }
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment out the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
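
The comment in the first section of the sample mentions overloaded Convert() methods for user buffers without showing one. The following minimal sketch is not part of the sample solution and illustrates the variant that writes into a caller-allocated buffer. Here, sourceImage stands for any IImage, e.g., the sample's imageRGB8packed; check the GetBufferSizeForConversion() and Convert( void*, size_t, const IImage& ) signatures against the CImageFormatConverter documentation of your pylon version.

// Minimal sketch: convert into a user-supplied buffer instead of a CPylonImage.
// 'sourceImage' stands for any IImage; check the CImageFormatConverter
// documentation for the exact overloads available in your pylon version.
#include <vector>

CImageFormatConverter converter;
converter.OutputPixelFormat = PixelType_Mono8;

// Ask the converter how many bytes the output needs and allocate them.
size_t bufferSize = converter.GetBufferSizeForConversion( sourceImage );
std::vector<uint8_t> destinationBuffer( bufferSize );

// Convert directly into the user buffer.
converter.Convert( destinationBuffer.data(), destinationBuffer.size(), sourceImage );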

Utility_ImageLoadAndSave#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Utility_ImageLoadAndSave.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to load and save images.

    The CImagePersistence class provides static functions for
    loading and saving images. It uses the image
    class related interfaces IImage and IReusableImage of pylon.

    IImage can be used to access image properties and the image buffer.
    Therefore, it is used when saving images. In addition, images can also be
    saved by passing an image buffer and the corresponding properties.

    The IReusableImage interface extends the IImage interface to be able to reuse
    the resources of the image to represent a different image. The IReusableImage
    interface is used when loading images.

    The CPylonImage and CPylonBitmapImage image classes implement the
    IReusableImage interface. These classes can therefore be used as targets
    for loading images.

    The grab result smart pointer classes provide a cast operator to the IImage
    interface. This makes it possible to pass a grab result directly to the
    function that saves images to disk.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#include "../include/SampleImageCreator.h"

// Include file to use cout.
#include <iostream>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using GenApi objects.
using namespace GenApi;

// Namespace for using cout.
using namespace std;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonInitialize();

    try
    {
        // Define some constants.
        const uint32_t cWidth = 640;
        const uint32_t cHeight = 480;

        // Saving images using the CImagePersistence class.
        {
            // Create a sample image.
            CPylonImage imageRGB16packed = SampleImageCreator::CreateMandelbrotFractal( PixelType_RGB16packed, cWidth, cHeight );

            // If required, the image is automatically converted to a new image and then saved.
            // An image with a bit depth higher than 8 bits is stored with a bit depth
            // of 16 bits if supported by the image file format. In this case, the pixel
            // data is MSB aligned.
            // If more control over the conversion is required, the CImageFormatConverter
            // class can be used to convert the input image before saving it (not shown).
            CImagePersistence::Save( ImageFileFormat_Tiff, "MandelbrotFractal.tiff", imageRGB16packed );

            cout << "The image " << (CImagePersistence::CanSaveWithoutConversion( ImageFileFormat_Tiff, imageRGB16packed ) ? "can" : "can not")
                << " be saved without conversion as tiff." << endl;

#ifdef PYLON_WIN_BUILD
            // The CPylonImage and the CPylonBitmapImage classes provide a member function
            // for saving images for convenience. This function calls CImagePersistence::Save().
            imageRGB16packed.Save( ImageFileFormat_Bmp, "MandelbrotFractal.bmp" );

            // CanSaveWithoutConversion() can be used to check whether a conversion is performed when saving the image.
            cout << "The image " << (CImagePersistence::CanSaveWithoutConversion( ImageFileFormat_Bmp, imageRGB16packed ) ? "can" : "can not")
                << " be saved without conversion as bmp." << endl;
#endif

            // Additionally, it is possible to save image data that is not held by an image class.
            // For demonstration purposes only, the buffer and the image properties from the sample image are used here.
            EPixelType pixelType = imageRGB16packed.GetPixelType();
            uint32_t width = imageRGB16packed.GetWidth();
            uint32_t height = imageRGB16packed.GetHeight();
            size_t paddingX = imageRGB16packed.GetPaddingX();
            EImageOrientation orientation = imageRGB16packed.GetOrientation();
            size_t bufferSize = imageRGB16packed.GetImageSize();
            void* buffer = imageRGB16packed.GetBuffer();

            CImagePersistence::Save(
                ImageFileFormat_Png,
                "MandelbrotFractal.png",
                buffer,
                bufferSize,
                pixelType,
                width,
                height,
                paddingX,
                orientation );
        }


        // Loading images.
        {
            // Create pylon images.
            CPylonImage imageRGB16packedFromTiff;
            CPylonImage imageBGR8packedFromBmp;

            // Load the TIFF image directly via CImagePersistence::Load().
            CImagePersistence::Load( "MandelbrotFractal.tiff", imageRGB16packedFromTiff );
            cout << "The pixel type of the image is " << (imageRGB16packedFromTiff.GetPixelType() == PixelType_RGB16packed ? "" : "not ")
                << "RGB16packed." << endl;

#ifdef PYLON_WIN_BUILD
            // The CPylonImage and the CPylonBitmapImage classes provide a member function
            // for loading images for convenience. This function calls CImagePersistence::Load().
            imageBGR8packedFromBmp.Load( "MandelbrotFractal.bmp" );

            // The image loaded from the bmp file is BGR8packed instead of the original
            // RGB16packed format because the image had to be converted to be saved in the bmp format.
            cout << "The pixel type of the image is " << (imageBGR8packedFromBmp.GetPixelType() == PixelType_BGR8packed ? "" : "not ")
                << "BGR8packed." << endl;
#endif

        }

// JPEG handling is only supported on Windows.
#ifdef PYLON_WIN_BUILD
        // Selecting the image quality when saving in JPEG format.
        {
            // Create a sample image.
            CPylonImage imageRGB8packed = SampleImageCreator::CreateMandelbrotFractal( PixelType_RGB8packed, cWidth, cHeight );

            // The JPEG image quality can be adjusted in the range from 0 to 100.
            CImagePersistenceOptions additionalOptions;
            // Set the lowest quality value.
            additionalOptions.SetQuality( 0 );

            // Save the image.
            CImagePersistence::Save( ImageFileFormat_Jpeg, "MandelbrotFractal_0.jpg", imageRGB8packed, &additionalOptions );

            // Set the highest quality value.
            additionalOptions.SetQuality( 100 );

            // Save the image.
            CImagePersistence::Save( ImageFileFormat_Jpeg, "MandelbrotFractal_100.jpg", imageRGB8packed, &additionalOptions );
        }
#endif

        // Saving grabbed images.
        {
            // Try to get a grab result.
            cout << endl << "Waiting for an image to be grabbed." << endl;
            try
            {
                // This smart pointer will receive the grab result data.
                CGrabResultPtr ptrGrabResult;
                CInstantCamera Camera( CTlFactory::GetInstance().CreateFirstDevice() );

                if (Camera.GrabOne( 1000, ptrGrabResult ))
                {
                    // The pylon grab result smart pointer classes provide a cast operator to the IImage
                    // interface. This makes it possible to pass a grab result directly to the
                    // function that saves an image to disk.
                    CImagePersistence::Save( ImageFileFormat_Png, "GrabbedImage.png", ptrGrabResult );
                }
            }
            catch (const GenericException& e)
            {
                cerr << "Could not grab an image: " << endl
                    << e.GetDescription() << endl;
            }
        }
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment out the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // Releases all pylon resources.
    PylonTerminate();

    return exitCode;
}
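
As a usage note, the save and load paths of the sample can be combined into a round-trip check. This minimal sketch is not part of the sample solution; it uses only calls shown above to save an image as PNG and load it back to confirm that the dimensions survived.

// Minimal sketch: PNG save/load round trip using calls from the sample above.
CPylonImage original = SampleImageCreator::CreateMandelbrotFractal( PixelType_RGB8packed, 640, 480 );
CImagePersistence::Save( ImageFileFormat_Png, "RoundTrip.png", original );

CPylonImage reloaded;
CImagePersistence::Load( "RoundTrip.png", reloaded );

// PNG is lossless, so the basic image properties are expected to match.
cout << "Dimensions match: "
    << ((reloaded.GetWidth() == original.GetWidth() && reloaded.GetHeight() == original.GetHeight()) ? "yes" : "no")
    << endl;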

Utility_IpConfig#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Utility_IpConfig.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample demonstrates how to configure the IP address of a camera.
*/

// Include files to use the pylon API.
#include <pylon/PylonIncludes.h>
#include <pylon/gige/GigETransportLayer.h>
#include <algorithm>
#include <iostream>
#include <iomanip>
#include <cstring>
#include <cctype>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;



int main( int argc, char* argv[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonAutoInitTerm autoInitTerm;

    try
    {
        // Create GigE transport layer.
        CTlFactory& TlFactory = CTlFactory::GetInstance();
        IGigETransportLayer* pTl = dynamic_cast<IGigETransportLayer*>(TlFactory.CreateTl( Pylon::BaslerGigEDeviceClass ));
        if (pTl == NULL)
        {
            cerr << "Error: No GigE transport layer installed." << endl;
            cerr << "       Please install GigE support as it is required for this sample." << endl;
            return 1;
        }

        // Enumerate devices.
        DeviceInfoList_t lstDevices;
        pTl->EnumerateAllDevices( lstDevices );

        // Check if enough parameters are given.
        if (argc < 3)
        {
            // Print usage information.
            cout << "Usage: Utility_IpConfig <MAC> <IP> [MASK] [GATEWAY]" << endl;
            cout << "       <MAC> is the MAC address without separators, e.g., 0030531596CF" << endl;
            cout << "       <IP> is one of the following:" << endl;
            cout << "            - AUTO to use Auto-IP (LLA)." << endl;
            cout << "            - DHCP to use DHCP." << endl;
            cout << "            - Everything else is interpreted as a new IP address in dotted notation, e.g., 192.168.1.1" << endl;
            cout << "       [MASK] is the network mask in dotted notation. This is optional. 255.255.255.0 is used as default." << endl;
            cout << "       [GATEWAY] is the gateway address in dotted notation. This is optional. 0.0.0.0 is used as default." << endl;
            cout << "Please note that this is a sample and no sanity checks are made." << endl;
            cout << endl;

            // Print header for information table.
            cout << left << setfill( ' ' );
            cout << endl;
            cout.width( 32 + 14 + 17 + 17 + 15 + 8 );
            cout << "Available Devices";
            cout.width( 15 );
            cout << "   supports " << endl;
            cout.width( 32 );
            cout << "Friendly Name";
            cout.width( 14 );
            cout << "MAC";
            cout.width( 17 );
            cout << "IP Address";
            cout.width( 17 );
            cout << "Subnet Mask";
            cout.width( 15 );
            cout << "Gateway";
            cout.width( 8 );
            cout << "Mode";
            cout.width( 4 );
            cout << "IP?";
            cout.width( 6 );
            cout << "DHCP?";
            cout.width( 5 );
            cout << "LLA?";
            cout << endl;

            // Print information table.
            for (DeviceInfoList_t::const_iterator it = lstDevices.begin(); it != lstDevices.end(); ++it)
            {
                // Determine current configuration mode.
                String_t activeMode;
                if (it->IsPersistentIpActive())
                {
                    activeMode = "Static";
                }
                else if (it->IsDhcpActive())
                {
                    activeMode = "DHCP";
                }
                else
                {
                    activeMode = "AutoIP";
                }

                cout.width( 32 );
                cout << it->GetFriendlyName();
                cout.width( 14 );
                cout << it->GetMacAddress();
                cout.width( 17 );
                cout << it->GetIpAddress();
                cout.width( 17 );
                cout << it->GetSubnetMask();
                cout.width( 15 );
                cout << it->GetDefaultGateway();
                cout.width( 8 );
                cout << activeMode;
                cout.width( 4 );
                cout << it->IsPersistentIpSupported();
                cout.width( 6 );
                cout << it->IsDhcpSupported();
                cout.width( 5 );
                cout << it->IsAutoIpSupported();
                cout << endl;
            }

            exitCode = 1;
        }
        else
        {
            // Read arguments. Note that sanity checks are skipped for clarity.
            String_t macAddress = argv[1];
            String_t ipAddress = argv[2];
            String_t subnetMask = "255.255.255.0";
            if (argc >= 4)
            {
                subnetMask = argv[3];
            }
            String_t defaultGateway = "0.0.0.0";
            if (argc >= 5)
            {
                defaultGateway = argv[4];
            }

            // Check if configuration mode is AUTO, DHCP, or IP address.
            bool isAuto = (strcmp( argv[2], "AUTO" ) == 0);
            bool isDhcp = (strcmp( argv[2], "DHCP" ) == 0);
            bool isStatic = !isAuto && !isDhcp;

            // Find the camera's user-defined name.
            String_t userDefinedName = "";
            for (DeviceInfoList_t::const_iterator it = lstDevices.begin(); it != lstDevices.end(); ++it)
            {
                if (macAddress == it->GetMacAddress())
                {
                    userDefinedName = it->GetUserDefinedName();
                }
            }

            // Set new IP configuration.
            bool setOk = pTl->BroadcastIpConfiguration( macAddress, isStatic, isDhcp,
                                                        ipAddress, subnetMask, defaultGateway, userDefinedName );

            // Show result message.
            if (setOk)
            {
                pTl->RestartIpConfiguration( macAddress );
                cout << "Successfully changed IP configuration via broadcast for device " << macAddress << " to " << ipAddress << endl;
            }
            else
            {
                cout << "Failed to change IP configuration via broadcast for device " << macAddress << endl;
                cout << "This is not an error. The device may not support broadcast IP configuration." << endl;
            }
        }

        // Comment out the following two lines to disable waiting on exit.
        cerr << endl << "Press enter to exit." << endl;
        while (cin.get() != '\n');

        // Release transport layer.
        TlFactory.ReleaseTl( pTl );
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    return exitCode;
}
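
For example, assigning a static IP address to the device with the example MAC address from the usage text could look like the following invocation. All values are placeholders; adapt them to your network.

Utility_IpConfig 0030531596CF 192.168.1.2 255.255.255.0 192.168.1.254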

Utility_InstantInterface#

This sample is part of the pylon sample solution that can be found under <⁠SDK ROOT>\Development\Samples\C++.

// Utility_InstantInterface.cpp
/*
    Note: Before getting started, Basler recommends reading the "Programmer's Guide" topic
    in the pylon C++ API documentation delivered with pylon.
    If you are upgrading to a higher major version of pylon, Basler also
    strongly recommends reading the "Migrating from Previous Versions" topic in the pylon C++ API documentation.

    This sample illustrates how to use CInstantInterface to access parameters of the interface.
    Using the Basler CXP-12 interface card as an example, the sample shows you how to access the Power-Over-CXP settings
    and monitor the power usage.
*/

// Include file to use the pylon API.
#include <pylon/PylonIncludes.h>

// Include file to use cout
#include <iostream>

// Namespace for using pylon objects.
using namespace Pylon;

// Namespace for using cout.
using namespace std;


int main( int /*argc*/, char* /*argv*/[] )
{
    // The exit code of the sample application.
    int exitCode = 0;

    // Before using any pylon methods, the pylon runtime must be initialized.
    PylonAutoInitTerm autoInitTerm;

    try
    {
        // Open the first interface on the CXP interface card.
        CInterfaceInfo info;
        info.SetDeviceClass( Pylon::BaslerGenTlCxpDeviceClass );

        CUniversalInstantInterface instantInterface( info );
        instantInterface.Open();
        cout << "Interface opened." << endl;

        cout << " ExternalPowerPresent: ";
        if (instantInterface.ExternalPowerPresent.GetValue())
        {
            cout << "yes" << endl;

            cout << " Switching power OFF." << endl;
            instantInterface.CxpPoCxpTurnOff.Execute();
            WaitObject::Sleep( 1000 );

            cout << " Switching power ON." << endl;
            instantInterface.CxpPoCxpAuto.Execute();
            // Wait for 5000 ms (5 s) to allow the camera to start up again.
            WaitObject::Sleep( 5000 );

            cout << " Updating device list." << endl;
            instantInterface.DeviceUpdateList.Execute();

            double current = instantInterface.CxpPort0Current();
            double voltage = instantInterface.CxpPort0Voltage();
            double power = instantInterface.CxpPort0Power();
            cout << fixed;
            cout.precision( 2 );
            cout << "  Port 0 :" << endl;
            cout << "   Current " << current << " mA" << endl;
            cout << "   Voltage " << voltage << " V" << endl;
            cout << "   Power " << power << " W" << endl << endl;
        }
        else
        {
            cout << "no" << endl;
        }

        instantInterface.Close();
    }
    catch (const GenericException& e)
    {
        // Error handling.
        cerr << "An exception occurred." << endl
            << e.GetDescription() << endl;
        exitCode = 1;
    }

    // Comment the following two lines to disable waiting on exit.
    cerr << endl << "Press enter to exit." << endl;
    while (cin.get() != '\n');

    // All pylon resources are automatically released when autoInitTerm goes out of scope.

    return exitCode;
}