FLI libflipro API
StreamingAndHWMerge.cpp

Streaming and Using HW Merge Example Code.
This example shows how to create a streaming application that uses the hardware merging capabilities of the host PCIE Fibre card.
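At a glance, the example walks through the following libflipro call sequence (a condensed outline of the full listing that follows):

// Condensed call sequence of this example:
//   FPROCam_GetCameraList(), FPROCam_Open()              - enumerate and connect to a camera
//   FPROAlgo_SetHardwareMergeReferenceFiles()            - load DSNU/PRNU reference data
//   FPROCtrl_SetExposure(), FPROFrame_SetImageArea()     - exposure and frame setup
//   FPROAlgo_SetHardwareMergeEnables()                   - select merge frames and output format
//   FPROFrame_ComputeFrameSize()                         - size the streamed frames
//   FPROFrame_StreamInitialize(), FPROFrame_StreamStart()
//   FPROFrame_StreamGetStatistics()                      - poll until all frames are on disk
//   FPROFrame_StreamStop(), FPROFrame_StreamDeinitialize(), FPROCam_Close()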

#include "stdint.h"
#include "stdlib.h"
#include "stdio.h"
#if defined(_WIN32) || defined(_WINDOWS)
#include "windows.h"
#else
#include "unistd.h"
#include "string.h"
#endif
#include "wchar.h"
#include "libflipro.h"
#define FLI_TEST_MAX_SUPPORTED_CAMERAS (4)
// Static Function declarations
static int32_t SetFrameInfo(int32_t iDeviceHandle);
static void HWMergeNormal(FPRO_HWMERGEFRAMES eFrames, FPRO_IMAGE_FORMAT eFormat);
static int32_t CheckReferenceMetaData(FILE* pFile);
static int32_t SendHWMergeReferenceFiles(int32_t iHandle, const char* pDSNUFileName, const char* pPRNUFileName, uint32_t uiWidth, uint32_t uiHeight);
// Static Data declarations
static int32_t s_iDeviceHandle;
uint32_t uiNumDetectedDevices;
static FPRODEVICEINFO s_camDeviceInfo[FLI_TEST_MAX_SUPPORTED_CAMERAS];
#define STREAMER_PATH_MAX (1024)
static wchar_t s_wcStreamerPath[STREAMER_PATH_MAX];
static wchar_t s_wcStreamerPrefix[STREAMER_PATH_MAX];
int main()
{
int32_t iResult;
// first get the list of available devices
uiNumDetectedDevices = FLI_TEST_MAX_SUPPORTED_CAMERAS;
iResult = FPROCam_GetCameraList(s_camDeviceInfo, &uiNumDetectedDevices);
if ((iResult >= 0) && (uiNumDetectedDevices > 0))
{
// Open the first device in the list
s_iDeviceHandle = -1;
iResult = FPROCam_Open(&s_camDeviceInfo[0], &s_iDeviceHandle);
if ((iResult >= 0) && (s_iDeviceHandle >= 0))
{
// For merging, you will need to send the appropriate reference frames
// to the PCIE card in order to get the desired merge results. By default, the API initializes the
// hardware with identity frames for both operations. If you have generated Reference Files
// using the FLI Pilot Application, you can call the FPROAlgo_SetHardwareMergeReferenceFiles()
// API to initialize the reference frames. Passing NULL for a file name will load identity
// reference frames (0 for DSNU and 1 for PRNU).
//iResult= FPROAlgo_SetHardwareMergeReferenceFiles(s_iDeviceHandle, NULL, NULL);
iResult= FPROAlgo_SetHardwareMergeReferenceFiles(s_iDeviceHandle, L"./DSNUFrameCalculation-DSNU_RCDData_1.rcd", L"./DSNUFrameCalculation-PRNU_RCDData_1.rcd");
// Alternatively, there is the SendHWMergeReferenceFiles example function below that can guide
// you on how to extract the reference data from your own reference frames and call the
// FPROAlgo_SetHardwareMergeReferenceFrames() API.
// This example function expects reference files generated by the FLI Pilot application, but if you
// generate your own files, it should provide the guidance needed to build the necessary structures correctly.
//SendHWMergeReferenceFiles(s_iDeviceHandle, ".\\DSNU.rcd", ".\\PRNU.rcd", 4096, 4096);
// You can also adjust the merge algorithm with the following function.
// This function is optional as default thresholds are already set up in the API.
// See the documentation for a description of this function and parameters.
// FPROAlgo_SetHardwareMergeThresholds(int32_t iHandle, uint16_t uiHighGainThreshold, uint16_t uiMergeDifferenceThreshold);
// The above functions must be called when you open a new connection to the camera. Once you have the algorithms set up
// for your specific imaging parameters, you can start and stop streams as done in this example. If you change imaging
// parameters, it is a good idea to set up the algorithms as above before streaming again.
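// A minimal sketch of an explicit thresholds call matching the signature above - the numeric
// values here are hypothetical placeholders only, not recommended settings:
//iResult = FPROAlgo_SetHardwareMergeThresholds(s_iDeviceHandle, 3000, 100);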
// Set up your exposure and frame parameters
if (iResult >= 0)
iResult = SetFrameInfo(s_iDeviceHandle);
// This call performs a 'normal' merge: the low gain and high
// gain images are merged using the Reference Frame algorithm and
// stored as RCD files (the file type native to the FLI Cameras
// and Pilot application).
HWMergeNormal(FPRO_HWMERGEFRAMES::HWMERGE_FRAME_BOTH, FPRO_IMAGE_FORMAT::IFORMAT_RCD);
// This call performs a 'normal' merge: the low gain and high
// gain images are merged using the Reference Frame algorithm and
// stored as TIFF files.
HWMergeNormal(FPRO_HWMERGEFRAMES::HWMERGE_FRAME_BOTH, FPRO_IMAGE_FORMAT::IFORMAT_TIFF);
// This call performs a 'merge' using only the corrected low gain
// frame and stores the results as TIFF.
HWMergeNormal(FPRO_HWMERGEFRAMES::HWMERGE_FRAME_LOWONLY, FPRO_IMAGE_FORMAT::IFORMAT_TIFF);
// This call performs a 'merge' using only the corrected high gain
// frame and stores the results as TIFF.
HWMergeNormal(FPRO_HWMERGEFRAMES::HWMERGE_FRAME_HIGHONLY, FPRO_IMAGE_FORMAT::IFORMAT_TIFF);
}
// Close up shop
iResult = FPROCam_Close(s_iDeviceHandle);
}
return 0;
}
// The merging function
void HWMergeNormal(FPRO_HWMERGEFRAMES eFrames, FPRO_IMAGE_FORMAT eFormat)
{
int32_t iResult;
uint32_t uiFrameSizeInBytes;
uint32_t uiNumFrames;
FPROSTREAMSTATS streamStats;
FPRO_HWMERGEENABLE mergeEnables;
// Here we enable the specific HW merge when using the FPROMERGE_ALGO_REF_FRAME algorithm
mergeEnables.bMergeEnable = true;
mergeEnables.eMergeFrames = eFrames;
mergeEnables.eMergeFormat = eFormat;
iResult = FPROAlgo_SetHardwareMergeEnables(s_iDeviceHandle, mergeEnables);
if (iResult >= 0)
{
// Make sure we have space for the image frame.
// To facilitate calculating the actual byte size of the data you will receive for an image, the
// API provides the FPROFrame_ComputeFrameSize() function. Note that before you use this function,
// ALL imaging parameters and Hardware Merge configuration must be set. This is because the
// call uses the actual camera and Hardware Merge settings to determine the size of the resulting image data.
uiFrameSizeInBytes = FPROFrame_ComputeFrameSize(s_iDeviceHandle);
if (uiFrameSizeInBytes > 0)
{
// all is well - Now you can get the streamer interface started
// First initialize the Streamer. You need to pass a path to where you want the
// streamed files stored, and optionally a prefix for each file. A time stamped
// directory will be created in the path directory you specify. The prefix will be prepended
// to each file that is stored.
uiNumFrames = 10;
#if defined(_WIN32) || defined(_WINDOWS)
wcscpy_s(s_wcStreamerPath, STREAMER_PATH_MAX, L"./StreamerFiles");
wcscpy_s(s_wcStreamerPrefix, STREAMER_PATH_MAX, L"myFiles");
#else
wcscpy(s_wcStreamerPath, L"./StreamerFiles");
wcscpy(s_wcStreamerPrefix, L"myFiles");
#endif
iResult = FPROFrame_StreamInitialize(s_iDeviceHandle, uiFrameSizeInBytes, s_wcStreamerPath, s_wcStreamerPrefix);
if (iResult >= 0)
{
// Successful initialization
// Start the streamer
// Here you pass the number of images to stream (10 in this case).
// Passing 0 will run forever until you stop it.
// The Frame Interval works similarly to the timeout you specify in the
// FPROFrame_GetVideoFrame() call. The API needs to know when to expect frames.
// Typically you just set this to the exposure time + inter-frame delay you have set up.
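// Note: the 50 msec interval passed below pairs with the 50 msec exposure configured in
// SetFrameInfo(); if you lengthen the exposure or the inter-frame delay (10 msecs in this
// example), increase this interval accordingly.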
iResult = FPROFrame_StreamStart(s_iDeviceHandle, uiNumFrames, 50);
if (iResult >= 0)
{
// The streaming process has started, now you wait until it is done
// Checking for completion is a bit tricky: checking for a status of FPRO_STREAMER_STOPPED
// is insufficient. There are underlying threads writing the files to the physical disk.
// You must make sure that all of the streamed files have been written before stopping the
// streamer.
iResult = FPROFrame_StreamGetStatistics(s_iDeviceHandle, &streamStats);
while ((iResult >= 0) &&
(streamStats.iStatus != FPROSTREAMERSTATUS::FPRO_STREAMER_STOPPED_ERROR) &&
(!((streamStats.iStatus == FPROSTREAMERSTATUS::FPRO_STREAMER_STOPPED) && (streamStats.uiDiskFramesWritten == uiNumFrames))))
{
// Check the stats again- you can check as often as you like
#if defined(_WIN32) || defined(_WINDOWS)
Sleep(1000);
#else
sleep(1);
#endif
iResult = FPROFrame_StreamGetStatistics(s_iDeviceHandle, &streamStats);
printf("\rNum Written= %lld", streamStats.uiDiskFramesWritten);
fflush(stdout);
}
printf("\n");
// check the reasons for leaving the loop
if ((iResult < 0) || (streamStats.iStatus == FPROSTREAMERSTATUS::FPRO_STREAMER_STOPPED_ERROR))
{
printf("Stream Error\n");
}
// stop the stream
FPROFrame_StreamStop(s_iDeviceHandle);
// You may start the stream up again without deinitializing.
// But if you want to change the frame size, path, or prefix,
// you must deinitialize and then re-initialize with the
// new settings.
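// For example, another batch with the same frame size, path, and prefix could be
// captured here by simply repeating the FPROFrame_StreamStart() /
// FPROFrame_StreamGetStatistics() / FPROFrame_StreamStop() sequence above.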
}
// Deinitialize the stream
FPROFrame_StreamDeinitialize(s_iDeviceHandle);
}
}
}
}
int32_t
SetFrameInfo(int32_t iDeviceHandle)
{
int32_t iResult;
// assume success
iResult = 0;
// Set the exposure time
// The default camera exposure time is 50 msecs (for the GSENSE 400).
// The FPROCtrl_SetExposure() API expects the exposure time in
// nanoseconds. The frame delay parameter is also in nanoseconds.
if (iResult >= 0)
iResult = FPROCtrl_SetExposure(iDeviceHandle, 50000000, 10000000, false);
// Set the Image area
// By default, the camera sets its image area to its maximum values.
// For the GSENSE 4040 model, that is 4096 columns x 4096 rows
// But if you were to change the values this is how you would do it.
if (iResult >= 0)
iResult = FPROFrame_SetImageArea(iDeviceHandle, 0, 0, 4096, 4096);
// return our result
return(iResult);
}
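// Helper that validates and skips the meta data header at the start of an FLI Pilot RCD
// reference file. As handled below, the header begins with a 4 byte "Meta" marker followed
// by a 2 byte big-endian meta data size; on success the file position is left at the first
// byte of reference frame data.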
int32_t CheckReferenceMetaData(FILE* pFile)
{
int32_t iResult;
size_t uiBytesRead;
uint32_t uiMetaSize;
uint32_t uiFileSize;
uint8_t uiMetaCheck[6];
iResult = -1;
fseek(pFile, 0L, SEEK_END);
uiFileSize = ftell(pFile);
fseek(pFile, 0L, SEEK_SET);
#if defined(_WIN32) || defined(_WINDOWS)
uiBytesRead = fread_s(uiMetaCheck, 6, 1, 6, pFile);
#else
uiBytesRead= fread(uiMetaCheck, 1, 6, pFile);
#endif
if (uiBytesRead == 6)
{
if ((uiMetaCheck[0] == 'M') && (uiMetaCheck[1] == 'e') && (uiMetaCheck[2] == 't') && (uiMetaCheck[3] == 'a'))
{
uiMetaSize = ((uint32_t)uiMetaCheck[4] & 0xFF) << 8;
uiMetaSize |= (uint32_t)uiMetaCheck[5] & 0xFF;
if (uiMetaSize < uiFileSize)
{
// looks like a valid file with meta data so seek past the meta data
iResult = 0;
fseek(pFile, uiMetaSize, SEEK_SET);
}
}
}
return(iResult);
}
// This is an example function to send HW Merge reference frames from RCD files created by the
// FLI Pilot application. It is provided as an example to help you
// extract the data from the RCD files for the FPRO_REFFRAMES structure required by the API.
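// As read below, each file is expected to contain (after the meta data header) two full
// reference planes stored back to back - the low gain plane followed immediately by the
// high gain plane, each uiWidth x uiHeight 16-bit values.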
int32_t SendHWMergeReferenceFiles(int32_t iHandle, const char* pDSNUFileName, const char* pPRNUFileName, uint32_t uiWidth, uint32_t uiHeight)
{
int32_t iResult;
FILE* pDSNUFile;
FILE* pPRNUFile;
FPRO_REFFRAMES refFrames;
uint32_t uiFrameSize;
size_t uiBytesRead;
iResult = 0;
memset(&refFrames, 0, sizeof(refFrames));
// Open the files and
// Check the meta data and seek past it
pDSNUFile = NULL;
pPRNUFile = NULL;
if (pDSNUFileName)
{
#if defined(_WIN32) || defined(_WINDOWS)
fopen_s(&pDSNUFile, pDSNUFileName, "r+b");
#else
pDSNUFile= fopen(pDSNUFileName, "r+b");
#endif
if (pDSNUFile)
iResult = CheckReferenceMetaData(pDSNUFile);
else
iResult = -1;
}
if ((pPRNUFileName) && (iResult >= 0))
{
#if defined(_WIN32) || defined(_WINDOWS)
fopen_s(&pPRNUFile, pPRNUFileName, "r+b");
#else
pPRNUFile= fopen(pPRNUFileName, "r+b");
#endif
if (pPRNUFile)
iResult = CheckReferenceMetaData(pPRNUFile);
else
iResult = -1;
}
if (iResult >= 0)
{
// Set up the width and height - should be the full image size (e.g. 4096 x 4096)
iResult = 0;
refFrames.uiWidth = uiWidth;
refFrames.uiHeight = uiHeight;
// Each reference plane is uiWidth x uiHeight 16-bit values
uiFrameSize = uiWidth * uiHeight * sizeof(uint16_t);
if (pDSNUFile)
{
// Get space for the files
refFrames.pAdditiveLowGain = new int16_t[uiWidth * uiHeight];
refFrames.pAdditiveHighGain = new int16_t[uiWidth * uiHeight];
if ((refFrames.pAdditiveLowGain) && (refFrames.pAdditiveHighGain))
{
// Read the DSNU (additive) data
#if defined(_WIN32) || defined(_WINDOWS)
uiBytesRead = fread_s(refFrames.pAdditiveLowGain, uiFrameSize, 1, uiFrameSize, pDSNUFile);
#else
uiBytesRead= fread(refFrames.pAdditiveLowGain, 1, uiFrameSize, pDSNUFile);
#endif
if (uiBytesRead != uiFrameSize)
iResult = -1;
if (iResult >= 0)
{
// The high gain data starts immediately after the low gain data
#if defined(_WIN32) || defined(_WINDOWS)
uiBytesRead = fread_s(refFrames.pAdditiveHighGain, uiFrameSize, 1, uiFrameSize, pDSNUFile);
#else
uiBytesRead= fread(refFrames.pAdditiveHighGain, 1, uiFrameSize, pDSNUFile);
#endif
if (uiBytesRead != uiFrameSize)
iResult = -1;
}
}
}
if (pPRNUFile)
{
refFrames.pMultiplicativeLowGain = new uint16_t[uiWidth * uiHeight];
refFrames.pMultiplicativeHighGain = new uint16_t[uiWidth * uiHeight];
if ((refFrames.pMultiplicativeLowGain) && (refFrames.pMultiplicativeHighGain))
{
// Read the PRNU (multiplicative) data
if (iResult >= 0)
{
#if defined(_WIN32) || defined(_WINDOWS)
uiBytesRead = fread_s(refFrames.pMultiplicativeLowGain, uiFrameSize, 1, uiFrameSize, pPRNUFile);
#else
uiBytesRead= fread(refFrames.pMultiplicativeLowGain, 1, uiFrameSize, pPRNUFile);
#endif
if (uiBytesRead != uiFrameSize)
iResult = -1;
}
if (iResult >= 0)
{
// The high gain data starts immediately after the low gain data
#if defined(_WIN32) || defined(_WINDOWS)
uiBytesRead = fread_s(refFrames.pMultiplicativeHighGain, uiFrameSize, 1, uiFrameSize, pPRNUFile);
#else
uiBytesRead= fread(refFrames.pMultiplicativeHighGain, 1, uiFrameSize, pPRNUFile);
#endif
if (uiBytesRead != uiFrameSize)
iResult = -1;
}
}
}
// Finally we can call the API to send the reference frames
if (iResult >= 0)
{
// If NULL is passed down for the reference frame pointers, Identity
// reference frames are sent down (i.e. no correction during merge)
iResult = FPROAlgo_SetHardwareMergeReferenceFrames(iHandle, &refFrames);
}
// give the memory back
if (refFrames.pAdditiveLowGain)
delete[]refFrames.pAdditiveLowGain;
if (refFrames.pAdditiveHighGain)
delete[]refFrames.pAdditiveHighGain;
if (refFrames.pMultiplicativeLowGain)
delete[]refFrames.pMultiplicativeLowGain;
if (refFrames.pMultiplicativeHighGain)
delete[]refFrames.pMultiplicativeHighGain;
if (pDSNUFile)
fclose(pDSNUFile);
if (pPRNUFile)
fclose(pPRNUFile);
}
return(iResult);
}
Finger Lakes Instrumentation Camera API.
LIBFLIPRO_API FPROAlgo_SetHardwareMergeEnables(int32_t iHandle, FPRO_HWMERGEENABLE mergeEnables)
Enable/disable hardware merging options.
LIBFLIPRO_API FPROFrame_StreamDeinitialize(int32_t iHandle)
Deinitializes the Streamer interfaces.
LIBFLIPRO_API FPROAlgo_SetHardwareMergeReferenceFiles(int32_t iHandle, const wchar_t *pDSNUFile, const wchar_t *pPRNUFile)
Sets the reference frames used in PCIE Fibre hardware image merging.
LIBFLIPRO_API FPROCam_Open(FPRODEVICEINFO *pDevInfo, int32_t *pHandle)
Connects to the camera specified by the pDevInfo parameter.
LIBFLIPRO_API FPROCam_Close(int32_t iHandle)
Disconnects from the camera and releases the handle.
LIBFLIPRO_API FPROAlgo_SetHardwareMergeReferenceFrames(int32_t iHandle, FPRO_REFFRAMES *pRefFrames)
Sets the reference frames used in PCIE Fibre hardware image merging.
FPRO_IMAGE_FORMAT
Output Frame Formats for image merging and conversion.
Definition: libflipro.h:1349
@ IFORMAT_RCD
FLI native RCD Frame.
Definition: libflipro.h:1351
@ IFORMAT_TIFF
TIFF Formatted image.
Definition: libflipro.h:1352
LIBFLIPRO_API FPROFrame_StreamInitialize(int32_t iHandle, uint32_t uiFrameSizeBytes, wchar_t *pRootPath, wchar_t *pFilePrefix)
Initializes the Streamer interfaces.
LIBFLIPRO_API FPROFrame_StreamStart(int32_t iHandle, uint32_t uiFrameCount, uint64_t uiFrameIntervalMS)
Start the streaming operation.
LIBFLIPRO_API FPROFrame_StreamStop(int32_t iHandle)
Stop the streaming operation.
LIBFLIPRO_API FPROFrame_ComputeFrameSize(int32_t iHandle)
Computes the size in bytes of the image frame.
LIBFLIPRO_API FPROCtrl_SetExposure(int32_t iHandle, uint64_t uiExposureTime, uint64_t uiFrameDelay, bool bImmediate)
Sets the exposure time of the image sensor.
FPRO_HWMERGEFRAMES
Enables for Hardware Image Merging.
Definition: libflipro.h:1564
@ HWMERGE_FRAME_HIGHONLY
Only the corrected high gain pixels will be sent through to the API. The low gain pixels will be ignored.
Definition: libflipro.h:1567
@ HWMERGE_FRAME_BOTH
Normal merge, both low and high gain planes are corrected and merged.
Definition: libflipro.h:1565
@ HWMERGE_FRAME_LOWONLY
Only the corrected low gain pixels will be sent through to the API. The high gain pixels will be ignored.
Definition: libflipro.h:1566
LIBFLIPRO_API FPROFrame_SetImageArea(int32_t iHandle, uint32_t uiColOffset, uint32_t uiRowOffset, uint32_t uiWidth, uint32_t uiHeight)
Sets the area of the image sensor to be used to produce image frame data.
@ FPRO_STREAMER_STOPPED
Streaming Stopped. This is the default state. It also enters this state when the requested number of frames has been streamed.
Definition: libflipro.h:1142
@ FPRO_STREAMER_STOPPED_ERROR
If streaming has stopped due to an error, the status will be less than 0. Consult the log file for error details.
Definition: libflipro.h:1141
LIBFLIPRO_API FPROCam_GetCameraList(FPRODEVICEINFO *pDeviceInfo, uint32_t *pNumDevices)
Retrieves the list of detected camera devices.
LIBFLIPRO_API FPROFrame_StreamGetStatistics(int32_t iHandle, FPROSTREAMSTATS *pStats)
Retrieves statistics for the streaming operation.
FPROSTREAMSTATS
Definition: libflipro.h:1184
FPROSTREAMERSTATUS iStatus
The status of the streamer. See FPROSTREAMERSTATUS.
Definition: libflipro.h:1192
uint64_t uiDiskFramesWritten
The total number of frames written to disk.
Definition: libflipro.h:1187
FPRO_HWMERGEENABLE
Definition: libflipro.h:1607
FPRO_IMAGE_FORMAT eMergeFormat
The image file format for the merged image. The actual PCIE card only supports RCD and TIFF.
Definition: libflipro.h:1609
FPRO_HWMERGEFRAMES eMergeFrames
Specifies the frames to merge.
Definition: libflipro.h:1610
bool bMergeEnable
True if merging is enabled. This must be true for the other enables to have any effect.
Definition: libflipro.h:1608
FPRO_REFFRAMES
Definition: libflipro.h:1321
uint16_t * pMultiplicativeLowGain
Low Gain Multiply Reference Frame.
Definition: libflipro.h:1327
int16_t * pAdditiveHighGain
High Gain Additive Reference Frame.
Definition: libflipro.h:1326
int16_t * pAdditiveLowGain
Low Gain Additive Reference Frame.
Definition: libflipro.h:1325
uint32_t uiWidth
Width of the frames in pixels.
Definition: libflipro.h:1322
uint32_t uiHeight
Height of the frames in pixels.
Definition: libflipro.h:1323
uint16_t * pMultiplicativeHighGain
High Gain Multiply Reference Frame.
Definition: libflipro.h:1328