From: Pierre M. <sid...@us...> - 2005-06-09 17:09:09
Update of /cvsroot/robotflow/RobotFlow/Vision/include In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv20382 Modified Files: IntegralColorExtraction.h IntegralEdgesOriExtraction.h IntegralLBPExtraction.h Log Message: Added color boundary detection, rectangle difference features and fixed some issues in order to make this class an Object and a BufferedNode. Index: IntegralLBPExtraction.h =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/include/IntegralLBPExtraction.h,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** IntegralLBPExtraction.h 2 Jun 2005 16:49:38 -0000 1.1 --- IntegralLBPExtraction.h 9 Jun 2005 17:08:38 -0000 1.2 *************** *** 42,46 **** int i_numChannels, int i_numHoriIntRect, int i_numVertIntRect, int i_numSamples, int i_predicate, bool i_doInterpolation, ! bool i_useUniform, int i_startAngle, double i_maxValue); // --- 42,47 ---- int i_numChannels, int i_numHoriIntRect, int i_numVertIntRect, int i_numSamples, int i_predicate, bool i_doInterpolation, ! bool i_useUniform, int i_startAngle, double i_maxValue, ! bool i_useRectDiff); // *************** *** 159,162 **** --- 160,169 ---- int m_numValidPattern; + double m_maxFeatValue; + + bool m_useRectDiff; + double *m_tmpMeanFeatures; + double *m_curMeanVal; + // Precalculated table of interpolation points. CvPoint *m_samplePoints; Index: IntegralColorExtraction.h =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/include/IntegralColorExtraction.h,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** IntegralColorExtraction.h 2 Jun 2005 16:49:38 -0000 1.1 --- IntegralColorExtraction.h 9 Jun 2005 17:08:38 -0000 1.2 *************** *** 41,45 **** IntegralColorExtraction(int i_width, int i_height, int i_numChannels, int i_numHoriIntRect, int i_numVertIntRect, ! double i_maxValue); // --- 41,46 ---- IntegralColorExtraction(int i_width, int i_height, int i_numChannels, int i_numHoriIntRect, int i_numVertIntRect, ! double i_maxValue, bool i_useRectDiff, bool i_useBoundary, ! double i_boundaryMeanDiffThresh); // *************** *** 74,77 **** --- 75,80 ---- void calculate(int output_id, int count, FD::Buffer &out); + void Preprocess(IplImage *i_srcImg); + void Preprocess(const unsigned char *i_src); *************** *** 119,125 **** --- 122,136 ---- int m_numVertIntRect; int m_numIntRect; + int m_numFeatures; // Maximum pixel channel value double m_maxValue; + bool m_useRectDiff; + double *m_tmpMeanFeatures; + double *m_curMeanVal; + + bool m_useBoundary; + double m_boundaryMeanDiffThresh; + // Integral color descriptor for region of interest FD::RCPtr<FD::Vector<VisualFeatureDesc<double> *> > m_featVect; Index: IntegralEdgesOriExtraction.h =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/include/IntegralEdgesOriExtraction.h,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** IntegralEdgesOriExtraction.h 2 Jun 2005 16:49:38 -0000 1.1 --- IntegralEdgesOriExtraction.h 9 Jun 2005 17:08:38 -0000 1.2 *************** *** 40,45 **** // IntegralEdgesOriExtraction(int i_width, int i_height, ! int i_numChannels, int i_numOriBins, double i_edgesStrTresh, ! double i_maxStrengthValue); // --- 40,46 ---- // IntegralEdgesOriExtraction(int i_width, int i_height, ! int i_numChannels, int i_numHoriIntRect, int i_numVertIntRect, ! int i_numOriBins, double i_edgesStrTresh, ! 
double i_maxStrengthValue, bool i_useRectDiff); // *************** *** 125,128 **** --- 126,134 ---- // Maximum strength channel value double m_maxStrengthValue; + double m_maxFeatValue; + + bool m_useRectDiff; + double *m_tmpMeanFeatures; + double *m_curMeanVal; // Integral color descriptor for region of interest |
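
The log message above adds "rectangle difference features", but the headers only expose the new members (m_useRectDiff, m_tmpMeanFeatures, m_curMeanVal), not the computation itself. As a rough illustration of what a rectangle-difference feature over an integral image typically looks like, here is a minimal standalone C++ sketch; it is not RobotFlow code, and every name in it is made up for the example.

    #include <iostream>
    #include <vector>

    // Sum of the values inside [x0, x1) x [y0, y1), read in O(1) from an
    // integral image where integral[y][x] holds the sum of everything above
    // and to the left of (x, y).
    static double rectSum(const std::vector<std::vector<double> > &integral,
                          int x0, int y0, int x1, int y1)
    {
        return integral[y1][x1] - integral[y0][x1]
             - integral[y1][x0] + integral[y0][x0];
    }

    // Difference between the mean values of two equally sized rectangles; a
    // feature of this kind is presumably what the new m_useRectDiff flag and
    // the m_tmpMeanFeatures / m_curMeanVal buffers are for.
    static double rectMeanDiff(const std::vector<std::vector<double> > &integral,
                               int ax, int ay, int bx, int by, int w, int h)
    {
        const double area = static_cast<double>(w) * h;
        return rectSum(integral, ax, ay, ax + w, ay + h) / area
             - rectSum(integral, bx, by, bx + w, by + h) / area;
    }

    int main()
    {
        // 4x4 image: left half = 1.0, right half = 3.0.
        std::vector<std::vector<double> > img(4, std::vector<double>(4, 1.0));
        for (int y = 0; y < 4; ++y)
            for (int x = 2; x < 4; ++x)
                img[y][x] = 3.0;

        // Build a 5x5 integral image with a zero border row and column.
        std::vector<std::vector<double> > integral(5, std::vector<double>(5, 0.0));
        for (int y = 0; y < 4; ++y)
            for (int x = 0; x < 4; ++x)
                integral[y + 1][x + 1] = img[y][x] + integral[y][x + 1]
                                       + integral[y + 1][x] - integral[y][x];

        // Mean of the left 2x4 block minus mean of the right 2x4 block: 1 - 3 = -2.
        std::cout << rectMeanDiff(integral, 0, 0, 2, 0, 2, 4) << "\n";
        return 0;
    }

Because each rectangle mean costs only four lookups in the integral image, adding difference features between the i_numHoriIntRect x i_numVertIntRect sub-rectangles mentioned in the constructors is cheap.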
From: Dominic L. <ma...@us...> - 2005-06-08 20:13:31
Update of /cvsroot/robotflow/RobotFlow/MARIE/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv8874 Modified Files: newMarieDataRaw.cpp Log Message: added prettyPrint Index: newMarieDataRaw.cpp =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/MARIE/src/newMarieDataRaw.cpp,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** newMarieDataRaw.cpp 29 Mar 2005 15:20:43 -0000 1.3 --- newMarieDataRaw.cpp 8 Jun 2005 20:13:14 -0000 1.4 *************** *** 55,58 **** --- 55,63 ---- * @output_type MarieDataRaw * + * @parameter_name PRETTY_PRINT + * @parameter_type bool + * @parameter_description Pretty print variables (remove FD tags) + * @parameter_value false + * END*/ *************** *** 71,74 **** --- 76,82 ---- int m_rawMarieOutID; + //parameters + bool m_prettyPrint; + public: *************** *** 83,86 **** --- 91,97 ---- //outputs m_rawMarieOutID = addOutput("DATA_RAW"); + + //parameters + m_prettyPrint = dereference_cast<bool>(parameters.get("PRETTY_PRINT")); } *************** *** 97,101 **** //printOn on the stream ! tempStr << inputObject; dataRaw->setRawData(tempStr.str()); dataRaw->setCustomID(*customID); --- 108,121 ---- //printOn on the stream ! ! if (!m_prettyPrint) ! { ! tempStr << inputObject; ! } ! else ! { ! inputObject->prettyPrint(tempStr); ! } ! dataRaw->setRawData(tempStr.str()); dataRaw->setCustomID(*customID); |
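
The new PRETTY_PRINT parameter only changes how the input object is serialized into the raw data string: by default the node still streams the object with operator<< (which keeps the FlowDesigner type tags), and when the parameter is true it calls prettyPrint() instead. A toy C++ sketch of that branch, with a made-up struct standing in for the actual FlowDesigner object type:

    #include <iostream>
    #include <sstream>
    #include <string>

    // Toy stand-in for the real FlowDesigner object; not RobotFlow code.
    struct ToyObject {
        int value;

        // Framework-style streaming, wrapped in type tags.
        friend std::ostream &operator<<(std::ostream &out, const ToyObject &obj) {
            return out << "<ToyObject " << obj.value << " >";
        }

        // Human-readable form, i.e. the payload without the tags.
        void prettyPrint(std::ostream &out) const { out << value; }
    };

    // Mirrors the branch added to newMarieDataRaw.cpp: stream by default,
    // call prettyPrint() only when the PRETTY_PRINT parameter is true.
    static std::string serialize(const ToyObject &inputObject, bool prettyPrint)
    {
        std::ostringstream tempStr;
        if (!prettyPrint)
            tempStr << inputObject;
        else
            inputObject.prettyPrint(tempStr);
        return tempStr.str();
    }

    int main()
    {
        ToyObject obj;
        obj.value = 42;
        std::cout << serialize(obj, false) << "\n"; // <ToyObject 42 >
        std::cout << serialize(obj, true)  << "\n"; // 42
        return 0;
    }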
From: Dominic L. <ma...@us...> - 2005-06-08 14:48:30
Update of /cvsroot/robotflow/RobotFlow/demo/SymbolRecog/n-files In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv12279 Modified Files: SymbolTracking.n colorTrain_SNCRZ30.n Added Files: Recog.n Log Message: Stand alone recog Index: colorTrain_SNCRZ30.n =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/demo/SymbolRecog/n-files/colorTrain_SNCRZ30.n,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** colorTrain_SNCRZ30.n 4 Nov 2004 18:34:43 -0000 1.2 --- colorTrain_SNCRZ30.n 8 Jun 2005 14:48:02 -0000 1.3 *************** *** 6,14 **** <Parameter name="DOWHILE" type="bool" value="" description="No description available"/> </Node> - <NetOutput name="ZOOM_POSITION" node="node_LOOP0_1" terminal="ZOOM_POSITION" object_type="" description="Returns the current zoom position of the Camera"/> <NetOutput name="TILT_POSITION" node="node_LOOP0_1" terminal="TILT_POSITION" object_type="" description="Returns the current tilt position of the Camera"/> <NetOutput name="PAN_POSITION" node="node_LOOP0_1" terminal="PAN_POSITION" object_type="" description="Returns the current pan position of the Camera"/> <NetOutput name="SAVE_FLAG" node="node_LOOP0_1" terminal="SAVE_FLAG" object_type="any" description="No description available"/> <NetOutput name="COMPONENTS_IMAGE" node="node_LOOP0_1" terminal="COMPONENTS_IMAGE" object_type="any" description="No description available"/> <Note x="-576" y="485" visible="1" text="MAIN network is an infinite loop."/> </Network> --- 6,14 ---- <Parameter name="DOWHILE" type="bool" value="" description="No description available"/> </Node> <NetOutput name="TILT_POSITION" node="node_LOOP0_1" terminal="TILT_POSITION" object_type="" description="Returns the current tilt position of the Camera"/> <NetOutput name="PAN_POSITION" node="node_LOOP0_1" terminal="PAN_POSITION" object_type="" description="Returns the current pan position of the Camera"/> <NetOutput name="SAVE_FLAG" node="node_LOOP0_1" terminal="SAVE_FLAG" object_type="any" description="No description available"/> <NetOutput name="COMPONENTS_IMAGE" node="node_LOOP0_1" terminal="COMPONENTS_IMAGE" object_type="any" description="No description available"/> + <NetOutput name="ZOOM_FACTOR" node="node_LOOP0_1" terminal="ZOOM_FACTOR" object_type="any" description="No description available"/> <Note x="-576" y="485" visible="1" text="MAIN network is an infinite loop."/> </Network> *************** *** 94,97 **** --- 94,104 ---- <Node name="node_RGB242RGB15_1" type="RGB242RGB15" x="-343.000000" y="208.000000"/> <Node name="node_NilObject_1" type="NilObject" x="-874.000000" y="270.000000"/> + <Node name="node_ZOOM_FACTOR_1" type="ZOOM_FACTOR" x="-268.000000" y="112.000000"/> + <Node name="node_TextProbe_1" type="TextProbe" x="-135.000000" y="111.000000"> + <Parameter name="BREAK_AT" type="int" value="" description="If set, the probe runs until (count = BREAK_AT)"/> + <Parameter name="SHOW" type="bool" value="true" description="Whether or not to show the the data by default"/> + <Parameter name="SKIP" type="int" value="" description="Count increment for each "Next""/> + <Parameter name="PROBE_NAME" type="string" value="" description="Name (title) of the probe"/> + </Node> <Link from="node_PTZControl_1" output="ABS_PAN" to="node_SNCRZ30_1" input="PAN_ABS_POS"/> <Link from="node_PTZControl_1" output="ABS_TILT" to="node_SNCRZ30_1" input="TILT_ABS_POS"/> *************** *** 102,114 **** <Link from="node_PTZControl_1" output="ABS_ZOOM" to="node_SNCRZ30_1" 
input="ZOOM_ABS_POS"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="ZOOM_REL_POS"/> <NetCondition name="CONDITION" node="node_Constant_1" terminal="VALUE"/> - <NetOutput name="ZOOM_POSITION" node="node_SNCRZ30_1" terminal="ZOOM_POSITION" object_type="int" description="Returns the current zoom position of the Camera"/> <NetOutput name="TILT_POSITION" node="node_SNCRZ30_1" terminal="TILT_POSITION" object_type="int" description="Returns the current tilt position of the Camera"/> <NetOutput name="PAN_POSITION" node="node_SNCRZ30_1" terminal="PAN_POSITION" object_type="int" description="Returns the current pan position of the Camera"/> <NetOutput name="SAVE_FLAG" node="node_SUBNET0_1" terminal="SAVE_FLAG" object_type="" description="The object from THEN or ELSE depending on COND"/> <NetOutput name="COMPONENTS_IMAGE" node="node_SUBNET0_1" terminal="COMPONENTS_IMAGE" object_type="" description="No description available"/> <Note x="-927" y="445" visible="1" text="A Graphical User Interface will be displayed with PTZ control"/> <Note x="-527" y="444" visible="1" text="Sony SNC-RZ30 driver"/> <Note x="-83" y="442" visible="1" text="Color training GUI will be displayed."/> </Network> </Document> --- 109,143 ---- <Link from="node_PTZControl_1" output="ABS_ZOOM" to="node_SNCRZ30_1" input="ZOOM_ABS_POS"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="ZOOM_REL_POS"/> + <Link from="node_SNCRZ30_1" output="ZOOM_POSITION" to="node_ZOOM_FACTOR_1" input="ABS_ZOOM"/> + <Link from="node_ZOOM_FACTOR_1" output="ZOOM_FACTOR" to="node_TextProbe_1" input="INPUT"/> <NetCondition name="CONDITION" node="node_Constant_1" terminal="VALUE"/> <NetOutput name="TILT_POSITION" node="node_SNCRZ30_1" terminal="TILT_POSITION" object_type="int" description="Returns the current tilt position of the Camera"/> <NetOutput name="PAN_POSITION" node="node_SNCRZ30_1" terminal="PAN_POSITION" object_type="int" description="Returns the current pan position of the Camera"/> <NetOutput name="SAVE_FLAG" node="node_SUBNET0_1" terminal="SAVE_FLAG" object_type="" description="The object from THEN or ELSE depending on COND"/> <NetOutput name="COMPONENTS_IMAGE" node="node_SUBNET0_1" terminal="COMPONENTS_IMAGE" object_type="" description="No description available"/> + <NetOutput name="ZOOM_FACTOR" node="node_TextProbe_1" terminal="OUTPUT" object_type="any" description="Pass through"/> <Note x="-927" y="445" visible="1" text="A Graphical User Interface will be displayed with PTZ control"/> <Note x="-527" y="444" visible="1" text="Sony SNC-RZ30 driver"/> <Note x="-83" y="442" visible="1" text="Color training GUI will be displayed."/> </Network> + <Network type="subnet" name="ZOOM_FACTOR"> + <Node name="node_Div_1" type="Div" x="-122.000000" y="157.000000"/> + <Node name="node_Constant_1" type="Constant" x="-234.000000" y="166.000000"> + <Parameter name="VALUE" type="float" value="750" description="The value"/> + </Node> + <Node name="node_Add_1" type="Add" x="-14.000000" y="149.000000"/> + <Node name="node_Constant_2" type="Constant" x="-170.000000" y="97.000000"> + <Parameter name="VALUE" type="float" value="1.0" description="The value"/> + </Node> + <Node name="node_NOP_1" type="NOP" x="-476.000000" y="150.000000"/> + <Node name="node_NOP_2" type="NOP" x="201.000000" y="149.000000"/> + <Link from="node_NOP_1" output="OUTPUT" to="node_Div_1" input="NUM"/> + <Link from="node_Constant_1" output="VALUE" to="node_Div_1" input="DEN"/> + <Link from="node_Constant_2" output="VALUE" to="node_Add_1" 
input="INPUT1"/> + <Link from="node_Div_1" output="OUTPUT" to="node_Add_1" input="INPUT2"/> + <Link from="node_Add_1" output="OUTPUT" to="node_NOP_2" input="INPUT"/> + <NetInput name="ABS_ZOOM" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> + <NetOutput name="ZOOM_FACTOR" node="node_NOP_2" terminal="OUTPUT" object_type="any" description="The output = The input"/> + <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.9.0"/> + </Network> </Document> --- NEW FILE: Recog.n --- #!/usr/bin/env batchflow <?xml version="1.0"?> <Document> <Network type="subnet" name="MAIN"> <Node name="node_MAIN_LOOP_1" type="MAIN_LOOP" x="-304.000000" y="-116.000000"> <Parameter name="DOWHILE" type="bool" value="" description="No description available"/> </Node> <Node name="node_LoadFile_1" type="LoadFile" x="-695.000000" y="-97.000000"> <Parameter name="FILENAME" type="string" value="../color_lookup/black_white.data" description="No description available"/> </Node> <Node name="node_LoadFile_2" type="LoadFile" x="-694.000000" y="-123.000000"> <Parameter name="FILENAME" type="string" value="../dict/fox.dict" description="No description available"/> </Node> <Node name="node_LoadFile_3" type="LoadFile" x="-691.000000" y="-152.000000"> <Parameter name="FILENAME" type="string" value="../neural_networks/net_172_7_36.nnet" description="No description available"/> </Node> <Link from="node_LoadFile_1" output="OUTPUT" to="node_MAIN_LOOP_1" input="BLACK_WHITE_LOOKUP"/> <Link from="node_LoadFile_2" output="OUTPUT" to="node_MAIN_LOOP_1" input="DICT"/> <Link from="node_LoadFile_3" output="OUTPUT" to="node_MAIN_LOOP_1" input="NNET"/> <NetOutput name="SENTENCE" node="node_MAIN_LOOP_1" terminal="SENTENCE" object_type="" description="Pass through"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.9.0"/> </Network> <Network type="iterator" name="MAIN_LOOP"> <Node name="node_READv2_1" type="READv2" x="-51.000000" y="162.000000"> <Parameter name="MIN_SYMBOL_HEIGHT" type="int" value="30" description="No description available"/> <Parameter name="MIN_SYMBOL_WIDTH" type="int" value="15" description="No description available"/> </Node> <Node name="node_SNCRZ30_1" type="SNCRZ30" x="-1451.000000" y="160.000000"> <Parameter name="PAN_SPEED" type="int" value="24" description="Pan speed 0-24"/> <Parameter name="TILT_SPEED" type="int" value="20" description="Tilt speed 0-20"/> <Parameter name="HOST" type="string" value="192.168.1.149" description="The HOST where to connect (camera IP or network name)"/> <Parameter name="CONTINUOUS" type="bool" value="true" description="Continuously grab images "/> <Parameter name="TRANSMIT_SPEED" type="int" value="0 " description="transfer rate FPS [0=FASTEST]"/> <Parameter name="WAIT_REPLY" type="bool" value="false" description="Wait for camera reply on commands"/> <Parameter name="INQUIRY_POSITION" type="bool" value="false" description="Inquiry position of the pan,tilt,zoom from the device"/> </Node> <Node name="node_NilObject_1" type="NilObject" x="-1709.000000" y="122.000000"/> <Node name="node_TextProbe_1" type="TextProbe" x="65.000000" y="161.000000"> <Parameter name="BREAK_AT" type="int" value="-1" description="If set, the probe runs until (count = BREAK_AT)"/> <Parameter name="SHOW" type="bool" value="true" description="Whether or not to show the the data by default"/> <Parameter name="SKIP" type="int" value="" description="Count increment for each "Next""/> <Parameter name="PROBE_NAME" type="string" value="SENTENCE" description="Name (title) of 
the probe"/> </Node> <Node name="node_Constant_2" type="Constant" x="-558.000000" y="140.000000"> <Parameter name="VALUE" type="bool" value="true" description="The value"/> </Node> <Node name="node_Constant_3" type="Constant" x="-810.000000" y="438.000000"> <Parameter name="VALUE" type="bool" value="true" description="The value"/> </Node> <Node name="node_ImageProbeSDL_1" type="ImageProbeSDL" x="-373.000000" y="210.000000"> <Parameter name="HEIGHT" type="int" value="240" description="The height of the image"/> <Parameter name="WIDTH" type="int" value="320" description="The width of the image"/> </Node> <Node name="node_Binarize_1" type="Binarize" x="-531.000000" y="209.000000"/> <Node name="node_RGB242RGB15_1" type="RGB242RGB15" x="-1200.000000" y="182.000000"/> <Node name="node_StatIntensityAnalyser_1" type="StatIntensityAnalyser" x="-1035.000000" y="182.000000"> <Parameter name="FRACTION_ANALYSED" type="float" value="0.1" description="Fraction of pixels to analyse (on a 0-1 scale)"/> </Node> <Node name="node_Add_1" type="Add" x="-749.000000" y="169.000000"/> <Node name="node_Div_1" type="Div" x="-927.000000" y="271.000000"/> <Node name="node_Constant_1" type="Constant" x="-1108.000000" y="386.000000"> <Parameter name="VALUE" type="float" value="2" description="The value"/> </Node> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="PAN_ABS_POS"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="TILT_ABS_POS"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="PAN_REL_POS"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="TILT_REL_POS"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="ZOOM_ABS_POS"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="ZOOM_REL_POS"/> <Link from="node_READv2_1" output="SENTENCE" to="node_TextProbe_1" input="INPUT"/> <Link from="node_Constant_2" output="VALUE" to="node_READv2_1" input="SIGN_TRACKING_ACTIVATED"/> <Link from="node_ImageProbeSDL_1" output="OUTPUT" to="node_READv2_1" input="RGB15_BINARIZED"/> <Link from="node_Binarize_1" output="IMAGE_OUT" to="node_ImageProbeSDL_1" input="INPUT"/> <Link from="node_SNCRZ30_1" output="IMAGE" to="node_Binarize_1" input="IMAGE_IN">-1306 182.5 -1019 300 -766 301 -644.5 216.5 </Link> <Link from="node_SNCRZ30_1" output="IMAGE" to="node_RGB242RGB15_1" input="RGB24_IMAGE"/> <Link from="node_RGB242RGB15_1" output="RGB15_IMAGE" to="node_StatIntensityAnalyser_1" input="IMAGE_IN"/> <Link from="node_StatIntensityAnalyser_1" output="MAX_INTENSITY" to="node_Add_1" input="INPUT1"/> <Link from="node_StatIntensityAnalyser_1" output="MIN_INTENSITY" to="node_Add_1" input="INPUT2"/> <Link from="node_Add_1" output="OUTPUT" to="node_Div_1" input="NUM"/> <Link from="node_Constant_1" output="VALUE" to="node_Div_1" input="DEN"/> <Link from="node_Div_1" output="OUTPUT" to="node_Binarize_1" input="THRESHOLD"/> <NetOutput name="SENTENCE" node="node_TextProbe_1" terminal="OUTPUT" object_type="any" description="Pass through"/> <NetInput name="NNET" node="node_READv2_1" terminal="NNET" object_type="any" description="No description available"/> <NetInput name="DICT" node="node_READv2_1" terminal="DICT" object_type="any" description="No description available"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_READv2_1" terminal="BLACK_WHITE_LOOKUP" object_type="any" description="No description available"/> <NetCondition name="CONDITION" node="node_Constant_3" terminal="VALUE"/> <Note x="0" y="0" visible="0" 
text="Created with FlowDesigner 0.9.0"/> </Network> </Document> Index: SymbolTracking.n =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/demo/SymbolRecog/n-files/SymbolTracking.n,v retrieving revision 1.9 retrieving revision 1.10 diff -C2 -d -r1.9 -r1.10 *** SymbolTracking.n 2 Jan 2005 14:44:01 -0000 1.9 --- SymbolTracking.n 8 Jun 2005 14:48:02 -0000 1.10 *************** *** 28,33 **** <NetOutput name="SENTENCE" node="node_MAIN_LOOP_1" terminal="SENTENCE" object_type="" description="Pass through"/> <NetOutput name="ORIGINAL_TEXT" node="node_MAIN_LOOP_1" terminal="ORIGINAL_TEXT" object_type="" description="Pass through"/> - <NetOutput name="REL_PAN" node="node_MAIN_LOOP_1" terminal="REL_PAN" object_type="" description="The output object = input object"/> - <NetOutput name="REL_TILT" node="node_MAIN_LOOP_1" terminal="REL_TILT" object_type="" description="The output object = input object"/> <NetOutput name="ABS_ZOOM" node="node_MAIN_LOOP_1" terminal="ABS_ZOOM" object_type="" description="The output object = input object"/> <NetOutput name="READING_FLAG" node="node_MAIN_LOOP_1" terminal="READING_FLAG" object_type="any" description="No description available"/> --- 28,31 ---- *************** *** 50,62 **** <Node name="node_RGB242RGB15_1" type="RGB242RGB15" x="-553.000000" y="293.000000"/> <Node name="node_NilObject_1" type="NilObject" x="-1193.000000" y="146.000000"/> ! <Node name="node_Feedback_1" type="Feedback" x="260.000000" y="246.000000"> ! <Parameter name="DELAY" type="int" value="1" description="Number of iteration for the delay"/> ! <Parameter name="BEFORE_LIMIT" type="int" value="0" description="When count - DELAY is smaller or equal to BEFORE_LIMIT, the input is pulled from BEFORE at (DELAY - count + BEFORE_LIMIT)"/> ! </Node> ! <Node name="node_Feedback_2" type="Feedback" x="257.000000" y="175.000000"> ! <Parameter name="DELAY" type="int" value="1" description="Number of iteration for the delay"/> ! <Parameter name="BEFORE_LIMIT" type="int" value="0" description="When count - DELAY is smaller or equal to BEFORE_LIMIT, the input is pulled from BEFORE at (DELAY - count + BEFORE_LIMIT)"/> ! </Node> ! <Node name="node_Feedback_3" type="Feedback" x="260.000000" y="304.000000"> <Parameter name="DELAY" type="int" value="1" description="Number of iteration for the delay"/> <Parameter name="BEFORE_LIMIT" type="int" value="0" description="When count - DELAY is smaller or equal to BEFORE_LIMIT, the input is pulled from BEFORE at (DELAY - count + BEFORE_LIMIT)"/> --- 48,52 ---- <Node name="node_RGB242RGB15_1" type="RGB242RGB15" x="-553.000000" y="293.000000"/> <Node name="node_NilObject_1" type="NilObject" x="-1193.000000" y="146.000000"/> ! 
<Node name="node_Feedback_3" type="Feedback" x="628.000000" y="307.000000"> <Parameter name="DELAY" type="int" value="1" description="Number of iteration for the delay"/> <Parameter name="BEFORE_LIMIT" type="int" value="0" description="When count - DELAY is smaller or equal to BEFORE_LIMIT, the input is pulled from BEFORE at (DELAY - count + BEFORE_LIMIT)"/> *************** *** 75,141 **** <Parameter name="PROBE_NAME" type="string" value="ORIGINAL_TEXT" description="Name (title) of the probe"/> </Node> - <Node name="node_NOP_1" type="NOP" x="103.000000" y="169.000000"/> - <Node name="node_NOP_2" type="NOP" x="102.000000" y="239.000000"/> - <Node name="node_NOP_3" type="NOP" x="101.000000" y="295.000000"/> <Node name="node_ImageProbeSDL_1" type="ImageProbeSDL" x="-664.000000" y="208.000000"> <Parameter name="HEIGHT" type="int" value="240" description="The height of the image"/> <Parameter name="WIDTH" type="int" value="320" description="The width of the image"/> </Node> ! <Node name="node_SKIP_N_1" type="SKIP_N" x="7.000000" y="170.000000"> ! <Parameter name="SKIP_N" type="int" value="10" description="The value"/> ! </Node> ! <Node name="node_SKIP_N_2" type="SKIP_N" x="10.000000" y="238.000000"> ! <Parameter name="SKIP_N" type="int" value="10" description="The value"/> ! </Node> ! <Node name="node_SKIP_N_3" type="SKIP_N" x="9.000000" y="295.000000"> ! <Parameter name="SKIP_N" type="int" value="10" description="The value"/> ! </Node> ! <Node name="node_ComponentsViewer_1" type="ComponentsViewer" x="-75.000000" y="381.000000"/> ! <Node name="node_NOP_4" type="NOP" x="-572.000000" y="372.000000"/> ! <Node name="node_Components_1" type="Components" x="-351.000000" y="396.000000"> ! <Parameter name="XGAP" type="int" value="3" description="max X distance between two pixels of the same color blob."/> ! <Parameter name="YGAP" type="int" value="3" description="max Y distance between two pixels of the same color blob."/> ! <Parameter name="NUM_COLOR" type="int" value="5" description="Number of color to extract."/> ! <Parameter name="MIN_AREA" type="int" value="16" description="minimum area of color blocs"/> </Node> ! <Node name="node_ImageProbe_1" type="ImageProbe" x="257.000000" y="387.000000"> ! <Parameter name="HEIGHT" type="int" value="240" description="The height of the image"/> ! <Parameter name="WIDTH" type="int" value="320" description="The width of the image"/> ! <Parameter name="BREAK_AT" type="int" value="" description="No description available"/> ! <Parameter name="SHOW" type="bool" value="true" description="No description available"/> ! 
<Parameter name="SKIP" type="int" value="" description="No description available"/> </Node> - <Node name="node_NilObject_3" type="NilObject" x="41.000000" y="413.000000"/> - <Link from="node_SNCRZ30_1" output="ZOOM_POSITION" to="node_READ_1" input="CURRENT_ABS_ZOOM"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="TILT_ABS_POS"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="PAN_ABS_POS"/> - <Link from="node_NilObject_2" output="VALUE" to="node_Feedback_1" input="BEFORE">-78.5 312 -5.80059e-270 294 167 253.5 </Link> <Link from="node_NilObject_2" output="VALUE" to="node_Feedback_3" input="BEFORE"/> <Link from="node_READ_1" output="SENTENCE" to="node_TextProbe_1" input="INPUT"/> <Link from="node_READ_1" output="ORIGINAL_TEXT" to="node_TextProbe_2" input="INPUT"/> - <Link from="node_Feedback_1" output="DELAY" to="node_SNCRZ30_1" input="TILT_REL_POS">355.5 253.5 447 254 447 503 -1106 504 -1106 191 -1044.5 192.5 </Link> <Link from="node_RGB242RGB15_1" output="RGB15_IMAGE" to="node_READ_1" input="RGB15_IMAGE"/> <Link from="node_ImageProbeSDL_1" output="OUTPUT" to="node_RGB242RGB15_1" input="RGB24_IMAGE"/> <Link from="node_SNCRZ30_1" output="IMAGE" to="node_ImageProbeSDL_1" input="INPUT"/> - <Link from="node_Feedback_3" output="DELAY" to="node_SNCRZ30_1" input="ZOOM_ABS_POS">355.5 311.5 396 311 396 431 -1067 431 -1068 209 -1044.5 207.5 </Link> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="ZOOM_REL_POS"/> ! <Link from="node_Feedback_2" output="DELAY" to="node_SNCRZ30_1" input="PAN_REL_POS">352.5 182.5 531 182 531 576 -1176 577 -1175 175 -1044.5 177.5 </Link> ! <Link from="node_NOP_1" output="OUTPUT" to="node_Feedback_2" input="INPUT"/> ! <Link from="node_NOP_2" output="OUTPUT" to="node_Feedback_1" input="INPUT"/> ! <Link from="node_SKIP_N_2" output="OUTPUT" to="node_NOP_2" input="INPUT"/> ! <Link from="node_NilObject_2" output="VALUE" to="node_Feedback_2" input="BEFORE"/> ! <Link from="node_NOP_3" output="OUTPUT" to="node_Feedback_3" input="INPUT"/> ! <Link from="node_SKIP_N_3" output="OUTPUT" to="node_NOP_3" input="INPUT"/> ! <Link from="node_READ_1" output="REL_PAN_COMMAND" to="node_SKIP_N_1" input="INPUT"/> ! <Link from="node_READ_1" output="REL_TILT_COMMAND" to="node_SKIP_N_2" input="INPUT"/> ! <Link from="node_READ_1" output="ABS_ZOOM_COMMAND" to="node_SKIP_N_3" input="INPUT"/> ! <Link from="node_NOP_4" output="OUTPUT" to="node_READ_1" input="COLOR_LOOKUP"/> ! <Link from="node_Components_1" output="BLOBS" to="node_ComponentsViewer_1" input="DATA"/> ! <Link from="node_NOP_4" output="OUTPUT" to="node_Components_1" input="LOOKUP"/> ! <Link from="node_RGB242RGB15_1" output="RGB15_IMAGE" to="node_Components_1" input="IMAGE"/> ! <Link from="node_NOP_4" output="OUTPUT" to="node_ComponentsViewer_1" input="LOOKUP"/> ! <Link from="node_ComponentsViewer_1" output="IMAGE" to="node_ImageProbe_1" input="INPUT"/> ! <Link from="node_NilObject_3" output="VALUE" to="node_ImageProbe_1" input="SAVE_FILENAME"/> ! 
<Link from="node_SKIP_N_1" output="OUTPUT" to="node_NOP_1" input="INPUT"/> <NetCondition name="CONDITION" node="node_Constant_1" terminal="VALUE"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_READ_1" terminal="BLACK_WHITE_LOOKUP" object_type="any" description="No description available"/> --- 65,110 ---- <Parameter name="PROBE_NAME" type="string" value="ORIGINAL_TEXT" description="Name (title) of the probe"/> </Node> <Node name="node_ImageProbeSDL_1" type="ImageProbeSDL" x="-664.000000" y="208.000000"> <Parameter name="HEIGHT" type="int" value="240" description="The height of the image"/> <Parameter name="WIDTH" type="int" value="320" description="The width of the image"/> </Node> ! <Node name="node_SNCRZ30RS232_1" type="SNCRZ30RS232" x="430.000000" y="202.000000"> ! <Parameter name="PAN_SPEED" type="int" value="24" description="Pan speed 0-24"/> ! <Parameter name="TILT_SPEED" type="int" value="20" description="Tilt speed 0-20"/> ! <Parameter name="SERIAL_PORT" type="string" value="/dev/ttyUSB0" description="No Description Available"/> </Node> ! <Node name="node_NilObject_4" type="NilObject" x="184.000000" y="156.000000"/> ! <Node name="node_TEST_COMMAND_1" type="TEST_COMMAND" x="144.000000" y="218.000000"/> ! <Node name="node_TEST_COMMAND_2" type="TEST_COMMAND" x="144.000000" y="185.000000"/> ! <Node name="node_TEST_COMMAND_3" type="TEST_COMMAND" x="146.000000" y="250.000000"/> ! <Node name="node_TextProbe_3" type="TextProbe" x="17.000000" y="157.000000"> ! <Parameter name="BREAK_AT" type="int" value="-1" description="If set, the probe runs until (count = BREAK_AT)"/> ! <Parameter name="SHOW" type="bool" value="true" description="Whether or not to show the the data by default"/> ! <Parameter name="SKIP" type="int" value="" description="Count increment for each "Next""/> ! <Parameter name="PROBE_NAME" type="string" value="REL_PAN" description="Name (title) of the probe"/> </Node> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="TILT_ABS_POS"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="PAN_ABS_POS"/> <Link from="node_NilObject_2" output="VALUE" to="node_Feedback_3" input="BEFORE"/> <Link from="node_READ_1" output="SENTENCE" to="node_TextProbe_1" input="INPUT"/> <Link from="node_READ_1" output="ORIGINAL_TEXT" to="node_TextProbe_2" input="INPUT"/> <Link from="node_RGB242RGB15_1" output="RGB15_IMAGE" to="node_READ_1" input="RGB15_IMAGE"/> <Link from="node_ImageProbeSDL_1" output="OUTPUT" to="node_RGB242RGB15_1" input="RGB24_IMAGE"/> <Link from="node_SNCRZ30_1" output="IMAGE" to="node_ImageProbeSDL_1" input="INPUT"/> <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="ZOOM_REL_POS"/> ! <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="PAN_REL_POS"/> ! <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="TILT_REL_POS"/> ! <Link from="node_NilObject_1" output="VALUE" to="node_SNCRZ30_1" input="ZOOM_ABS_POS"/> ! <Link from="node_NilObject_4" output="VALUE" to="node_SNCRZ30RS232_1" input="PAN_ABS_POS"/> ! <Link from="node_NilObject_4" output="VALUE" to="node_SNCRZ30RS232_1" input="TILT_ABS_POS"/> ! <Link from="node_SNCRZ30RS232_1" output="ZOOM_POSITION" to="node_Feedback_3" input="INPUT"/> ! <Link from="node_Feedback_3" output="DELAY" to="node_READ_1" input="CURRENT_ABS_ZOOM"/> ! <Link from="node_TEST_COMMAND_1" output="OUTPUT" to="node_SNCRZ30RS232_1" input="TILT_REL_POS"/> ! <Link from="node_READ_1" output="REL_TILT_COMMAND" to="node_TEST_COMMAND_1" input="INPUT"/> ! 
<Link from="node_TEST_COMMAND_2" output="OUTPUT" to="node_SNCRZ30RS232_1" input="PAN_REL_POS"/> ! <Link from="node_TEST_COMMAND_3" output="OUTPUT" to="node_SNCRZ30RS232_1" input="ZOOM"/> ! <Link from="node_READ_1" output="ABS_ZOOM_COMMAND" to="node_TEST_COMMAND_3" input="INPUT"/> ! <Link from="node_TextProbe_3" output="OUTPUT" to="node_TEST_COMMAND_2" input="INPUT"/> ! <Link from="node_READ_1" output="REL_PAN_COMMAND" to="node_TextProbe_3" input="INPUT"/> <NetCondition name="CONDITION" node="node_Constant_1" terminal="VALUE"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_READ_1" terminal="BLACK_WHITE_LOOKUP" object_type="any" description="No description available"/> *************** *** 145,159 **** <NetOutput name="SENTENCE" node="node_TextProbe_1" terminal="OUTPUT" object_type="any" description="Pass through"/> <NetOutput name="ORIGINAL_TEXT" node="node_TextProbe_2" terminal="OUTPUT" object_type="any" description="Pass through"/> - <NetOutput name="REL_PAN" node="node_Feedback_2" terminal="OUTPUT" object_type="any" description="The output object = input object"/> - <NetOutput name="REL_TILT" node="node_Feedback_1" terminal="OUTPUT" object_type="any" description="The output object = input object"/> <NetOutput name="ABS_ZOOM" node="node_Feedback_3" terminal="OUTPUT" object_type="any" description="The output object = input object"/> <NetOutput name="READING_FLAG" node="node_READ_1" terminal="READING_FLAG" object_type="any" description="No description available"/> ! <NetInput name="COLOR_LOOKUP" node="node_NOP_4" terminal="INPUT" object_type="any" description="The input"/> ! <NetOutput name="COMPONENTS" node="node_ImageProbe_1" terminal="OUTPUT" object_type="any" description="No description available"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="-877" y="393" visible="1" text="You must have a Sony SNC-RZ30 Network Camera for this network to work. You can replace this by having a V4L2 capture board and a PTZ camera."/> <Note x="-223" y="97" visible="1" text="The READ network will do all the processing to extract text from images at each iteration."/> - <Note x="-325" y="593" visible="1" text="The Components, ComponentsViewer and ImageProbe are useful only to display color components for the user. They can be removed without changing the outcome of the algorithm."/> - <Note x="439" y="136" visible="1" text="SKIP_N blocks are useful because the SNCRZ30 camera is too slow and we have to limit the number of PTZ commands else it will overflow. We give a commande every 15 iterations."/> </Network> <Network type="subnet" name="SKIP_N"> --- 114,125 ---- <NetOutput name="SENTENCE" node="node_TextProbe_1" terminal="OUTPUT" object_type="any" description="Pass through"/> <NetOutput name="ORIGINAL_TEXT" node="node_TextProbe_2" terminal="OUTPUT" object_type="any" description="Pass through"/> <NetOutput name="ABS_ZOOM" node="node_Feedback_3" terminal="OUTPUT" object_type="any" description="The output object = input object"/> <NetOutput name="READING_FLAG" node="node_READ_1" terminal="READING_FLAG" object_type="any" description="No description available"/> ! <NetOutput name="TILT_POSITION" node="node_SNCRZ30RS232_1" terminal="TILT_POSITION" object_type="int" description="Returns the current tilt position of the Camera"/> ! <NetOutput name="PAN_POSITION" node="node_SNCRZ30RS232_1" terminal="PAN_POSITION" object_type="int" description="Returns the current pan position of the Camera"/> ! 
<NetInput name="COLOR_LOOKUP" node="node_READ_1" terminal="COLOR_LOOKUP" object_type="any" description="No description available"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="-877" y="393" visible="1" text="You must have a Sony SNC-RZ30 Network Camera for this network to work. You can replace this by having a V4L2 capture board and a PTZ camera."/> <Note x="-223" y="97" visible="1" text="The READ network will do all the processing to extract text from images at each iteration."/> </Network> <Network type="subnet" name="SKIP_N"> *************** *** 181,184 **** --- 147,166 ---- <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> </Network> + <Network type="subnet" name="TEST_COMMAND"> + <Node name="node_IF_1" type="IF" x="-312.000000" y="-42.000000"> + <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> + </Node> + <Node name="node_isNil_1" type="isNil" x="-460.000000" y="-56.000000"/> + <Node name="node_NOP_1" type="NOP" x="-571.000000" y="-41.000000"/> + <Node name="node_ToInt_1" type="ToInt" x="-459.000000" y="-19.000000"/> + <Link from="node_isNil_1" output="OUTPUT" to="node_IF_1" input="COND"/> + <Link from="node_NOP_1" output="OUTPUT" to="node_IF_1" input="THEN"/> + <Link from="node_ToInt_1" output="OUTPUT" to="node_IF_1" input="ELSE"/> + <Link from="node_NOP_1" output="OUTPUT" to="node_ToInt_1" input="INPUT"/> + <Link from="node_NOP_1" output="OUTPUT" to="node_isNil_1" input="INPUT"/> + <NetInput name="INPUT" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> + <NetOutput name="OUTPUT" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> + <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.9.0"/> + </Network> <Parameter name="BLACK_WHITE_LOOKUP_FILE" type="string" value="../color_lookup/black_white.data"/> <Parameter name="NNET_FILE" type="string" value="../neural_networks/net_172_7_36.nnet"/> |
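
For readers who do not want to trace the XML: the small subnets wired in this commit boil down to a little arithmetic and one nil check. Below is a plain C++ transcription (assumed semantics, function names are mine) of the ZOOM_FACTOR subnet added to colorTrain_SNCRZ30.n, the binarization threshold wired in Recog.n, and the new TEST_COMMAND subnet in SymbolTracking.n.

    #include <iostream>
    #include <optional>

    // ZOOM_FACTOR subnet (colorTrain_SNCRZ30.n): Div by the constant 750,
    // then Add the constant 1.0.
    static double zoomFactor(double absZoomPosition)
    {
        return 1.0 + absZoomPosition / 750.0;
    }

    // Threshold wiring in Recog.n: StatIntensityAnalyser's MAX_INTENSITY and
    // MIN_INTENSITY are added, divided by the constant 2, and fed to Binarize.
    static double binarizeThreshold(double maxIntensity, double minIntensity)
    {
        return (maxIntensity + minIntensity) / 2.0;
    }

    // TEST_COMMAND subnet (SymbolTracking.n): a nil command is forwarded
    // unchanged, anything else is converted to int (the ToInt node).
    static std::optional<int> testCommand(const std::optional<double> &command)
    {
        if (!command)
            return std::nullopt;            // isNil -> THEN branch: pass nil on
        return static_cast<int>(*command);  // ELSE branch: ToInt
    }

    int main()
    {
        std::cout << zoomFactor(750.0) << "\n";               // 2
        std::cout << binarizeThreshold(200.0, 40.0) << "\n";  // 120
        std::cout << testCommand(12.7).value_or(-1) << "\n";  // 12
        return 0;
    }

So the probed zoom factor is 1.0 + ZOOM_POSITION/750, and the TEST_COMMAND wrappers presumably let the SNCRZ30RS232 driver receive either an integer command or a nil when there is nothing to send that iteration.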
From: Dominic L. <ma...@us...> - 2005-06-08 14:32:03
Update of /cvsroot/robotflow/RobotFlow/Vision/n-files In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv2989 Modified Files: READv2.n Log Message: removed unused subnets Index: READv2.n =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/n-files/READv2.n,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** READv2.n 8 Jun 2005 14:28:35 -0000 1.3 --- READv2.n 8 Jun 2005 14:31:54 -0000 1.4 *************** *** 15,31 **** <Note x="-3418" y="-384" visible="1" text="The MAIN network will load configuration from the files containing : - black and white color lookup - the neural network to use for recog - The dictionary to use * You can change this configuration by double clicking on the LoadFile blocks"/> </Network> - <Network type="subnet" name="COMPONENTS_EXTRACT"> - <Node name="node_Components_1" type="Components" x="467.000000" y="113.000000"> - <Parameter name="XGAP" type="int" value="2" description=""/> - <Parameter name="YGAP" type="int" value="2" description=""/> - <Parameter name="NUM_COLOR" type="int" value="4" description=""/> - <Parameter name="MIN_AREA" type="int" value="16" description=""/> - </Node> - <Node name="node_NOP_1" type="NOP" x="699.000000" y="113.000000"/> - <Link from="node_Components_1" output="BLOBS" to="node_NOP_1" input="INPUT"/> - <NetOutput name="COMPONENTS" node="node_NOP_1" terminal="OUTPUT" object_type="any" description="The output = The input"/> - <NetInput name="IMAGE" node="node_Components_1" terminal="IMAGE" object_type="Image" description="The Image to extract colors from."/> - <NetInput name="LOOKUP" node="node_Components_1" terminal="LOOKUP" object_type="ColorLookup" description="The color lookup to use"/> - </Network> <Network type="subnet" name="MAIN_PROCESS"> <Node name="node_PROCESS_SEGMENTS_1" type="PROCESS_SEGMENTS" x="184.000000" y="321.000000"> --- 15,18 ---- *************** *** 58,260 **** <Note x="291" y="47" visible="1" text="SKIP_N is useful because the camera is too slow, if we don't use SKIP_N, the camera will be overloaded with commands, which creates a terrific lag."/> </Network> - <Network type="subnet" name="PAN_TILT_CTRL"> - <Node name="node_GenericPID_1" type="GenericPID" x="-605.000000" y="-237.000000"> - <Parameter name="P_GAIN" type="float" value="0.5" description="Proportionnal gain."/> - <Parameter name="I_GAIN" type="float" value="0.0" description="Integral gain."/> - <Parameter name="D_GAIN" type="float" value="0.0" description="Derivative gain."/> - <Parameter name="I_MAX" type="float" value="100" description="The maximum Integral value."/> - </Node> - <Node name="node_Constant_1" type="Constant" x="-800.000000" y="-229.000000"> - <Parameter name="VALUE" type="bool" value="false" description="The value"/> - </Node> - <Node name="node_GenericPID_2" type="GenericPID" x="-608.000000" y="-154.000000"> - <Parameter name="P_GAIN" type="float" value="0.5" description="Proportionnal gain."/> - <Parameter name="I_GAIN" type="float" value="0.0" description="Integral gain."/> - <Parameter name="D_GAIN" type="float" value="0.0" description="Derivative gain."/> - <Parameter name="I_MAX" type="float" value="100" description="The maximum Integral value."/> - </Node> - <Node name="node_Constant_2" type="Constant" x="-796.000000" y="-146.000000"> - <Parameter name="VALUE" type="bool" value="false" description="The value"/> - </Node> - <Node name="node_NOP_1" type="NOP" x="-1393.000000" y="-271.000000"/> - <Node name="node_NOP_2" type="NOP" 
x="-1393.000000" y="-183.000000"/> - <Node name="node_NOP_3" type="NOP" x="-1393.000000" y="-153.000000"/> - <Node name="node_IF_1" type="IF" x="-294.000000" y="-253.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_isNil_1" type="isNil" x="-401.000000" y="-270.000000"/> - <Node name="node_NilObject_1" type="NilObject" x="-507.000000" y="-254.000000"/> - <Node name="node_IF_2" type="IF" x="-296.000000" y="-168.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_isNil_2" type="isNil" x="-406.000000" y="-184.000000"/> - <Node name="node_NilObject_2" type="NilObject" x="-485.000000" y="-168.000000"/> - <Node name="node_TEST_SCALING_1" type="AUTO_SCALE" x="-921.000000" y="-245.000000"/> - <Node name="node_TEST_SCALING_2" type="AUTO_SCALE" x="-916.000000" y="-161.000000"/> - <Node name="node_NOP_4" type="NOP" x="-1392.000000" y="-85.000000"/> - <Node name="node_NOP_5" type="NOP" x="-1394.000000" y="4.000000"/> - <Node name="node_TEST_ZOOMING_1" type="AUTO_ZOOM" x="-936.000000" y="-61.000000"/> - <Node name="node_Add_1" type="Add" x="-609.000000" y="-54.000000"/> - <Node name="node_IF_3" type="IF" x="-294.000000" y="-70.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_isNil_3" type="isNil" x="-418.000000" y="-84.000000"/> - <Node name="node_Constant_3" type="Constant" x="-497.000000" y="-69.000000"> - <Parameter name="VALUE" type="int" value="0" description="The value"/> - </Node> - <Link from="node_Constant_1" output="VALUE" to="node_GenericPID_1" input="RESET"/> - <Link from="node_Constant_2" output="VALUE" to="node_GenericPID_2" input="RESET"/> - <Link from="node_isNil_1" output="OUTPUT" to="node_IF_1" input="COND"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_isNil_1" input="INPUT"/> - <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="THEN"/> - <Link from="node_isNil_2" output="OUTPUT" to="node_IF_2" input="COND"/> - <Link from="node_NilObject_2" output="VALUE" to="node_IF_2" input="THEN"/> - <Link from="node_NOP_2" output="OUTPUT" to="node_isNil_2" input="INPUT"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_TEST_SCALING_1" input="DELTA"/> - <Link from="node_NOP_3" output="OUTPUT" to="node_TEST_SCALING_1" input="ZOOM_FACTOR"/> - <Link from="node_NOP_2" output="OUTPUT" to="node_TEST_SCALING_2" input="DELTA"/> - <Link from="node_NOP_3" output="OUTPUT" to="node_TEST_SCALING_2" input="ZOOM_FACTOR"/> - <Link from="node_TEST_SCALING_2" output="SCALE" to="node_GenericPID_2" input="EPSILON"/> - <Link from="node_TEST_SCALING_1" output="SCALE" to="node_GenericPID_1" input="EPSILON"/> - <Link from="node_GenericPID_2" output="OUTPUT" to="node_IF_2" input="ELSE"/> - <Link from="node_GenericPID_1" output="OUTPUT" to="node_IF_1" input="ELSE"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_TEST_ZOOMING_1" input="DELTA_X"/> - <Link from="node_NOP_2" output="OUTPUT" to="node_TEST_ZOOMING_1" input="DELTA_Y"/> - <Link from="node_NOP_4" output="OUTPUT" to="node_TEST_ZOOMING_1" input="BOUNDARY"/> - <Link from="node_TEST_ZOOMING_1" output="REL_ZOOM" to="node_Add_1" input="INPUT1"/> - <Link from="node_NOP_5" output="OUTPUT" to="node_Add_1" input="INPUT2"/> - <Link from="node_isNil_3" output="OUTPUT" to="node_IF_3" input="COND"/> - 
<Link from="node_NOP_4" output="OUTPUT" to="node_isNil_3" input="INPUT"/> - <Link from="node_Add_1" output="OUTPUT" to="node_IF_3" input="ELSE"/> - <Link from="node_Constant_3" output="VALUE" to="node_IF_3" input="THEN"/> - <NetInput name="DELTA_X" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> - <NetInput name="DELTA_Y" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> - <NetInput name="ZOOM_FACTOR" node="node_NOP_3" terminal="INPUT" object_type="any" description="The input"/> - <NetOutput name="REL_PAN" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> - <NetOutput name="REL_TILT" node="node_IF_2" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> - <NetInput name="BOUNDARY" node="node_NOP_4" terminal="INPUT" object_type="any" description="The input"/> - <NetInput name="CURRENT_ZOOM" node="node_NOP_5" terminal="INPUT" object_type="any" description="The input"/> - <NetOutput name="ABS_ZOOM" node="node_IF_3" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> - <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> - <Note x="-973" y="140" visible="1" text="According to DELTA_X and DELTA_Y and the BOUNDARY inputs, we will output incremental pan-tilt-zoom commands to center the color blobs in the image and get the maximum resolution possible."/> - </Network> - <Network type="subnet" name="SKIP_N"> - <Node name="node_IF_1" type="IF" x="890.000000" y="-24.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="false" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_Equal_1" type="Equal" x="768.000000" y="-53.000000"/> - <Node name="node_Constant_1" type="Constant" x="600.000000" y="-36.000000"> - <Parameter name="VALUE" type="int" value="0" description="The value"/> - </Node> - <Node name="node_IterCount_1" type="IterCount" x="292.000000" y="-77.000000"/> - <Node name="node_Modulo_1" type="Modulo" x="609.000000" y="-69.000000"/> - <Node name="node_Constant_2" type="Constant" x="397.000000" y="-62.000000"> - <Parameter name="VALUE" type="subnet_param" value="SKIP_N" description="The value"/> - </Node> - <Node name="node_NilObject_1" type="NilObject" x="732.000000" y="55.000000"/> - <Link from="node_Equal_1" output="OUTPUT" to="node_IF_1" input="COND"/> - <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="ELSE"/> - <Link from="node_Modulo_1" output="REMAINDER" to="node_Equal_1" input="INPUT1"/> - <Link from="node_Constant_1" output="VALUE" to="node_Equal_1" input="INPUT2"/> - <Link from="node_IterCount_1" output="OUTPUT" to="node_Modulo_1" input="DIVIDEND"/> - <Link from="node_Constant_2" output="VALUE" to="node_Modulo_1" input="DIVISOR"/> - <NetInput name="INPUT" node="node_IF_1" terminal="THEN" object_type="any" description="What to do if the condition is true"/> - <NetOutput name="OUTPUT" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> - <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> - <Note x="474" y="214" visible="1" text="Useful to skip N iteration (returning nilObject except when modulo = 0)"/> - </Network> - <Network type="subnet" name="AUTO_SCALE"> - <Node name="node_NOP_1" type="NOP" x="-257.000000" y="-156.000000"/> - <Node name="node_NOP_2" type="NOP" x="-746.000000" 
y="-100.000000"/> - <Node name="node_Power_1" type="Power" x="-283.000000" y="-95.000000"/> - <Node name="node_Mul_1" type="Mul" x="-432.000000" y="-86.000000"/> - <Node name="node_Constant_2" type="Constant" x="-597.000000" y="16.000000"> - <Parameter name="VALUE" type="float" value="-1" description="The value"/> - </Node> - <Node name="node_NOP_3" type="NOP" x="92.000000" y="-150.000000"/> - <Node name="node_Constant_3" type="Constant" x="-446.000000" y="-136.000000"> - <Parameter name="VALUE" type="float" value="1.1" description="The value"/> - </Node> - <Node name="node_Mul_2" type="Mul" x="-66.000000" y="-150.000000"/> - <Node name="node_Sub_1" type="Sub" x="-571.000000" y="-92.000000"/> - <Node name="node_Constant_1" type="Constant" x="-730.000000" y="-60.000000"> - <Parameter name="VALUE" type="float" value="1" description="The value"/> - </Node> - <Link from="node_Mul_1" output="OUTPUT" to="node_Power_1" input="EXP"/> - <Link from="node_Constant_2" output="VALUE" to="node_Mul_1" input="INPUT2"/> - <Link from="node_Constant_3" output="VALUE" to="node_Power_1" input="BASE"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_Mul_2" input="INPUT1"/> - <Link from="node_Power_1" output="OUTPUT" to="node_Mul_2" input="INPUT2"/> - <Link from="node_Sub_1" output="OUTPUT" to="node_Mul_1" input="INPUT1"/> - <Link from="node_NOP_2" output="OUTPUT" to="node_Sub_1" input="INPUT1"/> - <Link from="node_Constant_1" output="VALUE" to="node_Sub_1" input="INPUT2"/> - <Link from="node_Mul_2" output="OUTPUT" to="node_NOP_3" input="INPUT"/> - <NetInput name="DELTA" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> - <NetInput name="ZOOM_FACTOR" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> - <NetOutput name="SCALE" node="node_NOP_3" terminal="OUTPUT" object_type="any" description="The output = The input"/> - <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> - <Note x="-293" y="126" visible="1" text="Will scale DELTA value according to the ZOOM factor."/> - </Network> - <Network type="subnet" name="AUTO_ZOOM"> - <Node name="node_IF_1" type="IF" x="-378.000000" y="-90.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_AND_1" type="AND" x="-780.000000" y="-97.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="false" description="Pull on INPUT2 even if INPUT1 is false"/> - </Node> - <Node name="node_Smaller_1" type="Smaller" x="-965.000000" y="-103.000000"/> - <Node name="node_NOP_1" type="NOP" x="-1404.000000" y="-109.000000"/> - <Node name="node_NOP_2" type="NOP" x="-1405.000000" y="-49.000000"/> - <Node name="node_NOP_3" type="NOP" x="-1300.000000" y="-198.000000"/> - <Node name="node_Constant_2" type="Constant" x="-1183.000000" y="-96.000000"> - <Parameter name="VALUE" type="float" value="10" description="The value"/> - </Node> - <Node name="node_Smaller_2" type="Smaller" x="-966.000000" y="-42.000000"/> - <Node name="node_AND_2" type="AND" x="-614.000000" y="-105.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="false" description="Pull on INPUT2 even if INPUT1 is false"/> - </Node> - <Node name="node_Greater_1" type="Greater" x="-797.000000" y="-192.000000"/> - <Node name="node_Constant_3" type="Constant" x="-1058.000000" y="-162.000000"> - <Parameter name="VALUE" type="int" value="15" description="The value"/> - </Node> - <Node name="node_Constant_5" type="Constant" x="-809.000000" 
y="86.000000"> - <Parameter name="VALUE" type="int" value="-50" description="The value"/> - </Node> - <Node name="node_IF_2" type="IF" x="-563.000000" y="72.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_Constant_6" type="Constant" x="-709.000000" y="72.000000"> - <Parameter name="VALUE" type="int" value="0" description="The value"/> - </Node> - <Node name="node_ABS_1" type="ABS" x="-1284.000000" y="-109.000000"/> - <Node name="node_ABS_2" type="ABS" x="-1282.000000" y="-49.000000"/> - <Node name="node_Constant_1" type="Constant" x="-535.000000" y="-91.000000"> - <Parameter name="VALUE" type="int" value="50" description="The value"/> - </Node> - <Link from="node_Smaller_1" output="OUTPUT" to="node_AND_1" input="INPUT1"/> - <Link from="node_Constant_2" output="VALUE" to="node_Smaller_1" input="INPUT2"/> - <Link from="node_Smaller_2" output="OUTPUT" to="node_AND_1" input="INPUT2"/> - <Link from="node_AND_2" output="OUTPUT" to="node_IF_1" input="COND"/> - <Link from="node_Greater_1" output="OUTPUT" to="node_AND_2" input="INPUT1"/> - <Link from="node_NOP_3" output="OUTPUT" to="node_Greater_1" input="INPUT1"/> - <Link from="node_Constant_3" output="VALUE" to="node_Greater_1" input="INPUT2"/> - <Link from="node_IF_2" output="OUTPUT" to="node_IF_1" input="ELSE"/> - <Link from="node_Greater_1" output="OUTPUT" to="node_IF_2" input="COND"/> - <Link from="node_Constant_6" output="VALUE" to="node_IF_2" input="THEN"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_ABS_1" input="INPUT"/> - <Link from="node_ABS_1" output="OUTPUT" to="node_Smaller_1" input="INPUT1"/> - <Link from="node_NOP_2" output="OUTPUT" to="node_ABS_2" input="INPUT"/> - <Link from="node_Constant_2" output="VALUE" to="node_Smaller_2" input="INPUT2">-1149 -96 -1149 -36 -1049.5 -34.5 </Link> - <Link from="node_ABS_2" output="OUTPUT" to="node_Smaller_2" input="INPUT1"/> - <Link from="node_AND_1" output="OUTPUT" to="node_AND_2" input="INPUT2"/> - <Link from="node_Constant_5" output="VALUE" to="node_IF_2" input="ELSE"/> - <Link from="node_Constant_1" output="VALUE" to="node_IF_1" input="THEN"/> - <NetInput name="DELTA_X" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> - <NetInput name="DELTA_Y" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> - <NetInput name="BOUNDARY" node="node_NOP_3" terminal="INPUT" object_type="any" description="The input"/> - <NetOutput name="REL_ZOOM" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> - <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> - <Note x="-1080" y="162" visible="1" text="The idea is to get the color component centered enough before sending zoom increments. If we are too close to the image border, we send decrement the zoom value."/> - </Network> <Network type="subnet" name="ABS"> <Node name="node_IF_1" type="IF" x="-241.000000" y="-18.000000"> --- 45,48 ---- *************** *** 384,388 **** <Node name="node_Concatenate_2" type="Concatenate" x="-624.000000" y="132.000000"/> <Node name="node_NewSymbolIdentify_1" type="NewSymbolIdentify" x="-308.000000" y="216.000000"> ! 
<Parameter name="THRESHOLD" type="float" value="0.80" description=""/> </Node> <Node name="node_Concat_1" type="Concat" x="-208.000000" y="134.000000"/> --- 172,176 ---- <Node name="node_Concatenate_2" type="Concatenate" x="-624.000000" y="132.000000"/> <Node name="node_NewSymbolIdentify_1" type="NewSymbolIdentify" x="-308.000000" y="216.000000"> ! <Parameter name="THRESHOLD" type="float" value="0.50" description=""/> </Node> <Node name="node_Concat_1" type="Concat" x="-208.000000" y="134.000000"/> *************** *** 478,519 **** <Note x="-125" y="380" visible="1" text="This is a loop to extract all characters information from the image components and store the result into an accumulator."/> </Network> - <Network type="subnet" name="FIND_THRESHOLD"> - <Node name="node_StatIntensityAnalyser_1" type="StatIntensityAnalyser" x="-257.000000" y="-292.000000"> - <Parameter name="FRACTION_ANALYSED" type="float" value="0.1" description=""/> - </Node> - <Node name="node_Add_1" type="Add" x="71.000000" y="-290.000000"/> - <Node name="node_Div_1" type="Div" x="247.000000" y="-283.000000"/> - <Node name="node_Constant_1" type="Constant" x="57.000000" y="-245.000000"> - <Parameter name="VALUE" type="float" value="2.5" description=""/> - </Node> - <Link from="node_StatIntensityAnalyser_1" output="MAX_INTENSITY" to="node_Add_1" input="INPUT1"/> - <Link from="node_StatIntensityAnalyser_1" output="MIN_INTENSITY" to="node_Add_1" input="INPUT2"/> - <Link from="node_Add_1" output="OUTPUT" to="node_Div_1" input="NUM"/> - <Link from="node_Constant_1" output="VALUE" to="node_Div_1" input="DEN"/> - <NetInput name="IMAGE_IN" node="node_StatIntensityAnalyser_1" terminal="IMAGE_IN" object_type="any" description="No description available"/> - <NetOutput name="THRESHOLD" node="node_Div_1" terminal="OUTPUT" object_type="any" description="No description available"/> - <Note x="10" y="10" visible="1" text="Calculate the maximum intensity and minimum image intensity with random pixels (1%). 
Threshold is calculated with : (MAX+ MIN) / 2.5"/> - </Network> - <Network type="subnet" name="ZOOM_FACTOR"> - <Node name="node_Div_1" type="Div" x="-132.000000" y="147.000000"/> - <Node name="node_Constant_1" type="Constant" x="-244.000000" y="156.000000"> - <Parameter name="VALUE" type="float" value="683" description="The value"/> - </Node> - <Node name="node_Add_1" type="Add" x="-24.000000" y="139.000000"/> - <Node name="node_Constant_2" type="Constant" x="-180.000000" y="87.000000"> - <Parameter name="VALUE" type="float" value="1.0" description="The value"/> - </Node> - <Node name="node_NOP_1" type="NOP" x="-486.000000" y="140.000000"/> - <Node name="node_NOP_2" type="NOP" x="191.000000" y="139.000000"/> - <Link from="node_Constant_1" output="VALUE" to="node_Div_1" input="DEN"/> - <Link from="node_Constant_2" output="VALUE" to="node_Add_1" input="INPUT1"/> - <Link from="node_Div_1" output="OUTPUT" to="node_Add_1" input="INPUT2"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_Div_1" input="NUM"/> - <Link from="node_Add_1" output="OUTPUT" to="node_NOP_2" input="INPUT"/> - <NetInput name="ABS_ZOOM_VALUE" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> - <NetOutput name="ZOOM_FACTOR" node="node_NOP_2" terminal="OUTPUT" object_type="any" description="The output = The input"/> - <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> - <Note x="-186" y="428" visible="1" text="Zoom factor 1X - 25X is calculated according to the Sony SNC-RZ30 camera."/> - </Network> <Network type="subnet" name="PROCESS_SEGMENTS"> <Node name="node_SENTENCE_READER_1" type="SENTENCE_READER" x="-334.000000" y="-384.000000"> --- 266,269 ---- *************** *** 528,566 **** <Note x="-373" y="-194" visible="1" text="Will output Original text (without the dicitonary) and the sentence (if any) recognized in the image"/> </Network> - <Network type="subnet" name="COLOR_TRACKER"> - <Node name="node_ColorTracker_1" type="ColorTracker" x="-746.000000" y="-212.000000"/> - <Node name="node_Constant_1" type="Constant" x="-1053.000000" y="-220.000000"> - <Parameter name="VALUE" type="int" value="0" description="The value"/> - </Node> - <Node name="node_IF_1" type="IF" x="-182.000000" y="-294.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_IF_2" type="IF" x="-183.000000" y="-235.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_IF_3" type="IF" x="-183.000000" y="-175.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_NOP_1" type="NOP" x="-512.000000" y="-343.000000"/> - <Node name="node_NilObject_1" type="NilObject" x="-485.000000" y="-159.000000"/> - <Node name="node_NOP_2" type="NOP" x="-1018.000000" y="-161.000000"/> - <Link from="node_Constant_1" output="VALUE" to="node_ColorTracker_1" input="COLOR_ID"/> - <Link from="node_ColorTracker_1" output="DELTA_X" to="node_IF_1" input="THEN"/> - <Link from="node_ColorTracker_1" output="DELTA_Y" to="node_IF_2" input="THEN"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_IF_1" input="COND"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_IF_2" input="COND"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_IF_3" input="COND"/> - <Link from="node_NilObject_1" output="VALUE" 
to="node_IF_1" input="ELSE"/> - <Link from="node_NilObject_1" output="VALUE" to="node_IF_2" input="ELSE"/> - <Link from="node_NilObject_1" output="VALUE" to="node_IF_3" input="ELSE"/> - <Link from="node_NOP_2" output="OUTPUT" to="node_ColorTracker_1" input="COMPONENTS"/> - <Link from="node_ColorTracker_1" output="BOUNDARY" to="node_IF_3" input="THEN"/> - <NetOutput name="DELTA_X" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> - <NetOutput name="DELTA_Y" node="node_IF_2" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> - <NetOutput name="BOUNDARY" node="node_IF_3" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> - <NetInput name="ACTIVATED" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> - <NetInput name="COMPONENTS" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> - <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> - </Network> <Network type="subnet" name="IF_ACTIVATED_DO"> <Node name="node_IF_1" type="IF" x="-316.000000" y="-92.000000"> --- 278,281 ---- |
From: Dominic L. <ma...@us...> - 2005-06-08 14:28:50
|
Update of /cvsroot/robotflow/RobotFlow/Vision/n-files In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv706 Modified Files: READv2.n Log Message: Removing camera dependency Index: READv2.n =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/n-files/READv2.n,v retrieving revision 1.2 retrieving revision 1.3 diff -C2 -d -r1.2 -r1.3 *** READv2.n 6 Jun 2005 20:59:14 -0000 1.2 --- READv2.n 8 Jun 2005 14:28:35 -0000 1.3 *************** *** 3,7 **** <Document category="RobotFlow:LABORIUS"> <Network type="subnet" name="MAIN"> ! <Node name="node_LOOP0_1" type="MAIN_PROCESS" x="-3397.000000" y="-618.000000"/> <NetInput name="NNET" node="node_LOOP0_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_LOOP0_1" terminal="DICT" object_type="" description="No description available"/> --- 3,10 ---- <Document category="RobotFlow:LABORIUS"> <Network type="subnet" name="MAIN"> ! <Node name="node_LOOP0_1" type="MAIN_PROCESS" x="-3397.000000" y="-618.000000"> ! <Parameter name="MIN_SYMBOL_HEIGHT" type="subnet_param" value="MIN_SYMBOL_HEIGHT" description="The value"/> ! <Parameter name="MIN_SYMBOL_WIDTH" type="subnet_param" value="MIN_SYMBOL_WIDTH" description="The value"/> ! </Node> <NetInput name="NNET" node="node_LOOP0_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_LOOP0_1" terminal="DICT" object_type="" description="No description available"/> *************** *** 9,13 **** <NetOutput name="SENTENCE" node="node_LOOP0_1" terminal="SENTENCE" object_type="" description="The object from THEN or ELSE depending on COND"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_LOOP0_1" terminal="BLACK_WHITE_LOOKUP" object_type="any" description="No description available"/> ! <NetInput name="RGB15_BINARIZED" node="node_LOOP0_1" terminal="RGB15_BINARIZED" object_type="Image" description="The Image to extract colors from."/> <Note x="-3418" y="-384" visible="1" text="The MAIN network will load configuration from the files containing : - black and white color lookup - the neural network to use for recog - The dictionary to use * You can change this configuration by double clicking on the LoadFile blocks"/> </Network> --- 12,16 ---- <NetOutput name="SENTENCE" node="node_LOOP0_1" terminal="SENTENCE" object_type="" description="The object from THEN or ELSE depending on COND"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_LOOP0_1" terminal="BLACK_WHITE_LOOKUP" object_type="any" description="No description available"/> ! <NetInput name="RGB15_BINARIZED" node="node_LOOP0_1" terminal="RGB15_BINARIZED" object_type="any" description="No description available"/> <Note x="-3418" y="-384" visible="1" text="The MAIN network will load configuration from the files containing : - black and white color lookup - the neural network to use for recog - The dictionary to use * You can change this configuration by double clicking on the LoadFile blocks"/> </Network> *************** *** 26,41 **** </Network> <Network type="subnet" name="MAIN_PROCESS"> ! <Node name="node_PROCESS_SEGMENTS_1" type="PROCESS_SEGMENTS" x="184.000000" y="321.000000"/> <Node name="node_IF_ACTIVATED_DO_4" type="IF_ACTIVATED_DO" x="696.000000" y="313.000000"/> <Node name="node_NOP_3" type="NOP" x="221.000000" y="183.000000"/> ! <Node name="node_Components_1" type="Components" x="-252.000000" y="335.000000"> ! <Parameter name="XGAP" type="int" value="1" description=""/> ! 
<Parameter name="YGAP" type="int" value="1" description=""/> <Parameter name="NUM_COLOR" type="int" value="2" description=""/> ! <Parameter name="MIN_AREA" type="int" value="16" description=""/> </Node> <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_4" input="SIGN_TRACKING_ACTIVATED"/> - <Link from="node_Components_1" output="BLOBS" to="node_PROCESS_SEGMENTS_1" input="BLACK_WHITE_COMPONENTS"/> <Link from="node_PROCESS_SEGMENTS_1" output="SENTENCE" to="node_IF_ACTIVATED_DO_4" input="DO"/> <NetInput name="NNET" node="node_PROCESS_SEGMENTS_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_PROCESS_SEGMENTS_1" terminal="DICT" object_type="" description="No description available"/> --- 29,49 ---- </Network> <Network type="subnet" name="MAIN_PROCESS"> ! <Node name="node_PROCESS_SEGMENTS_1" type="PROCESS_SEGMENTS" x="184.000000" y="321.000000"> ! <Parameter name="MIN_SYMBOL_HEIGHT" type="subnet_param" value="MIN_SYMBOL_HEIGHT" description="The value"/> ! <Parameter name="MIN_SYMBOL_WIDTH" type="subnet_param" value="MIN_SYMBOL_WIDTH" description="The value"/> ! </Node> <Node name="node_IF_ACTIVATED_DO_4" type="IF_ACTIVATED_DO" x="696.000000" y="313.000000"/> <Node name="node_NOP_3" type="NOP" x="221.000000" y="183.000000"/> ! <Node name="node_Components_1" type="Components" x="-327.000000" y="336.000000"> ! <Parameter name="XGAP" type="int" value="3" description=""/> ! <Parameter name="YGAP" type="int" value="3" description=""/> <Parameter name="NUM_COLOR" type="int" value="2" description=""/> ! <Parameter name="MIN_AREA" type="int" value="5" description=""/> </Node> + <Node name="node_NOP_1" type="NOP" x="-509.000000" y="298.000000"/> <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_4" input="SIGN_TRACKING_ACTIVATED"/> <Link from="node_PROCESS_SEGMENTS_1" output="SENTENCE" to="node_IF_ACTIVATED_DO_4" input="DO"/> + <Link from="node_NOP_1" output="OUTPUT" to="node_Components_1" input="IMAGE"/> + <Link from="node_Components_1" output="BLOBS" to="node_PROCESS_SEGMENTS_1" input="BLACK_WHITE_COMPONENTS"/> <NetInput name="NNET" node="node_PROCESS_SEGMENTS_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_PROCESS_SEGMENTS_1" terminal="DICT" object_type="" description="No description available"/> *************** *** 43,47 **** <NetInput name="SIGN_TRACKING_ACTIVATED" node="node_NOP_3" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_Components_1" terminal="LOOKUP" object_type="ColorLookup" description="The color lookup to use"/> ! <NetInput name="RGB15_BINARIZED" node="node_Components_1" terminal="IMAGE" object_type="Image" description="The Image to extract colors from."/> <Note x="-1480" y="269" visible="1" text="Sony SNC-RZ30 Network Camera controller."/> <Note x="-1013" y="42" visible="1" text="Image must be converted to RGB15 from the camera before training."/> --- 51,55 ---- <NetInput name="SIGN_TRACKING_ACTIVATED" node="node_NOP_3" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_Components_1" terminal="LOOKUP" object_type="ColorLookup" description="The color lookup to use"/> ! 
<NetInput name="RGB15_BINARIZED" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> <Note x="-1480" y="269" visible="1" text="Sony SNC-RZ30 Network Camera controller."/> <Note x="-1013" y="42" visible="1" text="Image must be converted to RGB15 from the camera before training."/> *************** *** 281,289 **** </Node> <Node name="node_STRING_EXTRACTOR_1" type="STRING_EXTRACTOR" x="86.000000" y="321.000000"> <Parameter name="DOWHILE" type="bool" value="" description="No description available"/> </Node> <Node name="node_ExtractSentence_v2_1" type="ExtractSentence_v2" x="441.000000" y="329.000000"> ! <Parameter name="VERTICAL_TOLERANCE" type="float" value="0.5" description=""/> ! <Parameter name="HORIZONTAL_TOLERANCE" type="float" value="0.5" description=""/> </Node> <Node name="node_NOP_2" type="NOP" x="-524.000000" y="299.000000"/> --- 289,299 ---- </Node> <Node name="node_STRING_EXTRACTOR_1" type="STRING_EXTRACTOR" x="86.000000" y="321.000000"> + <Parameter name="MIN_SYMBOL_HEIGHT" type="subnet_param" value="MIN_SYMBOL_HEIGHT" description="The value"/> + <Parameter name="MIN_SYMBOL_WIDTH" type="subnet_param" value="MIN_SYMBOL_WIDTH" description="The value"/> <Parameter name="DOWHILE" type="bool" value="" description="No description available"/> </Node> <Node name="node_ExtractSentence_v2_1" type="ExtractSentence_v2" x="441.000000" y="329.000000"> ! <Parameter name="VERTICAL_TOLERANCE" type="float" value="0.8" description=""/> ! <Parameter name="HORIZONTAL_TOLERANCE" type="float" value="0.8" description=""/> </Node> <Node name="node_NOP_2" type="NOP" x="-524.000000" y="299.000000"/> *************** *** 356,360 **** <Parameter name="CHANNEL" type="int" value="1" description=""/> </Node> ! <Node name="node_Scale_1" type="Scale" x="-787.000000" y="115.000000"> <Parameter name="WIDTH" type="int" value="13" description=""/> <Parameter name="HEIGHT" type="int" value="13" description=""/> --- 366,370 ---- <Parameter name="CHANNEL" type="int" value="1" description=""/> </Node> ! <Node name="node_Scale_1" type="Scale" x="-860.000000" y="113.000000"> <Parameter name="WIDTH" type="int" value="13" description=""/> <Parameter name="HEIGHT" type="int" value="13" description=""/> *************** *** 388,395 **** <Node name="node_Greater_2" type="Greater" x="-184.000000" y="-185.000000"/> <Node name="node_Constant_2" type="Constant" x="-482.000000" y="-274.000000"> ! <Parameter name="VALUE" type="int" value="30" description="The value"/> </Node> <Node name="node_Constant_3" type="Constant" x="-481.000000" y="-247.000000"> ! <Parameter name="VALUE" type="int" value="30" description="The value"/> </Node> <Node name="node_NilObject_1" type="NilObject" x="443.000000" y="-169.000000"/> --- 398,405 ---- <Node name="node_Greater_2" type="Greater" x="-184.000000" y="-185.000000"/> <Node name="node_Constant_2" type="Constant" x="-482.000000" y="-274.000000"> ! <Parameter name="VALUE" type="subnet_param" value="MIN_SYMBOL_HEIGHT" description="The value"/> </Node> <Node name="node_Constant_3" type="Constant" x="-481.000000" y="-247.000000"> ! 
<Parameter name="VALUE" type="subnet_param" value="MIN_SYMBOL_WIDTH" description="The value"/> </Node> <Node name="node_NilObject_1" type="NilObject" x="443.000000" y="-169.000000"/> *************** *** 401,405 **** <Link from="node_VectorSetIndex_1" output="OUTPUT" to="node_Concatenate_1" input="INPUT1">215.5 235 269 236 268 121 306 119.5 </Link> <Link from="node_SymbolExtractor_1" output="SYMBOL" to="node_EXTRA_INFO_1" input="INPUT"/> - <Link from="node_Scale_1" output="OUTPUT" to="node_Concatenate_2" input="INPUT1"/> <Link from="node_EXTRA_INFO_1" output="OUTPUT" to="node_Concatenate_2" input="INPUT2"/> <Link from="node_RectBoundaries_1" output="OUTPUT" to="node_Concat_1" input="INPUT1"/> --- 411,414 ---- *************** *** 412,422 **** <Link from="node_AND_1" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_Greater_1" output="OUTPUT" to="node_AND_1" input="INPUT1"/> - <Link from="node_RectAnalyser_1" output="WIDTH" to="node_Greater_2" input="INPUT2"/> - <Link from="node_RectAnalyser_1" output="HEIGHT" to="node_Greater_1" input="INPUT2"/> <Link from="node_Greater_2" output="OUTPUT" to="node_AND_1" input="INPUT2"/> - <Link from="node_Constant_2" output="VALUE" to="node_Greater_1" input="INPUT1"/> - <Link from="node_Constant_3" output="VALUE" to="node_Greater_2" input="INPUT1"/> <Link from="node_Concatenate_1" output="OUTPUT" to="node_IF_1" input="THEN">452 124 548 125 549 -185 671 -185 </Link> <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="ELSE"/> <NetInput name="DATA" node="node_SymbolExtractor_1" terminal="DATA" object_type="any" description="No description available"/> <NetInput name="INDEX" node="node_SymbolExtractor_1" terminal="INDEX" object_type="any" description="No description available"/> --- 421,432 ---- <Link from="node_AND_1" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_Greater_1" output="OUTPUT" to="node_AND_1" input="INPUT1"/> <Link from="node_Greater_2" output="OUTPUT" to="node_AND_1" input="INPUT2"/> <Link from="node_Concatenate_1" output="OUTPUT" to="node_IF_1" input="THEN">452 124 548 125 549 -185 671 -185 </Link> <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="ELSE"/> + <Link from="node_RectAnalyser_1" output="HEIGHT" to="node_Greater_1" input="INPUT1"/> + <Link from="node_Constant_2" output="VALUE" to="node_Greater_1" input="INPUT2"/> + <Link from="node_RectAnalyser_1" output="WIDTH" to="node_Greater_2" input="INPUT1"/> + <Link from="node_Constant_3" output="VALUE" to="node_Greater_2" input="INPUT2"/> + <Link from="node_Scale_1" output="OUTPUT" to="node_Concatenate_2" input="INPUT1"/> <NetInput name="DATA" node="node_SymbolExtractor_1" terminal="DATA" object_type="any" description="No description available"/> <NetInput name="INDEX" node="node_SymbolExtractor_1" terminal="INDEX" object_type="any" description="No description available"/> *************** *** 440,444 **** <Node name="node_Smaller_1" type="Smaller" x="-235.000000" y="16.000000"/> <Node name="node_IterCount_1" type="IterCount" x="-500.000000" y="9.000000"/> ! <Node name="node_SYMBOL_EXTRACTOR_1" type="SYMBOL_EXTRACTOR" x="-107.000000" y="121.000000"/> <Node name="node_NOP_1" type="NOP" x="-99.000000" y="203.000000"/> <Node name="node_IF_2" type="IF" x="384.000000" y="135.000000"> --- 450,457 ---- <Node name="node_Smaller_1" type="Smaller" x="-235.000000" y="16.000000"/> <Node name="node_IterCount_1" type="IterCount" x="-500.000000" y="9.000000"/> ! 
<Node name="node_SYMBOL_EXTRACTOR_1" type="SYMBOL_EXTRACTOR" x="-107.000000" y="121.000000"> ! <Parameter name="MIN_SYMBOL_HEIGHT" type="subnet_param" value="MIN_SYMBOL_HEIGHT" description="The value"/> ! <Parameter name="MIN_SYMBOL_WIDTH" type="subnet_param" value="MIN_SYMBOL_WIDTH" description="The value"/> ! </Node> <Node name="node_NOP_1" type="NOP" x="-99.000000" y="203.000000"/> <Node name="node_IF_2" type="IF" x="384.000000" y="135.000000"> *************** *** 455,460 **** <Link from="node_NOP_1" output="OUTPUT" to="node_IF_2" input="THEN"/> <Link from="node_Accumulate_1" output="OUTPUT" to="node_IF_2" input="ELSE"/> - <Link from="node_NOP_1" output="OUTPUT" to="node_Accumulate_1" input="ACCUM"/> <Link from="node_SYMBOL_EXTRACTOR_1" output="SYMBOL_VECTOR" to="node_Accumulate_1" input="INPUT"/> <NetInput name="DATA" node="node_SYMBOL_EXTRACTOR_1" terminal="DATA" object_type="any" description="No description available"/> <NetInput name="NNET" node="node_SYMBOL_EXTRACTOR_1" terminal="NNET" object_type="any" description="No description available"/> --- 468,473 ---- <Link from="node_NOP_1" output="OUTPUT" to="node_IF_2" input="THEN"/> <Link from="node_Accumulate_1" output="OUTPUT" to="node_IF_2" input="ELSE"/> <Link from="node_SYMBOL_EXTRACTOR_1" output="SYMBOL_VECTOR" to="node_Accumulate_1" input="INPUT"/> + <Link from="node_NOP_1" output="OUTPUT" to="node_Accumulate_1" input="ACCUM"/> <NetInput name="DATA" node="node_SYMBOL_EXTRACTOR_1" terminal="DATA" object_type="any" description="No description available"/> <NetInput name="NNET" node="node_SYMBOL_EXTRACTOR_1" terminal="NNET" object_type="any" description="No description available"/> *************** *** 482,576 **** <Note x="10" y="10" visible="1" text="Calculate the maximum intensity and minimum image intensity with random pixels (1%). 
Threshold is calculated with : (MAX+ MIN) / 2.5"/> </Network> - <Network type="subnet" name="MULTI_SIGN_TRACKING"> - <Node name="node_MultiSignTracking_1" type="MultiSignTracking" x="349.000000" y="-56.000000"> - <Parameter name="MIN_WIDTH" type="int" value="25" description="minimum width to obtain before we are ready for recognition."/> - <Parameter name="MIN_HEIGHT" type="int" value="35" description="minimum height to obtain before we are ready for recognition."/> - <Parameter name="FOREGROUND_COLOR_ID" type="int" value="1" description="The color number being tracked."/> - <Parameter name="BACKGROUND_COLOR_ID" type="int" value="0" description="The color number being tracked."/> - </Node> - <Node name="node_NOP_1" type="NOP" x="945.000000" y="-105.000000"/> - <Node name="node_COLOR_TRACKER_1" type="COLOR_TRACKER" x="353.000000" y="85.000000"/> - <Node name="node_Subsumption_1" type="Subsumption" x="1002.000000" y="12.000000"/> - <Node name="node_Subsumption_2" type="Subsumption" x="1004.000000" y="58.000000"/> - <Node name="node_Subsumption_3" type="Subsumption" x="1006.000000" y="111.000000"/> - <Node name="node_NOP_2" type="NOP" x="-61.000000" y="92.000000"/> - <Node name="node_Constant_1" type="Constant" x="119.000000" y="42.000000"> - <Parameter name="VALUE" type="bool" value="true" description="The value"/> - </Node> - <Node name="node_NOP_3" type="NOP" x="27.000000" y="-63.000000"/> - <Node name="node_NOP_4" type="NOP" x="27.000000" y="-9.000000"/> - <Node name="node_IF_1" type="IF" x="724.000000" y="87.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_isNil_1" type="isNil" x="587.000000" y="72.000000"/> - <Node name="node_NilObject_1" type="NilObject" x="570.000000" y="111.000000"/> - <Node name="node_IF_2" type="IF" x="727.000000" y="147.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_IF_3" type="IF" x="723.000000" y="18.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_NilObject_2" type="NilObject" x="728.000000" y="-46.000000"/> - <Node name="node_RectBoundaries_1" type="RectBoundaries" x="340.000000" y="170.000000"/> - <Node name="node_SymbolExtractor_1" type="SymbolExtractor" x="84.000000" y="169.000000"> - <Parameter name="CHANNEL" type="int" value="0" description="Channel from which to extract the symbol"/> - </Node> - <Node name="node_Constant_2" type="Constant" x="-122.000000" y="176.000000"> - <Parameter name="VALUE" type="int" value="0" description="The value"/> - </Node> - <Node name="node_SymbolCounter_1" type="SymbolCounter" x="216.000000" y="245.000000"> - <Parameter name="CHANNEL" type="int" value="0" description="Channel from which to extract the symbol"/> - <Parameter name="DEPTH" type="int" value="2" description="Depth of the image (in bytes per pixel)"/> - </Node> - <Node name="node_IF_4" type="IF" x="630.000000" y="266.000000"> - <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> - </Node> - <Node name="node_Greater_1" type="Greater" x="494.000000" y="251.000000"/> - <Node name="node_Constant_3" type="Constant" x="339.000000" y="277.000000"> - <Parameter name="VALUE" type="int" value="0" description="The value"/> - </Node> - <Node 
name="node_Constant_4" type="Constant" x="467.000000" y="306.000000"> - <Parameter name="VALUE" type="bool" value="false" description="The value"/> - </Node> - <Link from="node_MultiSignTracking_1" output="COMPONENTS" to="node_NOP_1" input="INPUT"/> - <Link from="node_Constant_1" output="VALUE" to="node_COLOR_TRACKER_1" input="ACTIVATED"/> - <Link from="node_NOP_3" output="OUTPUT" to="node_MultiSignTracking_1" input="ACTIVATED"/> - <Link from="node_NOP_2" output="OUTPUT" to="node_COLOR_TRACKER_1" input="COMPONENTS"/> - <Link from="node_NOP_4" output="OUTPUT" to="node_MultiSignTracking_1" input="COMPONENTS"/> - <Link from="node_isNil_1" output="OUTPUT" to="node_IF_1" input="COND"/> - <Link from="node_COLOR_TRACKER_1" output="DELTA_X" to="node_isNil_1" input="INPUT"/> - <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="THEN"/> - <Link from="node_MultiSignTracking_1" output="DELTA_X" to="node_IF_1" input="ELSE"/> - <Link from="node_IF_1" output="OUTPUT" to="node_Subsumption_1" input="LOW_PRIORITY"/> - <Link from="node_isNil_1" output="OUTPUT" to="node_IF_2" input="COND"/> - <Link from="node_IF_2" output="OUTPUT" to="node_Subsumption_2" input="LOW_PRIORITY"/> - <Link from="node_NilObject_1" output="VALUE" to="node_IF_2" input="THEN"/> - <Link from="node_MultiSignTracking_1" output="DELTA_Y" to="node_IF_2" input="ELSE"/> - <Link from="node_MultiSignTracking_1" output="BOUNDARY" to="node_IF_3" input="ELSE"/> - <Link from="node_NilObject_1" output="VALUE" to="node_IF_3" input="THEN"/> - <Link from="node_isNil_1" output="OUTPUT" to="node_IF_3" input="COND"/> - <Link from="node_IF_3" output="OUTPUT" to="node_Subsumption_3" input="LOW_PRIORITY"/> - <Link from="node_NilObject_2" output="VALUE" to="node_Subsumption_1" input="HIGH_PRIORITY"/> - <Link from="node_NilObject_2" output="VALUE" to="node_Subsumption_2" input="HIGH_PRIORITY"/> - <Link from="node_NilObject_2" output="VALUE" to="node_Subsumption_3" input="HIGH_PRIORITY"/> - <Link from="node_Constant_2" output="VALUE" to="node_SymbolExtractor_1" input="INDEX"/> - <Link from="node_SymbolExtractor_1" output="SYMBOL" to="node_RectBoundaries_1" input="CRECT"/> - <Link from="node_SymbolCounter_1" output="COUNT" to="node_Greater_1" input="INPUT1"/> - <Link from="node_SymbolExtractor_1" output="SYMBOL" to="node_SymbolCounter_1" input="DATA"/> - <Link from="node_Greater_1" output="OUTPUT" to="node_IF_4" input="COND"/> - <Link from="node_Constant_3" output="VALUE" to="node_Greater_1" input="INPUT2"/> - <Link from="node_RectBoundaries_1" output="OUTPUT" to="node_IF_4" input="THEN">397 170 398 265 575 266 </Link> - <Link from="node_Constant_4" output="VALUE" to="node_IF_4" input="ELSE"/> - <NetOutput name="COMPONENTS" node="node_NOP_1" terminal="OUTPUT" object_type="any" description="The output = The input"/> - <NetInput name="COMPONENTS" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> - <NetOutput name="DELTA_X" node="node_Subsumption_1" terminal="OUTPUT" object_type="any" description="No description available"/> - <NetOutput name="DELTA_Y" node="node_Subsumption_2" terminal="OUTPUT" object_type="any" description="No description available"/> - <NetOutput name="BOUNDARY" node="node_Subsumption_3" terminal="OUTPUT" object_type="any" description="No description available"/> - <NetInput name="SIGN_TRACKING_ACTIVATED" node="node_NOP_3" terminal="INPUT" object_type="any" description="The input"/> - <NetInput name="BLACK_WHITE_COMPONENTS" node="node_NOP_4" terminal="INPUT" object_type="any" description="The input"/> 
- <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> - <Note x="198" y="472" visible="1" text="If activated, the MultiSignTracking behavior will output probable characters components, delta_x, delta_y and boundray of the group of components that could be characters."/> - <Note x="843" y="370" visible="1" text="If we can't find blobs of foreground color surrounded by the background color, we will use only foreground color to "scan"."/> - </Network> <Network type="subnet" name="ZOOM_FACTOR"> <Node name="node_Div_1" type="Div" x="-132.000000" y="147.000000"/> --- 495,498 ---- *************** *** 595,599 **** </Network> <Network type="subnet" name="PROCESS_SEGMENTS"> ! <Node name="node_SENTENCE_READER_1" type="SENTENCE_READER" x="-334.000000" y="-384.000000"/> <NetInput name="NNET" node="node_SENTENCE_READER_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_SENTENCE_READER_1" terminal="DICT" object_type="" description="No description available"/> --- 517,524 ---- </Network> <Network type="subnet" name="PROCESS_SEGMENTS"> ! <Node name="node_SENTENCE_READER_1" type="SENTENCE_READER" x="-334.000000" y="-384.000000"> ! <Parameter name="MIN_SYMBOL_HEIGHT" type="subnet_param" value="MIN_SYMBOL_HEIGHT" description="The value"/> ! <Parameter name="MIN_SYMBOL_WIDTH" type="subnet_param" value="MIN_SYMBOL_WIDTH" description="The value"/> ! </Node> <NetInput name="NNET" node="node_SENTENCE_READER_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_SENTENCE_READER_1" terminal="DICT" object_type="" description="No description available"/> *************** *** 649,651 **** --- 574,578 ---- <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> </Network> + <Parameter name="MIN_SYMBOL_HEIGHT" type="int" value="10"/> + <Parameter name="MIN_SYMBOL_WIDTH" type="int" value="10"/> </Document> |
From: Dominic L. <ma...@us...> - 2005-06-08 14:19:19
|
Update of /cvsroot/robotflow/RobotFlow/Vision/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv28244 Modified Files: ExtractSentence_v2.cc StatIntensityAnalyser.cc Log Message: removed comments, added avg and std dev outputs Index: StatIntensityAnalyser.cc =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/StatIntensityAnalyser.cc,v retrieving revision 1.4 retrieving revision 1.5 diff -C2 -d -r1.4 -r1.5 *** StatIntensityAnalyser.cc 29 Mar 2005 15:20:48 -0000 1.4 --- StatIntensityAnalyser.cc 8 Jun 2005 14:19:07 -0000 1.5 *************** *** 58,63 **** * @output_description Minimum intensity of analysed sample on a 0-1 scale * ! ! END*/ --- 58,69 ---- * @output_description Minimum intensity of analysed sample on a 0-1 scale * ! * @output_name AVG_INTENSITY ! * @output_type float ! * @output_description Average intensity ! * ! * @output_name STD_INTENSITY ! * @output_type float ! * @output_description Intensity standard deviation ! * END*/ *************** *** 69,73 **** int mMaxIntensityID; int mMinIntensityID; ! int mAverageIntensityID; // parameters --- 75,80 ---- int mMaxIntensityID; int mMinIntensityID; ! int m_avgIntensityID; ! int m_stdIntensityID; // parameters *************** *** 85,88 **** --- 92,97 ---- mMaxIntensityID = addOutput("MAX_INTENSITY"); mMinIntensityID = addOutput("MIN_INTENSITY"); + m_avgIntensityID = addOutput("AVG_INTENSITY"); + m_stdIntensityID = addOutput("STD_INTENSITY"); //parameters *************** *** 100,108 **** (*outputs[mMaxIntensityID].buffer)[count] = ObjectRef (Float::alloc( 0 )); (*outputs[mMinIntensityID].buffer)[count] = ObjectRef (Float::alloc( 0 )); ! return; } // Acquiring image Image &image = object_cast<Image>(dataInValue); unsigned short *pSourceImage = (unsigned short*) image.get_data(); --- 109,126 ---- (*outputs[mMaxIntensityID].buffer)[count] = ObjectRef (Float::alloc( 0 )); (*outputs[mMinIntensityID].buffer)[count] = ObjectRef (Float::alloc( 0 )); ! (*outputs[m_avgIntensityID].buffer)[count] = ObjectRef (Float::alloc( 0 )); ! (*outputs[m_stdIntensityID].buffer)[count] = ObjectRef (Float::alloc( 0 )); ! return; } // Acquiring image Image &image = object_cast<Image>(dataInValue); + + + if (image.get_pixelsize() != 2) { + throw new GeneralException("Pixelsize !=2",__FILE__,__LINE__); + } + + unsigned short *pSourceImage = (unsigned short*) image.get_data(); *************** *** 160,166 **** float minIntensity = *(min_element( intensityVector.begin(), intensityVector.end() )); (*outputs[mMaxIntensityID].buffer)[count] = ObjectRef (Float::alloc( maxIntensity )); (*outputs[mMinIntensityID].buffer)[count] = ObjectRef (Float::alloc( minIntensity )); ! }//calculate_behavior }; --- 178,201 ---- float minIntensity = *(min_element( intensityVector.begin(), intensityVector.end() )); + float average = 0; + float std_dev = 0; + + //average + for (int i = 0; i < intensityVector.size(); i++) { + average += intensityVector[i]; + } + average /= (float) intensityVector.size(); + + //std dev + for (int i = 0; i < intensityVector.size(); i++) { + std_dev += (intensityVector[i] - average) * (intensityVector[i] - average); + } + + std_dev = sqrt(std_dev); + (*outputs[mMaxIntensityID].buffer)[count] = ObjectRef (Float::alloc( maxIntensity )); (*outputs[mMinIntensityID].buffer)[count] = ObjectRef (Float::alloc( minIntensity )); ! (*outputs[m_avgIntensityID].buffer)[count] = ObjectRef (Float::alloc( average )); ! 
(*outputs[m_stdIntensityID].buffer)[count] = ObjectRef (Float::alloc( std_dev )); }//calculate_behavior }; Index: ExtractSentence_v2.cc =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/ExtractSentence_v2.cc,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** ExtractSentence_v2.cc 29 Mar 2005 15:20:47 -0000 1.3 --- ExtractSentence_v2.cc 8 Jun 2005 14:19:07 -0000 1.4 *************** *** 124,128 **** Vector<String> &dict = object_cast<Vector<String> >(getInput(mDictID,count)); ! cerr<<"dict size "<<dict.size()<<endl; //invalid input, returning empty string! --- 124,128 ---- Vector<String> &dict = object_cast<Vector<String> >(getInput(mDictID,count)); ! //cerr<<"dict size "<<dict.size()<<endl; //invalid input, returning empty string! |
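One detail worth noting about the new STD_INTENSITY output: the committed loop sums the squared deviations and then takes the square root directly, so the reported value is sqrt(sum((x - mean)^2)) rather than a standard deviation normalized by the sample count. A stand-alone sketch of the usual calculation over the sampled intensities (plain C++, not the BufferedNode code):

    #include <cmath>
    #include <vector>

    // Mean and (population) standard deviation of the sampled intensities;
    // unlike the committed loop, the sum of squared deviations is divided
    // by the sample count before the square root.
    static void meanAndStdDev(const std::vector<float> &v,
                              float &mean, float &stdDev)
    {
        mean = 0.0f;
        stdDev = 0.0f;
        if (v.empty())
            return;

        for (size_t i = 0; i < v.size(); i++)
            mean += v[i];
        mean /= (float) v.size();

        float sumSq = 0.0f;
        for (size_t i = 0; i < v.size(); i++)
            sumSq += (v[i] - mean) * (v[i] - mean);
        stdDev = std::sqrt(sumSq / (float) v.size());
    }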
From: Dominic L. <ma...@us...> - 2005-06-08 13:46:01
|
Update of /cvsroot/robotflow/RobotFlow/Vision/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv9322 Modified Files: CvBiModalTest.cc Log Message: added threshold output Index: CvBiModalTest.cc =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/CvBiModalTest.cc,v retrieving revision 1.3 retrieving revision 1.4 diff -C2 -d -r1.3 -r1.4 *** CvBiModalTest.cc 17 May 2005 18:18:28 -0000 1.3 --- CvBiModalTest.cc 8 Jun 2005 13:45:47 -0000 1.4 *************** *** 51,56 **** --- 51,62 ---- * @output_description binarized image * + * @output_name THRESHOLD + * @output_type float + * @output_description Binarization threshold + * END*/ + #define TILE_SIZE 8 + class CvBiModalTest : public BufferedNode { *************** *** 62,69 **** --- 68,77 ---- IplImage *m_gray; IplImage *m_binarized; + IplImage *m_background; int m_imageInID; int m_imageOutID; int m_binImageOutID; + int m_binThresholdOutID; double m_minDist; double m_maxStd; *************** *** 77,81 **** m_imageOutID = addOutput("CONTOUR_IMG"); m_binImageOutID = addOutput("BINARIZED_IMG"); ! m_minDist = dereference_cast<float>(parameters.get("MIN_DIST")); m_maxStd = dereference_cast<float>(parameters.get("MAX_STD")); --- 85,89 ---- m_imageOutID = addOutput("CONTOUR_IMG"); m_binImageOutID = addOutput("BINARIZED_IMG"); ! m_binThresholdOutID = addOutput("THRESHOLD"); m_minDist = dereference_cast<float>(parameters.get("MIN_DIST")); m_maxStd = dereference_cast<float>(parameters.get("MAX_STD")); *************** *** 89,92 **** --- 97,101 ---- m_gray = cvCreateImage( cvSize(320,240), IPL_DEPTH_8U, 1 ); m_binarized = cvCreateImage( cvSize(320,240), IPL_DEPTH_8U, 1 ); + m_background = cvCreateImage( cvSize(320,240), IPL_DEPTH_8U, 1 ); } *************** *** 98,101 **** --- 107,111 ---- cvReleaseImage(&m_gray); cvReleaseImage(&m_binarized); + cvReleaseImage(&m_background); } *************** *** 108,111 **** --- 118,181 ---- + void processBackground(CvRect rect, IplImage *src, IplImage *dest, double std_max) { + + cvSetImageROI(src,rect); + cvSetImageROI(dest,rect); + + CvScalar mean0; + CvScalar stddev0; + + //Get Mean, std dev + cvAvgSdv(src,&mean0,&stddev0); + + double std2sum = sqrt(stddev0.val[0] * stddev0.val[0]+ + stddev0.val[1] * stddev0.val[1] + + stddev0.val[2] * stddev0.val[2]); + + unsigned char color = 0; + + if (stddev0.val[0] < std_max && + stddev0.val[1] < std_max && + stddev0.val[2] < std_max) { + color = 0xFF; + } + else { + color = 0x00; + } + + //draw on the image the background + for (int row = rect.y; row < rect.y + rect.height; row++) { + + char *basePtr = &dest->imageData[(row * dest->widthStep) + (rect.x * dest->nChannels)]; + char *imgPtr = basePtr; + + for (; imgPtr < basePtr + (dest->nChannels * rect.width); imgPtr += dest->nChannels) { + *imgPtr = color; + } + } + + } + + bool insideBackgroundContour(CvSeq *backgroundContour, CvRect rect) { + + for( ; backgroundContour != 0; backgroundContour = backgroundContour->h_next ) { + + CvRect backgroundRect = cvBoundingRect(backgroundContour,0); + + if ((rect.x >= backgroundRect.x) && + (rect.y >= backgroundRect.y) && + ((rect.x + rect.width) <= (backgroundRect.x + backgroundRect.width)) && + ((rect.y + rect.height) <= (backgroundRect.y + backgroundRect.height))) { + return true; + } + + } + + + return false; + + } + + bool processRect(CvRect rect, IplImage *src, IplImage *dest, double std_max, double min_dist, double *threshold = 0) { *************** *** 128,133 **** --- 198,212 ---- cvAvgSdv(src,&mean0,&stddev0); 
+ /* + double test = sqrt(stddev0.val[0] * stddev0.val[0] + + stddev0.val[1] * stddev0.val[1] + + stddev0.val[2] * stddev0.val[2]); + + if (test > std_max) return true; + else return false; + */ + //cerr<<"CV made (mean0): "<<mean0.val[0]<<" "<<mean0.val[1]<<" "<<mean0.val[2]<<endl; //cerr<<"CV made (stddev0): "<<stddev0.val[0]<<" "<<stddev0.val[1]<<" "<<stddev0.val[2]<<endl; *************** *** 242,255 **** } ! ! ! //TEST CONDITIONS ! if (mean_dist > min_dist && ! (stddev1.val[0] < mean_dist / std_max) && (stddev1.val[1] < mean_dist / std_max) && (stddev1.val[2] < mean_dist / std_max) && (stddev2.val[0] < mean_dist / std_max) && (stddev2.val[1] < mean_dist / std_max) && ! (stddev2.val[2] < mean_dist / std_max)) { bimodal = true; } --- 321,338 ---- } ! /* ! (stddev1.val[0] < mean_dist / std_max) && (stddev1.val[1] < mean_dist / std_max) && (stddev1.val[2] < mean_dist / std_max) && (stddev2.val[0] < mean_dist / std_max) && (stddev2.val[1] < mean_dist / std_max) && ! (stddev2.val[2] < mean_dist / std_max) ! */ ! /* ! (std1sum < mean_dist / std_max) && ! (std2sum < mean_dist / std_max) ! */ ! //TEST CONDITIONS ! if (mean_dist > min_dist) { bimodal = true; } *************** *** 275,298 **** cvResetImageROI(m_gray); cvResetImageROI(m_binarized); cvCopy(m_src,m_dest,NULL); //clear binarized image ! memset(m_binarized->imageData,0xFF,320*240); struct timeb t1, t2; ftime(&t1); ! //TILE MODE ! /* ! for (int y = 0; y < 240; y+= 16) { ! for (int x = 0; x < 320; x+= 16) { CvRect rect; rect.x = x; rect.y = y; ! rect.width = 16; ! rect.height = 16; ! if (processRect(rect,m_src,m_dest,m_maxStd,m_minDist)) { CvScalar color = CV_RGB(255,0,0); CvPoint p1,p2; --- 358,393 ---- cvResetImageROI(m_gray); cvResetImageROI(m_binarized); + cvResetImageROI(m_background); cvCopy(m_src,m_dest,NULL); //clear binarized image ! memset(m_binarized->imageData,0x80,320*240); struct timeb t1, t2; ftime(&t1); ! cvCvtColor(m_src, m_gray, CV_BGR2GRAY); ! cvCvtColor(m_src, m_background, CV_BGR2GRAY); ! cvSmooth(m_gray,m_gray,CV_GAUSSIAN,3); ! cvSmooth(m_background,m_background,CV_GAUSSIAN,3); ! //TILE MODE FOR BACKGROUND ! CvMemStorage* backgroundStorage = cvCreateMemStorage(0); ! CvSeq* backgroundContour = 0; ! ! for (int y = 0; y < 240; y+= TILE_SIZE) { ! for (int x = 0; x < 320; x+= TILE_SIZE) { CvRect rect; rect.x = x; rect.y = y; ! rect.width = TILE_SIZE; ! rect.height = TILE_SIZE; ! ! processBackground(rect,m_src,m_background,m_maxStd); ! ! /* ! double threshold; ! if (processRect(rect,m_src,m_dest,m_maxStd,m_minDist,&threshold)) { CvScalar color = CV_RGB(255,0,0); CvPoint p1,p2; *************** *** 302,317 **** p2.y = rect.y + rect.height; cvRectangle(m_dest,p1,p2,color); } } } ! */ //EDGE MODE ! cvCvtColor(m_src, m_gray, CV_BGR2GRAY); ! cvSmooth(m_gray,m_gray,CV_GAUSSIAN,3); ! cvCanny(m_gray, m_edges, 128, 255, 3); CvMemStorage* storage = cvCreateMemStorage(0); CvSeq* contour = 0; cvFindContours(m_edges, storage, &contour, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE ); --- 397,418 ---- p2.y = rect.y + rect.height; cvRectangle(m_dest,p1,p2,color); + //cerr<<"threshold "<<threshold<<endl; + binarize(rect,m_gray,m_binarized,threshold); } + */ } } ! cvResetImageROI(m_background); ! cvFindContours(m_background, backgroundStorage, ! &backgroundContour, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE ); ! //EDGE MODE ! ! 
cvCanny(m_gray, m_edges, 55, 200, 3); CvMemStorage* storage = cvCreateMemStorage(0); CvSeq* contour = 0; + double threshold; cvFindContours(m_edges, storage, &contour, sizeof(CvContour), CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE ); *************** *** 320,324 **** CvRect rect = cvBoundingRect(contour,0); ! double threshold; if (processRect(rect,m_src,m_dest,m_maxStd,m_minDist, &threshold)) { --- 421,428 ---- CvRect rect = cvBoundingRect(contour,0); ! ! ! if (!insideBackgroundContour(backgroundContour,rect)) continue; ! if (processRect(rect,m_src,m_dest,m_maxStd,m_minDist, &threshold)) { *************** *** 335,339 **** cvRectangle(m_dest,p1,p2,color); ! //void binarize(CvRect rect, IplImage *src, IplImage *dest, double threshold) { binarize(rect,m_gray,m_binarized,threshold); } --- 439,443 ---- cvRectangle(m_dest,p1,p2,color); ! binarize(rect,m_gray,m_binarized,threshold); } *************** *** 344,347 **** --- 448,454 ---- + cvReleaseMemStorage(&backgroundStorage); + + ftime(&t2); double timeDiff=(t2.time-t1.time)+((t2.millitm-t1.millitm)/1000.0); *************** *** 357,363 **** ! memcpy(m_binImg->get_data(), m_binarized->imageData,m_binImg->get_size()); (*outputs[m_binImageOutID].buffer)[count] = ObjectRef(m_binImg); } catch (BaseException *e) { --- 464,473 ---- ! memcpy(m_binImg->get_data(), m_background->imageData,m_binImg->get_size()); (*outputs[m_binImageOutID].buffer)[count] = ObjectRef(m_binImg); + (*outputs[m_binThresholdOutID].buffer)[count] = ObjectRef(Float::alloc(threshold / 255.0)); + + } catch (BaseException *e) { |
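The new background pass tiles the image in TILE_SIZE (8x8) blocks, marks a tile as background when all three per-channel standard deviations from cvAvgSdv stay below MAX_STD, and then keeps an edge contour only if its bounding rectangle falls entirely inside one of the background contours. A sketch of that containment test as a stand-alone helper, assuming the OpenCV 1.x C API (the header path may differ between installs):

    #include <opencv/cv.h>  // CvRect; adjust the include to your OpenCV install

    // True when 'inner' lies completely inside 'outer', as tested against
    // each background bounding box while walking the contour list via h_next.
    static bool rectInside(const CvRect &inner, const CvRect &outer)
    {
        return inner.x >= outer.x &&
               inner.y >= outer.y &&
               inner.x + inner.width  <= outer.x + outer.width &&
               inner.y + inner.height <= outer.y + outer.height;
    }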
From: Dominic L. <ma...@us...> - 2005-06-08 13:18:48
|
Update of /cvsroot/robotflow/RobotFlow/demo/SymbolRecog/dict
In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv24814

Modified Files:
	fox.dict
Log Message:
fixed fox dict

Index: fox.dict
===================================================================
RCS file: /cvsroot/robotflow/RobotFlow/demo/SymbolRecog/dict/fox.dict,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** fox.dict	16 Aug 2004 20:53:31 -0000	1.1
--- fox.dict	8 Jun 2005 13:18:39 -0000	1.2
***************
*** 1,3 ****
! <String A,BROWN,DOG,FOX,JUMPS,LAZY,OVER,QUICK,THE>
!
--- 1,2 ----
! <Vector<String> <String A> <String BROWN> <String DOG> <String FOX> <String JUMPS> <String LAZY> <String OVER> <String QUICK> <String THE> <String SERVICE> <String EXIT> <String PROJECT> <String URBAN> <String TU> <String ES> <String UN> <String ROBOT> >
|
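The format change matters because ExtractSentence_v2.cc casts the loaded dictionary with object_cast<Vector<String> >, which is presumably why the file is rewritten as a serialized Vector<String> instead of a single comma-separated String. As an illustration only (hypothetical words, not part of the commit), a minimal two-word dictionary in the same format would be:

    <Vector<String> <String HELLO> <String WORLD> >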
From: Dominic L. <ma...@us...> - 2005-06-06 20:59:25
|
Update of /cvsroot/robotflow/RobotFlow/Vision/n-files In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv9784 Modified Files: READv2.n Log Message: removing camera-dependant control, fixed links Index: READv2.n =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/n-files/READv2.n,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** READv2.n 6 Jun 2005 20:55:17 -0000 1.1 --- READv2.n 6 Jun 2005 20:59:14 -0000 1.2 *************** *** 9,12 **** --- 9,13 ---- <NetOutput name="SENTENCE" node="node_LOOP0_1" terminal="SENTENCE" object_type="" description="The object from THEN or ELSE depending on COND"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_LOOP0_1" terminal="BLACK_WHITE_LOOKUP" object_type="any" description="No description available"/> + <NetInput name="RGB15_BINARIZED" node="node_LOOP0_1" terminal="RGB15_BINARIZED" object_type="Image" description="The Image to extract colors from."/> <Note x="-3418" y="-384" visible="1" text="The MAIN network will load configuration from the files containing : - black and white color lookup - the neural network to use for recog - The dictionary to use * You can change this configuration by double clicking on the LoadFile blocks"/> </Network> *************** *** 26,32 **** <Network type="subnet" name="MAIN_PROCESS"> <Node name="node_PROCESS_SEGMENTS_1" type="PROCESS_SEGMENTS" x="184.000000" y="321.000000"/> ! <Node name="node_IF_ACTIVATED_DO_4" type="IF_ACTIVATED_DO" x="721.000000" y="305.000000"/> <Node name="node_NOP_3" type="NOP" x="221.000000" y="183.000000"/> ! <Node name="node_Components_1" type="Components" x="-335.000000" y="341.000000"> <Parameter name="XGAP" type="int" value="1" description=""/> <Parameter name="YGAP" type="int" value="1" description=""/> --- 27,33 ---- <Network type="subnet" name="MAIN_PROCESS"> <Node name="node_PROCESS_SEGMENTS_1" type="PROCESS_SEGMENTS" x="184.000000" y="321.000000"/> ! <Node name="node_IF_ACTIVATED_DO_4" type="IF_ACTIVATED_DO" x="696.000000" y="313.000000"/> <Node name="node_NOP_3" type="NOP" x="221.000000" y="183.000000"/> ! <Node name="node_Components_1" type="Components" x="-252.000000" y="335.000000"> <Parameter name="XGAP" type="int" value="1" description=""/> <Parameter name="YGAP" type="int" value="1" description=""/> *************** *** 36,39 **** --- 37,41 ---- <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_4" input="SIGN_TRACKING_ACTIVATED"/> <Link from="node_Components_1" output="BLOBS" to="node_PROCESS_SEGMENTS_1" input="BLACK_WHITE_COMPONENTS"/> + <Link from="node_PROCESS_SEGMENTS_1" output="SENTENCE" to="node_IF_ACTIVATED_DO_4" input="DO"/> <NetInput name="NNET" node="node_PROCESS_SEGMENTS_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_PROCESS_SEGMENTS_1" terminal="DICT" object_type="" description="No description available"/> *************** *** 274,278 **** <Network type="subnet" name="SENTENCE_READER"> <Node name="node_NewAccumulator_1" type="NewAccumulator" x="-243.000000" y="342.000000"/> ! <Node name="node_SymbolCounter_1" type="SymbolCounter" x="-289.000000" y="386.000000"> <Parameter name="CHANNEL" type="int" value="1" description=""/> <Parameter name="DEPTH" type="int" value="2" description=""/> --- 276,280 ---- <Network type="subnet" name="SENTENCE_READER"> <Node name="node_NewAccumulator_1" type="NewAccumulator" x="-243.000000" y="342.000000"/> ! 
<Node name="node_SymbolCounter_1" type="SymbolCounter" x="-391.000000" y="327.000000"> <Parameter name="CHANNEL" type="int" value="1" description=""/> <Parameter name="DEPTH" type="int" value="2" description=""/> |
From: Dominic L. <ma...@us...> - 2005-06-06 20:55:39
|
Update of /cvsroot/robotflow/RobotFlow/Vision/n-files In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv8106 Modified Files: Makefile.am READ.n Added Files: READv2.n Log Message: removing camera-dependant control --- NEW FILE: READv2.n --- #!/usr/bin/env batchflow <?xml version="1.0"?> <Document category="RobotFlow:LABORIUS"> <Network type="subnet" name="MAIN"> <Node name="node_LOOP0_1" type="MAIN_PROCESS" x="-3397.000000" y="-618.000000"/> <NetInput name="NNET" node="node_LOOP0_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_LOOP0_1" terminal="DICT" object_type="" description="No description available"/> <NetInput name="SIGN_TRACKING_ACTIVATED" node="node_LOOP0_1" terminal="SIGN_TRACKING_ACTIVATED" object_type="any" description="The input"/> <NetOutput name="SENTENCE" node="node_LOOP0_1" terminal="SENTENCE" object_type="" description="The object from THEN or ELSE depending on COND"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_LOOP0_1" terminal="BLACK_WHITE_LOOKUP" object_type="any" description="No description available"/> <Note x="-3418" y="-384" visible="1" text="The MAIN network will load configuration from the files containing : - black and white color lookup - the neural network to use for recog - The dictionary to use * You can change this configuration by double clicking on the LoadFile blocks"/> </Network> <Network type="subnet" name="COMPONENTS_EXTRACT"> <Node name="node_Components_1" type="Components" x="467.000000" y="113.000000"> <Parameter name="XGAP" type="int" value="2" description=""/> <Parameter name="YGAP" type="int" value="2" description=""/> <Parameter name="NUM_COLOR" type="int" value="4" description=""/> <Parameter name="MIN_AREA" type="int" value="16" description=""/> </Node> <Node name="node_NOP_1" type="NOP" x="699.000000" y="113.000000"/> <Link from="node_Components_1" output="BLOBS" to="node_NOP_1" input="INPUT"/> <NetOutput name="COMPONENTS" node="node_NOP_1" terminal="OUTPUT" object_type="any" description="The output = The input"/> <NetInput name="IMAGE" node="node_Components_1" terminal="IMAGE" object_type="Image" description="The Image to extract colors from."/> <NetInput name="LOOKUP" node="node_Components_1" terminal="LOOKUP" object_type="ColorLookup" description="The color lookup to use"/> </Network> <Network type="subnet" name="MAIN_PROCESS"> <Node name="node_PROCESS_SEGMENTS_1" type="PROCESS_SEGMENTS" x="184.000000" y="321.000000"/> <Node name="node_IF_ACTIVATED_DO_4" type="IF_ACTIVATED_DO" x="721.000000" y="305.000000"/> <Node name="node_NOP_3" type="NOP" x="221.000000" y="183.000000"/> <Node name="node_Components_1" type="Components" x="-335.000000" y="341.000000"> <Parameter name="XGAP" type="int" value="1" description=""/> <Parameter name="YGAP" type="int" value="1" description=""/> <Parameter name="NUM_COLOR" type="int" value="2" description=""/> <Parameter name="MIN_AREA" type="int" value="16" description=""/> </Node> <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_4" input="SIGN_TRACKING_ACTIVATED"/> <Link from="node_Components_1" output="BLOBS" to="node_PROCESS_SEGMENTS_1" input="BLACK_WHITE_COMPONENTS"/> <NetInput name="NNET" node="node_PROCESS_SEGMENTS_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_PROCESS_SEGMENTS_1" terminal="DICT" object_type="" description="No description available"/> <NetOutput name="SENTENCE" node="node_IF_ACTIVATED_DO_4" terminal="OUTPUT" object_type="" description="The object 
from THEN or ELSE depending on COND"/> <NetInput name="SIGN_TRACKING_ACTIVATED" node="node_NOP_3" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="BLACK_WHITE_LOOKUP" node="node_Components_1" terminal="LOOKUP" object_type="ColorLookup" description="The color lookup to use"/> <NetInput name="RGB15_BINARIZED" node="node_Components_1" terminal="IMAGE" object_type="Image" description="The Image to extract colors from."/> <Note x="-1480" y="269" visible="1" text="Sony SNC-RZ30 Network Camera controller."/> <Note x="-1013" y="42" visible="1" text="Image must be converted to RGB15 from the camera before training."/> <Note x="-323" y="44" visible="1" text="From the components of the image, we try to extract a textual messages that are color coded (foreground color = text, background color = color of the sheet of paper)"/> <Note x="159" y="551" visible="1" text="Image segments are scaled and process by the neural network. We will look in the dictionary for words that match the best."/> <Note x="291" y="47" visible="1" text="SKIP_N is useful because the camera is too slow, if we don't use SKIP_N, the camera will be overloaded with commands, which creates a terrific lag."/> </Network> <Network type="subnet" name="PAN_TILT_CTRL"> <Node name="node_GenericPID_1" type="GenericPID" x="-605.000000" y="-237.000000"> <Parameter name="P_GAIN" type="float" value="0.5" description="Proportionnal gain."/> <Parameter name="I_GAIN" type="float" value="0.0" description="Integral gain."/> <Parameter name="D_GAIN" type="float" value="0.0" description="Derivative gain."/> <Parameter name="I_MAX" type="float" value="100" description="The maximum Integral value."/> </Node> <Node name="node_Constant_1" type="Constant" x="-800.000000" y="-229.000000"> <Parameter name="VALUE" type="bool" value="false" description="The value"/> </Node> <Node name="node_GenericPID_2" type="GenericPID" x="-608.000000" y="-154.000000"> <Parameter name="P_GAIN" type="float" value="0.5" description="Proportionnal gain."/> <Parameter name="I_GAIN" type="float" value="0.0" description="Integral gain."/> <Parameter name="D_GAIN" type="float" value="0.0" description="Derivative gain."/> <Parameter name="I_MAX" type="float" value="100" description="The maximum Integral value."/> </Node> <Node name="node_Constant_2" type="Constant" x="-796.000000" y="-146.000000"> <Parameter name="VALUE" type="bool" value="false" description="The value"/> </Node> <Node name="node_NOP_1" type="NOP" x="-1393.000000" y="-271.000000"/> <Node name="node_NOP_2" type="NOP" x="-1393.000000" y="-183.000000"/> <Node name="node_NOP_3" type="NOP" x="-1393.000000" y="-153.000000"/> <Node name="node_IF_1" type="IF" x="-294.000000" y="-253.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_isNil_1" type="isNil" x="-401.000000" y="-270.000000"/> <Node name="node_NilObject_1" type="NilObject" x="-507.000000" y="-254.000000"/> <Node name="node_IF_2" type="IF" x="-296.000000" y="-168.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_isNil_2" type="isNil" x="-406.000000" y="-184.000000"/> <Node name="node_NilObject_2" type="NilObject" x="-485.000000" y="-168.000000"/> <Node name="node_TEST_SCALING_1" type="AUTO_SCALE" x="-921.000000" y="-245.000000"/> <Node name="node_TEST_SCALING_2" type="AUTO_SCALE" x="-916.000000" y="-161.000000"/> <Node 
name="node_NOP_4" type="NOP" x="-1392.000000" y="-85.000000"/> <Node name="node_NOP_5" type="NOP" x="-1394.000000" y="4.000000"/> <Node name="node_TEST_ZOOMING_1" type="AUTO_ZOOM" x="-936.000000" y="-61.000000"/> <Node name="node_Add_1" type="Add" x="-609.000000" y="-54.000000"/> <Node name="node_IF_3" type="IF" x="-294.000000" y="-70.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_isNil_3" type="isNil" x="-418.000000" y="-84.000000"/> <Node name="node_Constant_3" type="Constant" x="-497.000000" y="-69.000000"> <Parameter name="VALUE" type="int" value="0" description="The value"/> </Node> <Link from="node_Constant_1" output="VALUE" to="node_GenericPID_1" input="RESET"/> <Link from="node_Constant_2" output="VALUE" to="node_GenericPID_2" input="RESET"/> <Link from="node_isNil_1" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_NOP_1" output="OUTPUT" to="node_isNil_1" input="INPUT"/> <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="THEN"/> <Link from="node_isNil_2" output="OUTPUT" to="node_IF_2" input="COND"/> <Link from="node_NilObject_2" output="VALUE" to="node_IF_2" input="THEN"/> <Link from="node_NOP_2" output="OUTPUT" to="node_isNil_2" input="INPUT"/> <Link from="node_NOP_1" output="OUTPUT" to="node_TEST_SCALING_1" input="DELTA"/> <Link from="node_NOP_3" output="OUTPUT" to="node_TEST_SCALING_1" input="ZOOM_FACTOR"/> <Link from="node_NOP_2" output="OUTPUT" to="node_TEST_SCALING_2" input="DELTA"/> <Link from="node_NOP_3" output="OUTPUT" to="node_TEST_SCALING_2" input="ZOOM_FACTOR"/> <Link from="node_TEST_SCALING_2" output="SCALE" to="node_GenericPID_2" input="EPSILON"/> <Link from="node_TEST_SCALING_1" output="SCALE" to="node_GenericPID_1" input="EPSILON"/> <Link from="node_GenericPID_2" output="OUTPUT" to="node_IF_2" input="ELSE"/> <Link from="node_GenericPID_1" output="OUTPUT" to="node_IF_1" input="ELSE"/> <Link from="node_NOP_1" output="OUTPUT" to="node_TEST_ZOOMING_1" input="DELTA_X"/> <Link from="node_NOP_2" output="OUTPUT" to="node_TEST_ZOOMING_1" input="DELTA_Y"/> <Link from="node_NOP_4" output="OUTPUT" to="node_TEST_ZOOMING_1" input="BOUNDARY"/> <Link from="node_TEST_ZOOMING_1" output="REL_ZOOM" to="node_Add_1" input="INPUT1"/> <Link from="node_NOP_5" output="OUTPUT" to="node_Add_1" input="INPUT2"/> <Link from="node_isNil_3" output="OUTPUT" to="node_IF_3" input="COND"/> <Link from="node_NOP_4" output="OUTPUT" to="node_isNil_3" input="INPUT"/> <Link from="node_Add_1" output="OUTPUT" to="node_IF_3" input="ELSE"/> <Link from="node_Constant_3" output="VALUE" to="node_IF_3" input="THEN"/> <NetInput name="DELTA_X" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="DELTA_Y" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="ZOOM_FACTOR" node="node_NOP_3" terminal="INPUT" object_type="any" description="The input"/> <NetOutput name="REL_PAN" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <NetOutput name="REL_TILT" node="node_IF_2" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <NetInput name="BOUNDARY" node="node_NOP_4" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="CURRENT_ZOOM" node="node_NOP_5" terminal="INPUT" object_type="any" description="The input"/> <NetOutput name="ABS_ZOOM" node="node_IF_3" 
terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="-973" y="140" visible="1" text="According to DELTA_X and DELTA_Y and the BOUNDARY inputs, we will output incremental pan-tilt-zoom commands to center the color blobs in the image and get the maximum resolution possible."/> </Network> <Network type="subnet" name="SKIP_N"> <Node name="node_IF_1" type="IF" x="890.000000" y="-24.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="false" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_Equal_1" type="Equal" x="768.000000" y="-53.000000"/> <Node name="node_Constant_1" type="Constant" x="600.000000" y="-36.000000"> <Parameter name="VALUE" type="int" value="0" description="The value"/> </Node> <Node name="node_IterCount_1" type="IterCount" x="292.000000" y="-77.000000"/> <Node name="node_Modulo_1" type="Modulo" x="609.000000" y="-69.000000"/> <Node name="node_Constant_2" type="Constant" x="397.000000" y="-62.000000"> <Parameter name="VALUE" type="subnet_param" value="SKIP_N" description="The value"/> </Node> <Node name="node_NilObject_1" type="NilObject" x="732.000000" y="55.000000"/> <Link from="node_Equal_1" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="ELSE"/> <Link from="node_Modulo_1" output="REMAINDER" to="node_Equal_1" input="INPUT1"/> <Link from="node_Constant_1" output="VALUE" to="node_Equal_1" input="INPUT2"/> <Link from="node_IterCount_1" output="OUTPUT" to="node_Modulo_1" input="DIVIDEND"/> <Link from="node_Constant_2" output="VALUE" to="node_Modulo_1" input="DIVISOR"/> <NetInput name="INPUT" node="node_IF_1" terminal="THEN" object_type="any" description="What to do if the condition is true"/> <NetOutput name="OUTPUT" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="474" y="214" visible="1" text="Useful to skip N iteration (returning nilObject except when modulo = 0)"/> </Network> <Network type="subnet" name="AUTO_SCALE"> <Node name="node_NOP_1" type="NOP" x="-257.000000" y="-156.000000"/> <Node name="node_NOP_2" type="NOP" x="-746.000000" y="-100.000000"/> <Node name="node_Power_1" type="Power" x="-283.000000" y="-95.000000"/> <Node name="node_Mul_1" type="Mul" x="-432.000000" y="-86.000000"/> <Node name="node_Constant_2" type="Constant" x="-597.000000" y="16.000000"> <Parameter name="VALUE" type="float" value="-1" description="The value"/> </Node> <Node name="node_NOP_3" type="NOP" x="92.000000" y="-150.000000"/> <Node name="node_Constant_3" type="Constant" x="-446.000000" y="-136.000000"> <Parameter name="VALUE" type="float" value="1.1" description="The value"/> </Node> <Node name="node_Mul_2" type="Mul" x="-66.000000" y="-150.000000"/> <Node name="node_Sub_1" type="Sub" x="-571.000000" y="-92.000000"/> <Node name="node_Constant_1" type="Constant" x="-730.000000" y="-60.000000"> <Parameter name="VALUE" type="float" value="1" description="The value"/> </Node> <Link from="node_Mul_1" output="OUTPUT" to="node_Power_1" input="EXP"/> <Link from="node_Constant_2" output="VALUE" to="node_Mul_1" input="INPUT2"/> <Link from="node_Constant_3" output="VALUE" to="node_Power_1" input="BASE"/> <Link from="node_NOP_1" output="OUTPUT" to="node_Mul_2" input="INPUT1"/> <Link from="node_Power_1" 
output="OUTPUT" to="node_Mul_2" input="INPUT2"/> <Link from="node_Sub_1" output="OUTPUT" to="node_Mul_1" input="INPUT1"/> <Link from="node_NOP_2" output="OUTPUT" to="node_Sub_1" input="INPUT1"/> <Link from="node_Constant_1" output="VALUE" to="node_Sub_1" input="INPUT2"/> <Link from="node_Mul_2" output="OUTPUT" to="node_NOP_3" input="INPUT"/> <NetInput name="DELTA" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="ZOOM_FACTOR" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> <NetOutput name="SCALE" node="node_NOP_3" terminal="OUTPUT" object_type="any" description="The output = The input"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="-293" y="126" visible="1" text="Will scale DELTA value according to the ZOOM factor."/> </Network> <Network type="subnet" name="AUTO_ZOOM"> <Node name="node_IF_1" type="IF" x="-378.000000" y="-90.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_AND_1" type="AND" x="-780.000000" y="-97.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="false" description="Pull on INPUT2 even if INPUT1 is false"/> </Node> <Node name="node_Smaller_1" type="Smaller" x="-965.000000" y="-103.000000"/> <Node name="node_NOP_1" type="NOP" x="-1404.000000" y="-109.000000"/> <Node name="node_NOP_2" type="NOP" x="-1405.000000" y="-49.000000"/> <Node name="node_NOP_3" type="NOP" x="-1300.000000" y="-198.000000"/> <Node name="node_Constant_2" type="Constant" x="-1183.000000" y="-96.000000"> <Parameter name="VALUE" type="float" value="10" description="The value"/> </Node> <Node name="node_Smaller_2" type="Smaller" x="-966.000000" y="-42.000000"/> <Node name="node_AND_2" type="AND" x="-614.000000" y="-105.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="false" description="Pull on INPUT2 even if INPUT1 is false"/> </Node> <Node name="node_Greater_1" type="Greater" x="-797.000000" y="-192.000000"/> <Node name="node_Constant_3" type="Constant" x="-1058.000000" y="-162.000000"> <Parameter name="VALUE" type="int" value="15" description="The value"/> </Node> <Node name="node_Constant_5" type="Constant" x="-809.000000" y="86.000000"> <Parameter name="VALUE" type="int" value="-50" description="The value"/> </Node> <Node name="node_IF_2" type="IF" x="-563.000000" y="72.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_Constant_6" type="Constant" x="-709.000000" y="72.000000"> <Parameter name="VALUE" type="int" value="0" description="The value"/> </Node> <Node name="node_ABS_1" type="ABS" x="-1284.000000" y="-109.000000"/> <Node name="node_ABS_2" type="ABS" x="-1282.000000" y="-49.000000"/> <Node name="node_Constant_1" type="Constant" x="-535.000000" y="-91.000000"> <Parameter name="VALUE" type="int" value="50" description="The value"/> </Node> <Link from="node_Smaller_1" output="OUTPUT" to="node_AND_1" input="INPUT1"/> <Link from="node_Constant_2" output="VALUE" to="node_Smaller_1" input="INPUT2"/> <Link from="node_Smaller_2" output="OUTPUT" to="node_AND_1" input="INPUT2"/> <Link from="node_AND_2" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_Greater_1" output="OUTPUT" to="node_AND_2" input="INPUT1"/> <Link from="node_NOP_3" output="OUTPUT" to="node_Greater_1" input="INPUT1"/> <Link from="node_Constant_3" output="VALUE" to="node_Greater_1" 
input="INPUT2"/> <Link from="node_IF_2" output="OUTPUT" to="node_IF_1" input="ELSE"/> <Link from="node_Greater_1" output="OUTPUT" to="node_IF_2" input="COND"/> <Link from="node_Constant_6" output="VALUE" to="node_IF_2" input="THEN"/> <Link from="node_NOP_1" output="OUTPUT" to="node_ABS_1" input="INPUT"/> <Link from="node_ABS_1" output="OUTPUT" to="node_Smaller_1" input="INPUT1"/> <Link from="node_NOP_2" output="OUTPUT" to="node_ABS_2" input="INPUT"/> <Link from="node_Constant_2" output="VALUE" to="node_Smaller_2" input="INPUT2">-1149 -96 -1149 -36 -1049.5 -34.5 </Link> <Link from="node_ABS_2" output="OUTPUT" to="node_Smaller_2" input="INPUT1"/> <Link from="node_AND_1" output="OUTPUT" to="node_AND_2" input="INPUT2"/> <Link from="node_Constant_5" output="VALUE" to="node_IF_2" input="ELSE"/> <Link from="node_Constant_1" output="VALUE" to="node_IF_1" input="THEN"/> <NetInput name="DELTA_X" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="DELTA_Y" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="BOUNDARY" node="node_NOP_3" terminal="INPUT" object_type="any" description="The input"/> <NetOutput name="REL_ZOOM" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="-1080" y="162" visible="1" text="The idea is to get the color component centered enough before sending zoom increments. If we are too close to the image border, we send decrement the zoom value."/> </Network> <Network type="subnet" name="ABS"> <Node name="node_IF_1" type="IF" x="-241.000000" y="-18.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_Mul_1" type="Mul" x="-397.000000" y="-19.000000"/> <Node name="node_Constant_1" type="Constant" x="-603.000000" y="-11.000000"> <Parameter name="VALUE" type="float" value="-1" description="The value"/> </Node> <Node name="node_NOP_1" type="NOP" x="-911.000000" y="-109.000000"/> <Node name="node_Smaller_1" type="Smaller" x="-389.000000" y="-100.000000"/> <Node name="node_Constant_2" type="Constant" x="-604.000000" y="-91.000000"> <Parameter name="VALUE" type="float" value="0" description="The value"/> </Node> <Link from="node_Constant_1" output="VALUE" to="node_Mul_1" input="INPUT2"/> <Link from="node_Smaller_1" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_NOP_1" output="OUTPUT" to="node_Smaller_1" input="INPUT1"/> <Link from="node_Constant_2" output="VALUE" to="node_Smaller_1" input="INPUT2"/> <Link from="node_Mul_1" output="OUTPUT" to="node_IF_1" input="THEN"/> <Link from="node_NOP_1" output="OUTPUT" to="node_Mul_1" input="INPUT1"/> <Link from="node_NOP_1" output="OUTPUT" to="node_IF_1" input="ELSE">-892 -109 -826 51 -466 51 -296 -3 </Link> <NetInput name="INPUT" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> <NetOutput name="OUTPUT" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="-580" y="233" visible="1" text="Returns the absolute value (always positive)"/> </Network> <Network type="subnet" name="SENTENCE_READER"> <Node name="node_NewAccumulator_1" type="NewAccumulator" x="-243.000000" y="342.000000"/> <Node name="node_SymbolCounter_1" type="SymbolCounter" 
x="-289.000000" y="386.000000"> <Parameter name="CHANNEL" type="int" value="1" description=""/> <Parameter name="DEPTH" type="int" value="2" description=""/> </Node> <Node name="node_STRING_EXTRACTOR_1" type="STRING_EXTRACTOR" x="86.000000" y="321.000000"> <Parameter name="DOWHILE" type="bool" value="" description="No description available"/> </Node> <Node name="node_ExtractSentence_v2_1" type="ExtractSentence_v2" x="441.000000" y="329.000000"> <Parameter name="VERTICAL_TOLERANCE" type="float" value="0.5" description=""/> <Parameter name="HORIZONTAL_TOLERANCE" type="float" value="0.5" description=""/> </Node> <Node name="node_NOP_2" type="NOP" x="-524.000000" y="299.000000"/> <Link from="node_SymbolCounter_1" output="COUNT" to="node_STRING_EXTRACTOR_1" input="SYMBOL_COUNT"/> <Link from="node_NOP_2" output="OUTPUT" to="node_STRING_EXTRACTOR_1" input="DATA"/> <Link from="node_NOP_2" output="OUTPUT" to="node_SymbolCounter_1" input="DATA"/> <Link from="node_NewAccumulator_1" output="OUTPUT" to="node_STRING_EXTRACTOR_1" input="ACCUM"/> <Link from="node_STRING_EXTRACTOR_1" output="ACCUM_OUT" to="node_ExtractSentence_v2_1" input="SYMBOL_LIST"/> <NetInput name="NNET" node="node_STRING_EXTRACTOR_1" terminal="NNET" object_type="any" description="No description available"/> <NetInput name="DICT" node="node_ExtractSentence_v2_1" terminal="DICT" object_type="any" description="No description available"/> <NetOutput name="SENTENCE" node="node_ExtractSentence_v2_1" terminal="SENTENCE" object_type="string" description="A string corresponding to the sentence read"/> <NetOutput name="ORIGINAL_TEXT" node="node_ExtractSentence_v2_1" terminal="ORIGINAL_TEXT" object_type="string" description="Original Text one letter at a time."/> <NetInput name="BLACK_WHITE_COMPONENTS" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> <Note x="-440" y="254" visible="1" text="Black and white components are extracted from the image."/> <Note x="-1115" y="154" visible="1" text="This is an adaptative threshold for the binarization."/> <Note x="27" y="527" visible="1" text="All characters are then processed by the neural network."/> <Note x="483" y="512" visible="1" text="We use a dictionary to fit the neural network output for each characters used as probability of occurence of a letter in a word."/> </Network> <Network type="subnet" name="EXTRA_INFO"> <Node name="node_Div_1" type="Div" x="-212.000000" y="133.000000"/> <Node name="node_Concatenate_1" type="Concatenate" x="255.000000" y="143.000000"/> <Node name="node_Div_2" type="Div" x="-211.000000" y="172.000000"/> <Node name="node_Concatenate_2" type="Concatenate" x="49.000000" y="169.000000"/> <Node name="node_Div_3" type="Div" x="-212.000000" y="214.000000"/> <Node name="node_Float2Vect_1" type="Float2Vect" x="-126.000000" y="134.000000"> <Parameter name="LOOKAHEAD" type="int" value="" description=""/> <Parameter name="LOOKBACK" type="int" value="" description=""/> </Node> <Node name="node_Float2Vect_2" type="Float2Vect" x="-126.000000" y="173.000000"> <Parameter name="LOOKAHEAD" type="int" value="" description=""/> <Parameter name="LOOKBACK" type="int" value="" description=""/> </Node> <Node name="node_Float2Vect_3" type="Float2Vect" x="-127.000000" y="213.000000"> <Parameter name="LOOKAHEAD" type="int" value="" description=""/> <Parameter name="LOOKBACK" type="int" value="" description=""/> </Node> <Node name="node_NOP_1" type="NOP" x="-741.000000" y="154.000000"/> <Node name="node_RectAnalyser_1" type="RectAnalyser" x="-664.000000" 
y="155.000000"/> <Node name="node_ToFloat_1" type="ToFloat" x="-365.000000" y="104.000000"/> <Node name="node_ToFloat_2" type="ToFloat" x="-365.000000" y="139.000000"/> <Node name="node_ToFloat_3" type="ToFloat" x="-366.000000" y="170.000000"/> <Node name="node_ToFloat_4" type="ToFloat" x="-364.000000" y="201.000000"/> <Link from="node_Float2Vect_1" output="OUTPUT" to="node_Concatenate_1" input="INPUT1"/> <Link from="node_Concatenate_2" output="OUTPUT" to="node_Concatenate_1" input="INPUT2">95 169 97 151 155 150.5 </Link> <Link from="node_Float2Vect_2" output="OUTPUT" to="node_Concatenate_2" input="INPUT1"/> <Link from="node_Float2Vect_3" output="OUTPUT" to="node_Concatenate_2" input="INPUT2"/> <Link from="node_Div_1" output="OUTPUT" to="node_Float2Vect_1" input="INPUT"/> <Link from="node_Div_2" output="OUTPUT" to="node_Float2Vect_2" input="INPUT"/> <Link from="node_Div_3" output="OUTPUT" to="node_Float2Vect_3" input="INPUT"/> <Link from="node_NOP_1" output="OUTPUT" to="node_RectAnalyser_1" input="CRECT"/> <Link from="node_ToFloat_1" output="OUTPUT" to="node_Div_1" input="NUM"/> <Link from="node_RectAnalyser_1" output="HEIGHT" to="node_ToFloat_1" input="INPUT"/> <Link from="node_ToFloat_1" output="OUTPUT" to="node_Div_3" input="DEN"/> <Link from="node_RectAnalyser_1" output="WIDTH" to="node_ToFloat_2" input="INPUT"/> <Link from="node_ToFloat_2" output="OUTPUT" to="node_Div_1" input="DEN"/> <Link from="node_ToFloat_2" output="OUTPUT" to="node_Div_2" input="DEN"/> <Link from="node_RectAnalyser_1" output="X_RELATIVE_CENTER_OF_GRAVITY" to="node_ToFloat_3" input="INPUT"/> <Link from="node_ToFloat_3" output="OUTPUT" to="node_Div_2" input="NUM"/> <Link from="node_RectAnalyser_1" output="Y_RELATIVE_CENTER_OF_GRAVITY" to="node_ToFloat_4" input="INPUT"/> <Link from="node_ToFloat_4" output="OUTPUT" to="node_Div_3" input="NUM"/> <NetInput name="INPUT" node="node_NOP_1" terminal="INPUT" object_type="any" description="No description available"/> <NetOutput name="OUTPUT" node="node_Concatenate_1" terminal="OUTPUT" object_type="any" description="No description available"/> <Note x="-285" y="457" visible="1" text="Extract WIDTH / HEIGHT X_CENTER_OF_GRAVITY / WIDTH and Y_CENTER_OF_GRAVITY / HEIGHT to be used as additional inputs for the neural network."/> </Network> <Network type="subnet" name="SYMBOL_EXTRACTOR"> <Node name="node_NNetExec_1" type="NNetExec" x="-447.000000" y="141.000000"> <Parameter name="OUTPUTLENGTH" type="int" value="36" description=""/> </Node> <Node name="node_SymbolExtractor_1" type="SymbolExtractor" x="-990.000000" y="115.000000"> <Parameter name="CHANNEL" type="int" value="1" description=""/> </Node> <Node name="node_Scale_1" type="Scale" x="-787.000000" y="115.000000"> <Parameter name="WIDTH" type="int" value="13" description=""/> <Parameter name="HEIGHT" type="int" value="13" description=""/> <Parameter name="DEBUG" type="int" value="0" description=""/> </Node> <Node name="node_DCVector_1" type="DCVector" x="-49.000000" y="220.000000"> <Parameter name="LENGTH" type="int" value="1" description=""/> <Parameter name="VALUE" type="float" value="-2" description=""/> </Node> <Node name="node_VectorSetIndex_1" type="VectorSetIndex" x="162.000000" y="235.000000"/> <Node name="node_Constant_1" type="Constant" x="-112.000000" y="251.000000"> <Parameter name="VALUE" type="int" value="0" description=""/> </Node> <Node name="node_RectBoundaries_1" type="RectBoundaries" x="-450.000000" y="22.000000"/> <Node name="node_Concatenate_1" type="Concatenate" x="406.000000" y="127.000000"/> <Node 
name="node_EXTRA_INFO_1" type="EXTRA_INFO" x="-804.000000" y="140.000000"/> <Node name="node_Concatenate_2" type="Concatenate" x="-624.000000" y="132.000000"/> <Node name="node_NewSymbolIdentify_1" type="NewSymbolIdentify" x="-308.000000" y="216.000000"> <Parameter name="THRESHOLD" type="float" value="0.80" description=""/> </Node> <Node name="node_Concat_1" type="Concat" x="-208.000000" y="134.000000"/> <Node name="node_ToFloat_1" type="ToFloat" x="-189.000000" y="215.000000"/> <Node name="node_RectAnalyser_1" type="RectAnalyser" x="-659.000000" y="-180.000000"/> <Node name="node_IF_1" type="IF" x="726.000000" y="-185.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_AND_1" type="AND" x="-40.000000" y="-201.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="false" description="Pull on INPUT2 even if INPUT1 is false"/> </Node> <Node name="node_Greater_1" type="Greater" x="-184.000000" y="-241.000000"/> <Node name="node_Greater_2" type="Greater" x="-184.000000" y="-185.000000"/> <Node name="node_Constant_2" type="Constant" x="-482.000000" y="-274.000000"> <Parameter name="VALUE" type="int" value="30" description="The value"/> </Node> <Node name="node_Constant_3" type="Constant" x="-481.000000" y="-247.000000"> <Parameter name="VALUE" type="int" value="30" description="The value"/> </Node> <Node name="node_NilObject_1" type="NilObject" x="443.000000" y="-169.000000"/> <Link from="node_Concatenate_2" output="OUTPUT" to="node_NNetExec_1" input="INPUT"/> <Link from="node_SymbolExtractor_1" output="SYMBOL" to="node_Scale_1" input="CRECT"/> <Link from="node_DCVector_1" output="OUTPUT" to="node_VectorSetIndex_1" input="VECTOR"/> <Link from="node_Constant_1" output="VALUE" to="node_VectorSetIndex_1" input="INDEX"/> <Link from="node_SymbolExtractor_1" output="SYMBOL" to="node_RectBoundaries_1" input="CRECT">-932 115 -644 22 -504.5 22 </Link> <Link from="node_VectorSetIndex_1" output="OUTPUT" to="node_Concatenate_1" input="INPUT1">215.5 235 269 236 268 121 306 119.5 </Link> <Link from="node_SymbolExtractor_1" output="SYMBOL" to="node_EXTRA_INFO_1" input="INPUT"/> <Link from="node_Scale_1" output="OUTPUT" to="node_Concatenate_2" input="INPUT1"/> <Link from="node_EXTRA_INFO_1" output="OUTPUT" to="node_Concatenate_2" input="INPUT2"/> <Link from="node_RectBoundaries_1" output="OUTPUT" to="node_Concat_1" input="INPUT1"/> <Link from="node_NNetExec_1" output="OUTPUT" to="node_Concat_1" input="INPUT2"/> <Link from="node_Concat_1" output="OUTPUT" to="node_Concatenate_1" input="INPUT2"/> <Link from="node_NNetExec_1" output="OUTPUT" to="node_NewSymbolIdentify_1" input="INPUT"/> <Link from="node_ToFloat_1" output="OUTPUT" to="node_VectorSetIndex_1" input="VALUE">-162 215 -153 235 50.5 235 </Link> <Link from="node_NewSymbolIdentify_1" output="SYMBOL_ID" to="node_ToFloat_1" input="INPUT"/> <Link from="node_SymbolExtractor_1" output="SYMBOL" to="node_RectAnalyser_1" input="CRECT"/> <Link from="node_AND_1" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_Greater_1" output="OUTPUT" to="node_AND_1" input="INPUT1"/> <Link from="node_RectAnalyser_1" output="WIDTH" to="node_Greater_2" input="INPUT2"/> <Link from="node_RectAnalyser_1" output="HEIGHT" to="node_Greater_1" input="INPUT2"/> <Link from="node_Greater_2" output="OUTPUT" to="node_AND_1" input="INPUT2"/> <Link from="node_Constant_2" output="VALUE" to="node_Greater_1" input="INPUT1"/> <Link from="node_Constant_3" output="VALUE" 
to="node_Greater_2" input="INPUT1"/> <Link from="node_Concatenate_1" output="OUTPUT" to="node_IF_1" input="THEN">452 124 548 125 549 -185 671 -185 </Link> <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="ELSE"/> <NetInput name="DATA" node="node_SymbolExtractor_1" terminal="DATA" object_type="any" description="No description available"/> <NetInput name="INDEX" node="node_SymbolExtractor_1" terminal="INDEX" object_type="any" description="No description available"/> <NetInput name="NNET" node="node_NNetExec_1" terminal="NNET" object_type="any" description="No description available"/> <NetOutput name="SYMBOL_VECTOR" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <Note x="-845" y="305" visible="1" text="Characters are scaled (13x13) and we add extra ratios to the input vector to produce a 172 value input vector."/> <Note x="-419" y="380" visible="1" text="Neural network output is calculated and the ID of the output unit containing the highest score (must be higher than 0.8) is returned"/> <Note x="-125" y="63" visible="1" text="We add The position (x1,y1,x2,y2) position of the character to the output vector. That will be useful to group characters into words later."/> </Network> <Network type="iterator" name="STRING_EXTRACTOR"> <Node name="node_Accumulate_1" type="Accumulate" x="218.000000" y="210.000000"/> <Node name="node_IF_1" type="IF" x="36.000000" y="31.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description=""/> </Node> <Node name="node_Constant_1" type="Constant" x="-96.000000" y="30.000000"> <Parameter name="VALUE" type="bool" value="true" description=""/> </Node> <Node name="node_Constant_2" type="Constant" x="-167.000000" y="46.000000"> <Parameter name="VALUE" type="bool" value="false" description=""/> </Node> <Node name="node_Smaller_1" type="Smaller" x="-235.000000" y="16.000000"/> <Node name="node_IterCount_1" type="IterCount" x="-500.000000" y="9.000000"/> <Node name="node_SYMBOL_EXTRACTOR_1" type="SYMBOL_EXTRACTOR" x="-107.000000" y="121.000000"/> <Node name="node_NOP_1" type="NOP" x="-99.000000" y="203.000000"/> <Node name="node_IF_2" type="IF" x="384.000000" y="135.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_isNil_1" type="isNil" x="125.000000" y="121.000000"/> <Link from="node_Smaller_1" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_Constant_1" output="VALUE" to="node_IF_1" input="THEN"/> <Link from="node_Constant_2" output="VALUE" to="node_IF_1" input="ELSE"/> <Link from="node_IterCount_1" output="OUTPUT" to="node_Smaller_1" input="INPUT1">-465.5 9 -330 9 -318.5 8.5 </Link> <Link from="node_IterCount_1" output="OUTPUT" to="node_SYMBOL_EXTRACTOR_1" input="INDEX">-465.5 9 -379 119 -227.5 121 </Link> <Link from="node_SYMBOL_EXTRACTOR_1" output="SYMBOL_VECTOR" to="node_isNil_1" input="INPUT"/> <Link from="node_isNil_1" output="OUTPUT" to="node_IF_2" input="COND"/> <Link from="node_NOP_1" output="OUTPUT" to="node_IF_2" input="THEN"/> <Link from="node_Accumulate_1" output="OUTPUT" to="node_IF_2" input="ELSE"/> <Link from="node_NOP_1" output="OUTPUT" to="node_Accumulate_1" input="ACCUM"/> <Link from="node_SYMBOL_EXTRACTOR_1" output="SYMBOL_VECTOR" to="node_Accumulate_1" input="INPUT"/> <NetInput name="DATA" node="node_SYMBOL_EXTRACTOR_1" terminal="DATA" object_type="any" description="No description available"/> <NetInput name="NNET" 
node="node_SYMBOL_EXTRACTOR_1" terminal="NNET" object_type="any" description="No description available"/> <NetInput name="SYMBOL_COUNT" node="node_Smaller_1" terminal="INPUT2" object_type="any" description="No description available"/> <NetCondition name="CONDITION" node="node_IF_1" terminal="OUTPUT"/> <NetInput name="ACCUM" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> <NetOutput name="ACCUM_OUT" node="node_IF_2" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <Note x="-125" y="380" visible="1" text="This is a loop to extract all characters information from the image components and store the result into an accumulator."/> </Network> <Network type="subnet" name="FIND_THRESHOLD"> <Node name="node_StatIntensityAnalyser_1" type="StatIntensityAnalyser" x="-257.000000" y="-292.000000"> <Parameter name="FRACTION_ANALYSED" type="float" value="0.1" description=""/> </Node> <Node name="node_Add_1" type="Add" x="71.000000" y="-290.000000"/> <Node name="node_Div_1" type="Div" x="247.000000" y="-283.000000"/> <Node name="node_Constant_1" type="Constant" x="57.000000" y="-245.000000"> <Parameter name="VALUE" type="float" value="2.5" description=""/> </Node> <Link from="node_StatIntensityAnalyser_1" output="MAX_INTENSITY" to="node_Add_1" input="INPUT1"/> <Link from="node_StatIntensityAnalyser_1" output="MIN_INTENSITY" to="node_Add_1" input="INPUT2"/> <Link from="node_Add_1" output="OUTPUT" to="node_Div_1" input="NUM"/> <Link from="node_Constant_1" output="VALUE" to="node_Div_1" input="DEN"/> <NetInput name="IMAGE_IN" node="node_StatIntensityAnalyser_1" terminal="IMAGE_IN" object_type="any" description="No description available"/> <NetOutput name="THRESHOLD" node="node_Div_1" terminal="OUTPUT" object_type="any" description="No description available"/> <Note x="10" y="10" visible="1" text="Calculate the maximum intensity and minimum image intensity with random pixels (1%). 
Threshold is calculated with : (MAX+ MIN) / 2.5"/> </Network> <Network type="subnet" name="MULTI_SIGN_TRACKING"> <Node name="node_MultiSignTracking_1" type="MultiSignTracking" x="349.000000" y="-56.000000"> <Parameter name="MIN_WIDTH" type="int" value="25" description="minimum width to obtain before we are ready for recognition."/> <Parameter name="MIN_HEIGHT" type="int" value="35" description="minimum height to obtain before we are ready for recognition."/> <Parameter name="FOREGROUND_COLOR_ID" type="int" value="1" description="The color number being tracked."/> <Parameter name="BACKGROUND_COLOR_ID" type="int" value="0" description="The color number being tracked."/> </Node> <Node name="node_NOP_1" type="NOP" x="945.000000" y="-105.000000"/> <Node name="node_COLOR_TRACKER_1" type="COLOR_TRACKER" x="353.000000" y="85.000000"/> <Node name="node_Subsumption_1" type="Subsumption" x="1002.000000" y="12.000000"/> <Node name="node_Subsumption_2" type="Subsumption" x="1004.000000" y="58.000000"/> <Node name="node_Subsumption_3" type="Subsumption" x="1006.000000" y="111.000000"/> <Node name="node_NOP_2" type="NOP" x="-61.000000" y="92.000000"/> <Node name="node_Constant_1" type="Constant" x="119.000000" y="42.000000"> <Parameter name="VALUE" type="bool" value="true" description="The value"/> </Node> <Node name="node_NOP_3" type="NOP" x="27.000000" y="-63.000000"/> <Node name="node_NOP_4" type="NOP" x="27.000000" y="-9.000000"/> <Node name="node_IF_1" type="IF" x="724.000000" y="87.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_isNil_1" type="isNil" x="587.000000" y="72.000000"/> <Node name="node_NilObject_1" type="NilObject" x="570.000000" y="111.000000"/> <Node name="node_IF_2" type="IF" x="727.000000" y="147.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_IF_3" type="IF" x="723.000000" y="18.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_NilObject_2" type="NilObject" x="728.000000" y="-46.000000"/> <Node name="node_RectBoundaries_1" type="RectBoundaries" x="340.000000" y="170.000000"/> <Node name="node_SymbolExtractor_1" type="SymbolExtractor" x="84.000000" y="169.000000"> <Parameter name="CHANNEL" type="int" value="0" description="Channel from which to extract the symbol"/> </Node> <Node name="node_Constant_2" type="Constant" x="-122.000000" y="176.000000"> <Parameter name="VALUE" type="int" value="0" description="The value"/> </Node> <Node name="node_SymbolCounter_1" type="SymbolCounter" x="216.000000" y="245.000000"> <Parameter name="CHANNEL" type="int" value="0" description="Channel from which to extract the symbol"/> <Parameter name="DEPTH" type="int" value="2" description="Depth of the image (in bytes per pixel)"/> </Node> <Node name="node_IF_4" type="IF" x="630.000000" y="266.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_Greater_1" type="Greater" x="494.000000" y="251.000000"/> <Node name="node_Constant_3" type="Constant" x="339.000000" y="277.000000"> <Parameter name="VALUE" type="int" value="0" description="The value"/> </Node> <Node name="node_Constant_4" type="Constant" x="467.000000" y="306.000000"> <Parameter name="VALUE" 
type="bool" value="false" description="The value"/> </Node> <Link from="node_MultiSignTracking_1" output="COMPONENTS" to="node_NOP_1" input="INPUT"/> <Link from="node_Constant_1" output="VALUE" to="node_COLOR_TRACKER_1" input="ACTIVATED"/> <Link from="node_NOP_3" output="OUTPUT" to="node_MultiSignTracking_1" input="ACTIVATED"/> <Link from="node_NOP_2" output="OUTPUT" to="node_COLOR_TRACKER_1" input="COMPONENTS"/> <Link from="node_NOP_4" output="OUTPUT" to="node_MultiSignTracking_1" input="COMPONENTS"/> <Link from="node_isNil_1" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_COLOR_TRACKER_1" output="DELTA_X" to="node_isNil_1" input="INPUT"/> <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="THEN"/> <Link from="node_MultiSignTracking_1" output="DELTA_X" to="node_IF_1" input="ELSE"/> <Link from="node_IF_1" output="OUTPUT" to="node_Subsumption_1" input="LOW_PRIORITY"/> <Link from="node_isNil_1" output="OUTPUT" to="node_IF_2" input="COND"/> <Link from="node_IF_2" output="OUTPUT" to="node_Subsumption_2" input="LOW_PRIORITY"/> <Link from="node_NilObject_1" output="VALUE" to="node_IF_2" input="THEN"/> <Link from="node_MultiSignTracking_1" output="DELTA_Y" to="node_IF_2" input="ELSE"/> <Link from="node_MultiSignTracking_1" output="BOUNDARY" to="node_IF_3" input="ELSE"/> <Link from="node_NilObject_1" output="VALUE" to="node_IF_3" input="THEN"/> <Link from="node_isNil_1" output="OUTPUT" to="node_IF_3" input="COND"/> <Link from="node_IF_3" output="OUTPUT" to="node_Subsumption_3" input="LOW_PRIORITY"/> <Link from="node_NilObject_2" output="VALUE" to="node_Subsumption_1" input="HIGH_PRIORITY"/> <Link from="node_NilObject_2" output="VALUE" to="node_Subsumption_2" input="HIGH_PRIORITY"/> <Link from="node_NilObject_2" output="VALUE" to="node_Subsumption_3" input="HIGH_PRIORITY"/> <Link from="node_Constant_2" output="VALUE" to="node_SymbolExtractor_1" input="INDEX"/> <Link from="node_SymbolExtractor_1" output="SYMBOL" to="node_RectBoundaries_1" input="CRECT"/> <Link from="node_SymbolCounter_1" output="COUNT" to="node_Greater_1" input="INPUT1"/> <Link from="node_SymbolExtractor_1" output="SYMBOL" to="node_SymbolCounter_1" input="DATA"/> <Link from="node_Greater_1" output="OUTPUT" to="node_IF_4" input="COND"/> <Link from="node_Constant_3" output="VALUE" to="node_Greater_1" input="INPUT2"/> <Link from="node_RectBoundaries_1" output="OUTPUT" to="node_IF_4" input="THEN">397 170 398 265 575 266 </Link> <Link from="node_Constant_4" output="VALUE" to="node_IF_4" input="ELSE"/> <NetOutput name="COMPONENTS" node="node_NOP_1" terminal="OUTPUT" object_type="any" description="The output = The input"/> <NetInput name="COMPONENTS" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> <NetOutput name="DELTA_X" node="node_Subsumption_1" terminal="OUTPUT" object_type="any" description="No description available"/> <NetOutput name="DELTA_Y" node="node_Subsumption_2" terminal="OUTPUT" object_type="any" description="No description available"/> <NetOutput name="BOUNDARY" node="node_Subsumption_3" terminal="OUTPUT" object_type="any" description="No description available"/> <NetInput name="SIGN_TRACKING_ACTIVATED" node="node_NOP_3" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="BLACK_WHITE_COMPONENTS" node="node_NOP_4" terminal="INPUT" object_type="any" description="The input"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="198" y="472" visible="1" text="If activated, the MultiSignTracking behavior will output 
probable characters components, delta_x, delta_y and boundray of the group of components that could be characters."/> <Note x="843" y="370" visible="1" text="If we can't find blobs of foreground color surrounded by the background color, we will use only foreground color to "scan"."/> </Network> <Network type="subnet" name="ZOOM_FACTOR"> <Node name="node_Div_1" type="Div" x="-132.000000" y="147.000000"/> <Node name="node_Constant_1" type="Constant" x="-244.000000" y="156.000000"> <Parameter name="VALUE" type="float" value="683" description="The value"/> </Node> <Node name="node_Add_1" type="Add" x="-24.000000" y="139.000000"/> <Node name="node_Constant_2" type="Constant" x="-180.000000" y="87.000000"> <Parameter name="VALUE" type="float" value="1.0" description="The value"/> </Node> <Node name="node_NOP_1" type="NOP" x="-486.000000" y="140.000000"/> <Node name="node_NOP_2" type="NOP" x="191.000000" y="139.000000"/> <Link from="node_Constant_1" output="VALUE" to="node_Div_1" input="DEN"/> <Link from="node_Constant_2" output="VALUE" to="node_Add_1" input="INPUT1"/> <Link from="node_Div_1" output="OUTPUT" to="node_Add_1" input="INPUT2"/> <Link from="node_NOP_1" output="OUTPUT" to="node_Div_1" input="NUM"/> <Link from="node_Add_1" output="OUTPUT" to="node_NOP_2" input="INPUT"/> <NetInput name="ABS_ZOOM_VALUE" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> <NetOutput name="ZOOM_FACTOR" node="node_NOP_2" terminal="OUTPUT" object_type="any" description="The output = The input"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="-186" y="428" visible="1" text="Zoom factor 1X - 25X is calculated according to the Sony SNC-RZ30 camera."/> </Network> <Network type="subnet" name="PROCESS_SEGMENTS"> <Node name="node_SENTENCE_READER_1" type="SENTENCE_READER" x="-334.000000" y="-384.000000"/> <NetInput name="NNET" node="node_SENTENCE_READER_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_SENTENCE_READER_1" terminal="DICT" object_type="" description="No description available"/> <NetInput name="BLACK_WHITE_COMPONENTS" node="node_SENTENCE_READER_1" terminal="BLACK_WHITE_COMPONENTS" object_type="" description="The input"/> <NetOutput name="SENTENCE" node="node_SENTENCE_READER_1" terminal="SENTENCE" object_type="" description="A string corresponding to the sentence read"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> <Note x="-373" y="-194" visible="1" text="Will output Original text (without the dicitonary) and the sentence (if any) recognized in the image"/> </Network> <Network type="subnet" name="COLOR_TRACKER"> <Node name="node_ColorTracker_1" type="ColorTracker" x="-746.000000" y="-212.000000"/> <Node name="node_Constant_1" type="Constant" x="-1053.000000" y="-220.000000"> <Parameter name="VALUE" type="int" value="0" description="The value"/> </Node> <Node name="node_IF_1" type="IF" x="-182.000000" y="-294.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_IF_2" type="IF" x="-183.000000" y="-235.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_IF_3" type="IF" x="-183.000000" y="-175.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node 
name="node_NOP_1" type="NOP" x="-512.000000" y="-343.000000"/> <Node name="node_NilObject_1" type="NilObject" x="-485.000000" y="-159.000000"/> <Node name="node_NOP_2" type="NOP" x="-1018.000000" y="-161.000000"/> <Link from="node_Constant_1" output="VALUE" to="node_ColorTracker_1" input="COLOR_ID"/> <Link from="node_ColorTracker_1" output="DELTA_X" to="node_IF_1" input="THEN"/> <Link from="node_ColorTracker_1" output="DELTA_Y" to="node_IF_2" input="THEN"/> <Link from="node_NOP_1" output="OUTPUT" to="node_IF_1" input="COND"/> <Link from="node_NOP_1" output="OUTPUT" to="node_IF_2" input="COND"/> <Link from="node_NOP_1" output="OUTPUT" to="node_IF_3" input="COND"/> <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="ELSE"/> <Link from="node_NilObject_1" output="VALUE" to="node_IF_2" input="ELSE"/> <Link from="node_NilObject_1" output="VALUE" to="node_IF_3" input="ELSE"/> <Link from="node_NOP_2" output="OUTPUT" to="node_ColorTracker_1" input="COMPONENTS"/> <Link from="node_ColorTracker_1" output="BOUNDARY" to="node_IF_3" input="THEN"/> <NetOutput name="DELTA_X" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <NetOutput name="DELTA_Y" node="node_IF_2" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <NetOutput name="BOUNDARY" node="node_IF_3" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <NetInput name="ACTIVATED" node="node_NOP_1" terminal="INPUT" object_type="any" description="The input"/> <NetInput name="COMPONENTS" node="node_NOP_2" terminal="INPUT" object_type="any" description="The input"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> </Network> <Network type="subnet" name="IF_ACTIVATED_DO"> <Node name="node_IF_1" type="IF" x="-316.000000" y="-92.000000"> <Parameter name="PULL_ANYWAY" type="bool" value="" description="If true, the IF statement pulls also on the branch not taken"/> </Node> <Node name="node_NilObject_1" type="NilObject" x="-461.000000" y="-35.000000"/> <Link from="node_NilObject_1" output="VALUE" to="node_IF_1" input="ELSE"/> <NetInput name="SIGN_TRACKING_ACTIVATED" node="node_IF_1" terminal="COND" object_type="bool" description="The condition for the if statement"/> <NetInput name="DO" node="node_IF_1" terminal="THEN" object_type="any" description="What to do if the condition is true"/> <NetOutput name="OUTPUT" node="node_IF_1" terminal="OUTPUT" object_type="any" description="The object from THEN or ELSE depending on COND"/> <Note x="0" y="0" visible="0" text="Created with FlowDesigner 0.8.1"/> </Network> </Document> Index: Makefile.am =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/n-files/Makefile.am,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** Makefile.am 26 Aug 2004 12:12:20 -0000 1.1 --- Makefile.am 6 Jun 2005 20:55:17 -0000 1.2 *************** *** 3,7 **** toolboxdir = $(prefix) ! toolbox_DATA = READ.n EXTRA_DIST = $(toolbox_DATA) --- 3,7 ---- toolboxdir = $(prefix) ! 
toolbox_DATA = READ.n READv2.n EXTRA_DIST = $(toolbox_DATA) Index: READ.n =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/n-files/READ.n,v retrieving revision 1.6 retrieving revision 1.7 diff -C2 -d -r1.6 -r1.7 *** READ.n 20 Sep 2004 19:15:48 -0000 1.6 --- READ.n 6 Jun 2005 20:55:17 -0000 1.7 *************** *** 4,8 **** <Network type="subnet" name="MAIN"> <Node name="node_LOOP0_1" type="MAIN_PROCESS" x="-3397.000000" y="-618.000000"/> - <NetInput name="BLACK_WHITE_LOOKUP" node="node_LOOP0_1" terminal="BLACK_WHITE_LOOKUP" object_type="" description="No description available"/> <NetInput name="NNET" node="node_LOOP0_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_LOOP0_1" terminal="DICT" object_type="" description="No description available"/> --- 4,7 ---- *************** *** 17,20 **** --- 16,20 ---- <NetOutput name="ORIGINAL_TEXT" node="node_LOOP0_1" terminal="ORIGINAL_TEXT" object_type="" description="The object from THEN or ELSE depending on COND"/> <NetOutput name="READING_FLAG" node="node_LOOP0_1" terminal="READING_FLAG" object_type="" description="The object from THEN or ELSE depending on COND"/> + <NetInput name="BLACK_WHITE_LOOKUP" node="node_LOOP0_1" terminal="BLACK_WHITE_LOOKUP" object_type="any" description="No description available"/> <Note x="-3418" y="-384" visible="1" text="The MAIN network will load configuration from the files containing : - black and white color lookup - the neural network to use for recog - The dictionary to use * You can change this configuration by double clicking on the LoadFile blocks"/> </Network> *************** *** 53,58 **** <Node name="node_IF_ACTIVATED_DO_5" type="IF_ACTIVATED_DO" x="692.000000" y="404.000000"/> <Node name="node_IF_ACTIVATED_DO_6" type="IF_ACTIVATED_DO" x="688.000000" y="230.000000"/> ! <Node name="node_NOP_3" type="NOP" x="-547.000000" y="398.000000"/> ! <Link from="node_MULTI_SIGN_TRACKING_1" output="COMPONENTS" to="node_PROCESS_SEGMENTS_1" input="SYMBOLS_COMPONENTS">-104.5 76.5 -100 352 -48 351 </Link> <Link from="node_SUBNET0_1" output="COMPONENTS" to="node_MULTI_SIGN_TRACKING_1" input="COMPONENTS"/> <Link from="node_MULTI_SIGN_TRACKING_1" output="DELTA_X" to="node_PAN_TILT_CTRL_1" input="DELTA_X"/> --- 53,66 ---- <Node name="node_IF_ACTIVATED_DO_5" type="IF_ACTIVATED_DO" x="692.000000" y="404.000000"/> <Node name="node_IF_ACTIVATED_DO_6" type="IF_ACTIVATED_DO" x="688.000000" y="230.000000"/> ! <Node name="node_NOP_3" type="NOP" x="-734.000000" y="383.000000"/> ! <Node name="node_Components_1" type="Components" x="-638.000000" y="152.000000"> ! <Parameter name="XGAP" type="int" value="1" description=""/> ! <Parameter name="YGAP" type="int" value="1" description=""/> ! <Parameter name="NUM_COLOR" type="int" value="2" description=""/> ! <Parameter name="MIN_AREA" type="int" value="16" description=""/> ! </Node> ! <Node name="node_Binarize_1" type="Binarize" x="-777.000000" y="282.000000"/> ! <Node name="node_FIND_THRESHOLD_1" type="FIND_THRESHOLD" x="-1012.000000" y="274.000000"/> ! 
<Link from="node_MULTI_SIGN_TRACKING_1" output="COMPONENTS" to="node_PROCESS_SEGMENTS_1" input="SYMBOLS_COMPONENTS">-104.5 76.5 -99 329 -73 328.5 </Link> <Link from="node_SUBNET0_1" output="COMPONENTS" to="node_MULTI_SIGN_TRACKING_1" input="COMPONENTS"/> <Link from="node_MULTI_SIGN_TRACKING_1" output="DELTA_X" to="node_PAN_TILT_CTRL_1" input="DELTA_X"/> *************** *** 62,66 **** <Link from="node_NOP_1" output="OUTPUT" to="node_PAN_TILT_CTRL_1" input="CURRENT_ZOOM">-432 192 -411 151 40.5 152 </Link> <Link from="node_NOP_2" output="OUTPUT" to="node_SUBNET0_1" input="IMAGE">-1135 291 -1135 85 -1000 83.5 </Link> - <Link from="node_NOP_2" output="OUTPUT" to="node_PROCESS_SEGMENTS_1" input="IMAGE"/> <Link from="node_ZOOM_FACTOR_1" output="ZOOM_FACTOR" to="node_PAN_TILT_CTRL_1" input="ZOOM_FACTOR">-265 193 -111 193 40.5 122 </Link> <Link from="node_AND_1" output="OUTPUT" to="node_NOT_1" input="INPUT"/> --- 70,73 ---- *************** *** 82,88 **** <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_3" input="SIGN_TRACKING_ACTIVATED"/> <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_6" input="SIGN_TRACKING_ACTIVATED"/> ! <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_4" input="SIGN_TRACKING_ACTIVATED">-528 398 389 395 390 274 451.5 272.5 </Link> <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_5" input="SIGN_TRACKING_ACTIVATED"/> ! <NetInput name="BLACK_WHITE_LOOKUP" node="node_PROCESS_SEGMENTS_1" terminal="LOOKUP" object_type="" description="No description available"/> <NetInput name="NNET" node="node_PROCESS_SEGMENTS_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_PROCESS_SEGMENTS_1" terminal="DICT" object_type="" description="No description available"/> --- 89,100 ---- <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_3" input="SIGN_TRACKING_ACTIVATED"/> <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_6" input="SIGN_TRACKING_ACTIVATED"/> ! <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_4" input="SIGN_TRACKING_ACTIVATED">-715 383 389 395 390 274 451.5 272.5 </Link> <Link from="node_NOP_3" output="OUTPUT" to="node_IF_ACTIVATED_DO_5" input="SIGN_TRACKING_ACTIVATED"/> ! <Link from="node_FIND_THRESHOLD_1" output="THRESHOLD" to="node_Binarize_1" input="THRESHOLD"/> ! <Link from="node_NOP_2" output="OUTPUT" to="node_FIND_THRESHOLD_1" input="IMAGE_IN"/> ! <Link from="node_NOP_2" output="OUTPUT" to="node_Binarize_1" input="IMAGE_IN"/> ! <Link from="node_Binarize_1" output="IMAGE_OUT" to="node_Components_1" input="IMAGE"/> ! <Link from="node_Components_1" output="BLOBS" to="node_MULTI_SIGN_TRACKING_1" input="BLACK_WHITE_COMPONENTS"/> ! <Link from="node_Components_1" output="BLOBS" to="node_PROCESS_SEGMENTS_1" input="BLACK_WHITE_COMPONENTS"/> <NetInput name="NNET" node="node_PROCESS_SEGMENTS_1" terminal="NNET" object_type="" description="No description available"/> <NetInput name="DICT" node="node_PROCESS_SEGMENTS_1" terminal="DICT" object_type="" description="No description available"/> *************** *** 97,100 **** --- 109,113 ---- <NetOutput name="READING_FLAG" node="node_IF_ACTIVATED_DO_6" terminal="OUTPUT" object_type="" description="The object from THEN or ELSE depend... [truncated message content] |
From: Pierre M. <sid...@us...> - 2005-06-02 20:11:13
Update of /cvsroot/robotflow/RobotFlow/Probes/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv24097 Modified Files: VisualROISelection.cc Log Message: Added methods for rectangular ROI. Index: VisualROISelection.cc =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Probes/src/VisualROISelection.cc,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** VisualROISelection.cc 2 Jun 2005 16:58:29 -0000 1.1 --- VisualROISelection.cc 2 Jun 2005 20:11:02 -0000 1.2 *************** *** 30,35 **** #include "VisualROI.h" - using namespace std; using namespace FD; namespace RobotFlow { --- 30,35 ---- #include "VisualROI.h" using namespace FD; + using namespace std; namespace RobotFlow { *************** *** 60,63 **** --- 60,68 ---- * @parameter_description Number of channels of the video frame. * + * @parameter_name ROI_REGION_TYPE + * @parameter_type int + * @parameter_value 0 + * @parameter_description Geometric type for the ROI region (refer to enum e_VISUALROI_type in VisualROI.h). + * * @input_name IN_IMAGE * @input_type Image *************** *** 91,94 **** --- 96,100 ---- m_height = dereference_cast<int>(parameters.get("HEIGHT")); m_numChannels = dereference_cast<int>(parameters.get("NUM_CHANNELS")); + m_roiType = dereference_cast<int>(parameters.get("ROI_REGION_TYPE")); m_numBytesInFrame = m_width*m_height*m_numChannels; *************** *** 273,277 **** int yCen = m_BBy1+hsY+1; cout << "Creating a new ROI: xCen=" << xCen << " yCen=" << yCen << " hsX=" << hsX << " hsY=" << hsY << endl; ! m_roi = new VisualROI(e_VISUALROI_elliptical, xCen, yCen, hsX, hsY, 0); } --- 279,283 ---- int yCen = m_BBy1+hsY+1; cout << "Creating a new ROI: xCen=" << xCen << " yCen=" << yCen << " hsX=" << hsX << " hsY=" << hsY << endl; ! m_roi = new VisualROI(e_VISUALROI_type(m_roiType), xCen, yCen, hsX, hsY, 0); } *************** *** 336,339 **** --- 342,346 ---- ObjectRef m_imageRef; VisualROI *m_roi; + int m_roiType; IplImage *m_curFrame; }; |
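In short, the ROI geometry in this probe is no longer hard-coded to elliptical; it follows the new ROI_REGION_TYPE node parameter. A fragment mirroring the changed lines (not self-contained: parameters and dereference_cast come from FlowDesigner, and the integer-to-type mapping is defined by e_VISUALROI_type in VisualROI.h, which is not reproduced here):

// Read the geometric type from the new node parameter.
int roiType = dereference_cast<int>(parameters.get("ROI_REGION_TYPE"));
// Previously: new VisualROI(e_VISUALROI_elliptical, xCen, yCen, hsX, hsY, 0);
VisualROI *roi = new VisualROI(e_VISUALROI_type(roiType), xCen, yCen, hsX, hsY, 0);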
From: Pierre M. <sid...@us...> - 2005-06-02 20:07:35
Update of /cvsroot/robotflow/RobotFlow/Vision/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv22620 Modified Files: VisualROI.cc PFState2VisualROI.cc Log Message: Added methods for rectangular ROI. Index: VisualROI.cc =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/VisualROI.cc,v retrieving revision 1.5 retrieving revision 1.6 diff -C2 -d -r1.5 -r1.6 *** VisualROI.cc 26 May 2005 19:45:09 -0000 1.5 --- VisualROI.cc 2 Jun 2005 20:07:26 -0000 1.6 *************** *** 106,164 **** void VisualROI::DrawROI(IplImage *io_frame, const unsigned char *i_color) const { ! if (!io_frame) { ! throw new GeneralException ("VisualROI::DrawROI : invalid image reference.",__FILE__,__LINE__); ! } ! ! if (!i_color) { ! throw new GeneralException ("VisualROI::DrawROI : invalid color reference.",__FILE__,__LINE__); ! } ! ! if (!m_perim) { ! throw new GeneralException ("VisualROI::DrawROI : cannot draw ROI with uninitialized region data.",__FILE__,__LINE__); ! } ! ! unsigned char *p_pixels = (unsigned char *)io_frame->imageData; ! const short *p_perim = this->GetCstPerim(); ! int imgWidth = io_frame->width; ! int imgHeight = io_frame->height; ! int numChannels = io_frame->nChannels; ! int i, c, x, y; ! short deltaX, deltaY; ! bool broken = true; ! ! // Start at the top center of the region ! x = m_xCen; ! y = m_yCen; ! p_pixels += numChannels*(y*imgWidth + x); ! ! // Overlay region of interest ! for (i=m_perimLength; i>0; i--) { ! deltaX = *p_perim++; ! deltaY = *p_perim++; ! x += deltaX; ! y += deltaY; ! ! // Draw only if region is visible ! if (y>0 && y<imgHeight && x>0 && x<imgWidth) { ! if (!broken) { ! // Relative position ! p_pixels += numChannels*(deltaY*imgWidth + deltaX); ! } ! else { ! // Absolute position ! p_pixels = (unsigned char *)(io_frame->imageData + numChannels*(y*imgWidth + x)); ! broken = false; ! } ! ! for (c=0; c<numChannels; c++) { ! *p_pixels++ = i_color[c]; ! } ! ! p_pixels -= numChannels; ! } ! else { ! broken = true; } } } --- 106,125 ---- void VisualROI::DrawROI(IplImage *io_frame, const unsigned char *i_color) const { ! try { ! switch(m_type) { ! case e_VISUALROI_rectangular: ! DrawRectangularRegion(io_frame, i_color); ! break; ! case e_VISUALROI_elliptical: ! DrawEllipticalRegion(io_frame, i_color); ! break; ! case e_VISUALROI_unknown: ! default: ! throw new GeneralException ("VisualROI::DrawROI : unknown region geometric type",__FILE__,__LINE__); } } + catch (BaseException *e) { + throw e->add(new GeneralException("Exception caught in VisualROI::VisualROI:",__FILE__,__LINE__)); + } } *************** *** 166,232 **** unsigned char *io_pixels, const unsigned char *i_color) const { ! if (!io_pixels) { ! throw new GeneralException ("VisualROI::DrawROI : invalid pixels reference.",__FILE__,__LINE__); ! } ! ! if (!i_color) { ! throw new GeneralException ("VisualROI::DrawROI : invalid color reference.",__FILE__,__LINE__); ! } ! ! if (!m_perim) { ! throw new GeneralException ("VisualROI::DrawROI : cannot draw ROI with uninitialized region data.",__FILE__,__LINE__); ! } ! ! unsigned char *p_pixels = io_pixels; ! const short *p_perim = this->GetCstPerim(); ! int imgWidth = i_width; ! int imgHeight = i_height; ! int numChannels = i_numChannels; ! int i, c, x, y; ! short deltaX, deltaY; ! bool broken = true; ! ! // Start at the top center of the region ! x = m_xCen; ! y = m_yCen; ! p_pixels += numChannels*(y*imgWidth + x); ! ! // Overlay region of interest ! for (i=m_perimLength; i>0; i--) { ! deltaX = *p_perim++; ! 
deltaY = *p_perim++; ! x += deltaX; ! y += deltaY; ! ! // Draw only if region is visible ! if (y>0 && y<imgHeight && x>0 && x<imgWidth) { ! if (!broken) { ! // Relative position ! p_pixels += numChannels*(deltaY*imgWidth + deltaX); ! } ! else { ! // Absolute position ! p_pixels = (unsigned char *)(io_pixels + numChannels*(y*imgWidth + x)); ! broken = false; ! } ! ! for (c=0; c<numChannels; c++) { ! *p_pixels++ = i_color[c]; ! } ! ! p_pixels -= numChannels; ! } ! else { ! broken = true; } } } void VisualROI::Reset(int i_hsX, int i_hsY, int i_angle) { - delete [] m_perim; - delete [] m_normVects; - delete [] m_mask; - m_hsX = i_hsX; m_hsY = i_hsY; --- 127,150 ---- unsigned char *io_pixels, const unsigned char *i_color) const { ! try { ! switch(m_type) { ! case e_VISUALROI_rectangular: ! DrawRectangularRegion(i_width, i_height, i_numChannels, io_pixels, i_color); ! break; ! case e_VISUALROI_elliptical: ! DrawEllipticalRegion(i_width, i_height, i_numChannels, io_pixels, i_color); ! break; ! case e_VISUALROI_unknown: ! default: ! throw new GeneralException ("VisualROI::DrawROI : unknown region geometric type",__FILE__,__LINE__); } } + catch (BaseException *e) { + throw e->add(new GeneralException("Exception caught in VisualROI::VisualROI:",__FILE__,__LINE__)); + } } void VisualROI::Reset(int i_hsX, int i_hsY, int i_angle) { m_hsX = i_hsX; m_hsY = i_hsY; *************** *** 385,388 **** --- 303,453 ---- // Private routines // + void VisualROI::DrawRectangularRegion(IplImage *io_frame, const unsigned char *i_color) const + { + CvPoint ulc, lrc; + CvScalar color; + + if (io_frame->nChannels == 3) { + color = CV_RGB(i_color[0], i_color[1], i_color[2]); + } + else if (io_frame->nChannels == 1) { + color = cvRealScalar((double)(i_color[0])); + } + else { + throw new GeneralException ("VisualROI::DrawRectangularRegion : can only draw region with an image with 1 or 3 channel(s).",__FILE__,__LINE__); + } + + ulc.x = m_xCen - m_hsX; + ulc.y = m_yCen - m_hsY; + lrc.x = m_xCen + m_hsX; + lrc.y = m_yCen + m_hsY; + cvRectangle(io_frame, ulc, lrc, color, 1); + } + + void VisualROI::DrawEllipticalRegion(IplImage *io_frame, const unsigned char *i_color) const + { + if (!io_frame) { + throw new GeneralException ("VisualROI::DrawROI : invalid image reference.",__FILE__,__LINE__); + } + + if (!i_color) { + throw new GeneralException ("VisualROI::DrawROI : invalid color reference.",__FILE__,__LINE__); + } + + if (!m_perim) { + throw new GeneralException ("VisualROI::DrawROI : cannot draw ROI with uninitialized region data.",__FILE__,__LINE__); + } + + unsigned char *p_pixels = (unsigned char *)io_frame->imageData; + const short *p_perim = this->GetCstPerim(); + int imgWidth = io_frame->width; + int imgHeight = io_frame->height; + int numChannels = io_frame->nChannels; + int i, c, x, y; + short deltaX, deltaY; + bool broken = true; + + // Start at the top center of the region + x = m_xCen; + y = m_yCen; + p_pixels += numChannels*(y*imgWidth + x); + + // Overlay region of interest + for (i=m_perimLength; i>0; i--) { + deltaX = *p_perim++; + deltaY = *p_perim++; + x += deltaX; + y += deltaY; + + // Draw only if region is visible + if (y>0 && y<imgHeight && x>0 && x<imgWidth) { + if (!broken) { + // Relative position + p_pixels += numChannels*(deltaY*imgWidth + deltaX); + } + else { + // Absolute position + p_pixels = (unsigned char *)(io_frame->imageData + numChannels*(y*imgWidth + x)); + broken = false; + } + + for (c=0; c<numChannels; c++) { + *p_pixels++ = i_color[c]; + } + + p_pixels -= numChannels; + } + else { + 
broken = true; + } + } + } + + void VisualROI::DrawRectangularRegion(int i_width, int i_height, int i_numChannels, + unsigned char *io_pixels, const unsigned char *i_color) const + { + throw new GeneralException ("VisualROI::DrawRectangularRegion : method not yet implemented.",__FILE__,__LINE__); + } + + void VisualROI::DrawEllipticalRegion(int i_width, int i_height, int i_numChannels, + unsigned char *io_pixels, const unsigned char *i_color) const + { + if (!io_pixels) { + throw new GeneralException ("VisualROI::DrawROI : invalid pixels reference.",__FILE__,__LINE__); + } + + if (!i_color) { + throw new GeneralException ("VisualROI::DrawROI : invalid color reference.",__FILE__,__LINE__); + } + + if (!m_perim) { + throw new GeneralException ("VisualROI::DrawROI : cannot draw ROI with uninitialized region data.",__FILE__,__LINE__); + } + + unsigned char *p_pixels = io_pixels; + const short *p_perim = this->GetCstPerim(); + int imgWidth = i_width; + int imgHeight = i_height; + int numChannels = i_numChannels; + int i, c, x, y; + short deltaX, deltaY; + bool broken = true; + + // Start at the top center of the region + x = m_xCen; + y = m_yCen; + p_pixels += numChannels*(y*imgWidth + x); + + // Overlay region of interest + for (i=m_perimLength; i>0; i--) { + deltaX = *p_perim++; + deltaY = *p_perim++; + x += deltaX; + y += deltaY; + + // Draw only if region is visible + if (y>0 && y<imgHeight && x>0 && x<imgWidth) { + if (!broken) { + // Relative position + p_pixels += numChannels*(deltaY*imgWidth + deltaX); + } + else { + // Absolute position + p_pixels = (unsigned char *)(io_pixels + numChannels*(y*imgWidth + x)); + broken = false; + } + + for (c=0; c<numChannels; c++) { + *p_pixels++ = i_color[c]; + } + + p_pixels -= numChannels; + } + else { + broken = true; + } + } + } + void VisualROI::MakeEllipticalRegion() { *************** *** 396,399 **** --- 461,468 ---- int x, y, xOff, yOff, xAbs, yAbs; int curX, curY; + + delete [] m_perim; + delete [] m_normVects; + delete [] m_mask; // Number of iterations in loops below *************** *** 467,471 **** void VisualROI::MakeRectangularRegion() { ! throw new GeneralException ("VisualROI::MakeRectangularRegion : method not yet implemented.",__FILE__,__LINE__); } --- 536,542 ---- void VisualROI::MakeRectangularRegion() { ! if (m_angle != 0) { ! throw new GeneralException ("VisualROI::MakeRectangularRegion : method not yet implemented for angle different from 0.",__FILE__,__LINE__); ! } } Index: PFState2VisualROI.cc =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/PFState2VisualROI.cc,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** PFState2VisualROI.cc 26 May 2005 19:49:37 -0000 1.1 --- PFState2VisualROI.cc 2 Jun 2005 20:07:26 -0000 1.2 *************** *** 47,50 **** --- 47,55 ---- * @parameter_description Flag indicating to use the ROI rotation angle in the particle's state. * + * @parameter_name ROI_REGION_TYPE + * @parameter_type int + * @parameter_value 0 + * @parameter_description Geometric type for the ROI region (refer to enum e_VISUALROI_type in VisualROI.h). 
+ * * @input_name IN_PARTICLE * @input_type PFGenericParticle *************** *** 69,72 **** --- 74,78 ---- m_useScale = dereference_cast<bool>(parameters.get("USE_SCALE")); m_useAngle = dereference_cast<bool>(parameters.get("USE_ROTATION_ANGLE")); + m_roiType = dereference_cast<int>(parameters.get("ROI_REGION_TYPE")); // First 2 elements of state is center position x,y *************** *** 85,89 **** // Allocate ROI m_curROI = RCPtr<VisualROI>(new VisualROI()); ! m_curROI->SetType(e_VISUALROI_elliptical); } --- 91,95 ---- // Allocate ROI m_curROI = RCPtr<VisualROI>(new VisualROI()); ! m_curROI->SetType(e_VISUALROI_type(m_roiType)); } *************** *** 141,144 **** --- 147,151 ---- bool m_useScale; bool m_useAngle; + int m_roiType; unsigned int m_stateSize; |
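The new ROI_REGION_TYPE parameter is read as a plain int and cast straight to e_VISUALROI_type, so an out-of-range value would only surface later, when DrawROI falls through to the unknown-type branch. Below is a minimal guard one could place in front of SetType; IntToROIType is a hypothetical helper, not part of this commit, and it relies only on the enum constants declared in VisualROI.h, not on their numeric values.

    // Hypothetical validation before m_curROI->SetType(); reuses the
    // codebase's GeneralException convention for error reporting.
    static e_VISUALROI_type IntToROIType(int i_type)
    {
       switch (i_type) {
       case e_VISUALROI_rectangular:
       case e_VISUALROI_elliptical:
          return e_VISUALROI_type(i_type);
       default:
          throw new GeneralException(
             "PFState2VisualROI : unsupported ROI_REGION_TYPE value.",
             __FILE__, __LINE__);
       }
    }

With such a guard the allocation step would read m_curROI->SetType(IntToROIType(m_roiType)), and a bad parameter value would be caught at node construction time rather than at draw time.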
From: Pierre M. <sid...@us...> - 2005-06-02 20:06:40
|
Update of /cvsroot/robotflow/RobotFlow/Vision/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv22132 Modified Files: VisualTargetManager.cc Log Message: Added likelihood sigma parameter. Index: VisualTargetManager.cc =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/VisualTargetManager.cc,v retrieving revision 1.6 retrieving revision 1.7 diff -C2 -d -r1.6 -r1.7 *** VisualTargetManager.cc 2 Jun 2005 16:46:44 -0000 1.6 --- VisualTargetManager.cc 2 Jun 2005 20:06:26 -0000 1.7 *************** *** 71,74 **** --- 71,79 ---- * @parameter_description Adaptation rate for each cue weight. * + * @parameter_name LIKELIHOOD_SIGMA + * @parameter_type float + * @parameter_value 200.0 + * @parameter_description Exponential sigma value to discriminate likelihood. + * * @input_name IMAGE_IN * @input_type Image *************** *** 157,160 **** --- 162,166 ---- m_targetAdaptRate = dereference_cast<float>(parameters.get("TARGET_ADAPT_RATE")); m_cueAdaptRate = dereference_cast<float>(parameters.get("CUE_ADAPT_RATE")); + m_lSigma = dereference_cast<float>(parameters.get("LIKELIHOOD_SIGMA")); m_numPixels = m_width*m_height; *************** *** 379,393 **** throw new GeneralException ("VisualTargetManager::calculate : cannot evaluate the target's likelihood with null input FEATURES_VECTOR.",__FILE__,__LINE__); } ! ! sim = m_refTarget->SimilarityWCueAdapt( ! &(object_cast<Vector<VisualFeatureDesc<double> *> >(featVecRef)), ! m_cueAdaptRate); ! targetMatch = (sim >= m_targetMatchThres); m_refTarget->AgeTarget(targetMatch); if (sim > m_targetAdaptThres) { ! m_refTarget->Adapt(&(object_cast<Vector<VisualFeatureDesc<double> *> >(featVecRef)), ! m_targetAdaptRate); } } --- 385,399 ---- throw new GeneralException ("VisualTargetManager::calculate : cannot evaluate the target's likelihood with null input FEATURES_VECTOR.",__FILE__,__LINE__); } ! RCPtr<Vector<VisualFeatureDesc<double> *> > featVecPtr = featVecRef; ! sim = m_refTarget->SimilarityWCueAdapt(featVecPtr.get(), m_cueAdaptRate); ! //sim = m_refTarget->Similarity(featVecPtr.get()); ! cout << "Similarity = " << sim << endl; ! //float likelihood = exp(m_lSigma*(sim-1.f)); ! //cout << "Likelihood = " << likelihood << endl; targetMatch = (sim >= m_targetMatchThres); m_refTarget->AgeTarget(targetMatch); if (sim > m_targetAdaptThres) { ! m_refTarget->Adapt(featVecPtr.get(), m_targetAdaptRate); } } |
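The commented-out lines in the diff show what LIKELIHOOD_SIGMA is meant for: turning the [0,1] similarity returned by SimilarityWCueAdapt into an exponential likelihood so that scores close to 1 dominate. A minimal sketch of that mapping, assuming the similarity really stays in [0,1]:

    #include <cmath>

    // Exponential sharpening of a similarity score into a likelihood.
    // With the default sigma of 200, sim = 1.0 maps to 1.0 while
    // sim = 0.99 already drops to exp(-2) ~= 0.135, so small similarity
    // differences are strongly discriminated.
    inline float SimilarityToLikelihood(float sim, float sigma)
    {
       return std::exp(sigma * (sim - 1.0f));
    }

In this revision the conversion is still commented out; only the raw similarity is compared against the match and adaptation thresholds.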
From: Pierre M. <sid...@us...> - 2005-06-02 20:06:11
|
Update of /cvsroot/robotflow/RobotFlow/Vision/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv21981 Modified Files: IntegralEdgesOriExtraction.cc Log Message: Made explicit cast for sqrt and atan2 parameters. Index: IntegralEdgesOriExtraction.cc =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/IntegralEdgesOriExtraction.cc,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** IntegralEdgesOriExtraction.cc 2 Jun 2005 16:54:00 -0000 1.1 --- IntegralEdgesOriExtraction.cc 2 Jun 2005 20:06:01 -0000 1.2 *************** *** 431,435 **** for (p=0; p<m_numPixels; p++) { // Compute edge strength ! float strength = sqrt((*p_oriXPixels)*(*p_oriXPixels) + (*p_oriYPixels)*(*p_oriYPixels)); if (strength < m_edgesStrTresh) { strength = 0; --- 431,435 ---- for (p=0; p<m_numPixels; p++) { // Compute edge strength ! float strength = sqrt((float)((*p_oriXPixels)*(*p_oriXPixels) + (*p_oriYPixels)*(*p_oriYPixels))); if (strength < m_edgesStrTresh) { strength = 0; *************** *** 441,445 **** // Compute edge orientation // Orientation will be between [0, 2PI] ! float angle = atan2(*p_oriYPixels, *p_oriXPixels) + k_IEOE_pi; // Determine the orientation image channel --- 441,445 ---- // Compute edge orientation // Orientation will be between [0, 2PI] ! float angle = atan2((float)(*p_oriYPixels), (float)(*p_oriXPixels)) + k_IEOE_pi; // Determine the orientation image channel |
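The casts matter because *p_oriXPixels and *p_oriYPixels are signed shorts: with the C++ <cmath> overload set, sqrt(int) and atan2(short, short) are ambiguous (or pick an unintended overload) on some compilers, so the arguments are promoted to float explicitly. A condensed sketch of the per-pixel computation after this commit; the strength thresholding against MIN/MAX_EDGES_STRENGTH is done by the node and omitted here, and the rounding mimics cvRound for non-negative angles:

    #include <cmath>

    // gx, gy: signed 16-bit Sobel responses for one pixel.
    // Returns the edge strength and stores the orientation bin
    // index in [0, numOriBins-1].
    float EdgeStrengthAndBin(short gx, short gy, int numOriBins, int &binOut)
    {
       const float pi = 3.14159265358979323846f;
       float strength = std::sqrt((float)(gx * gx + gy * gy));
       float angle = std::atan2((float)gy, (float)gx) + pi;   // angle in [0, 2*pi]
       int bin = (int)(angle / (2.0f * pi) * (float)(numOriBins - 1) + 0.5f);
       if (bin < 0) bin = 0;
       else if (bin >= numOriBins) bin = numOriBins - 1;
       binOut = bin;
       return strength;
    }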
From: Pierre M. <sid...@us...> - 2005-06-02 20:02:33
|
Update of /cvsroot/robotflow/RobotFlow/Vision/include In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv20306 Modified Files: VisualTargetManager.h Log Message: Added likelihood sigma parameter. Index: VisualTargetManager.h =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/include/VisualTargetManager.h,v retrieving revision 1.4 retrieving revision 1.5 diff -C2 -d -r1.4 -r1.5 *** VisualTargetManager.h 26 May 2005 19:37:29 -0000 1.4 --- VisualTargetManager.h 2 Jun 2005 20:02:25 -0000 1.5 *************** *** 80,83 **** --- 80,84 ---- float m_imgYCen; unsigned char m_roiColor[3]; + float m_lSigma; struct timeb m_t1, m_t2; |
From: Pierre M. <sid...@us...> - 2005-06-02 20:01:37
|
Update of /cvsroot/robotflow/RobotFlow/Vision/include In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv20013 Modified Files: VisualROI.h Log Message: Added methods for rectangular ROI. Index: VisualROI.h =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/include/VisualROI.h,v retrieving revision 1.4 retrieving revision 1.5 diff -C2 -d -r1.4 -r1.5 *** VisualROI.h 26 May 2005 19:37:29 -0000 1.4 --- VisualROI.h 2 Jun 2005 20:01:26 -0000 1.5 *************** *** 101,104 **** --- 101,114 ---- private: + void DrawRectangularRegion(IplImage *io_frame, const unsigned char *i_color) const; + + void DrawEllipticalRegion(IplImage *io_frame, const unsigned char *i_color) const; + + void DrawRectangularRegion(int i_width, int i_height, int i_numChannels, + unsigned char *io_pixels, const unsigned char *i_color) const; + + void DrawEllipticalRegion(int i_width, int i_height, int i_numChannels, + unsigned char *io_pixels, const unsigned char *i_color) const; + void MakeRectangularRegion(); |
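These private helpers back the two public DrawROI entry points: one overload takes an IplImage, the other a raw interleaved pixel buffer together with its width, height and channel count (the same parameter order the helpers use). A small usage sketch, mirroring the call made by the VisualROISelection probe; frame, pixels, width and height stand in for caller-provided data, and the blue overlay colour is only an example:

    // Overlay an elliptical ROI centred at (160,120) with half-sizes 40x30.
    unsigned char color[3] = {0, 0, 255};
    VisualROI roi(e_VISUALROI_elliptical, 160, 120, 40, 30, 0);
    roi.DrawROI(frame, (const unsigned char *)color);   // IplImage *frame
    roi.DrawROI(width, height, 3, pixels, color);       // raw unsigned char *pixels

Note that the rectangular case is only implemented for the IplImage overload in this revision; the raw-buffer DrawRectangularRegion still throws a not-yet-implemented exception.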
From: Pierre M. <sid...@us...> - 2005-06-02 19:58:43
|
Update of /cvsroot/robotflow/RobotFlow/Vision/include In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv18474 Modified Files: VisualIntegralDesc.h Log Message: Added boundary limit for Euclidean distance. Index: VisualIntegralDesc.h =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/include/VisualIntegralDesc.h,v retrieving revision 1.1 retrieving revision 1.2 diff -C2 -d -r1.1 -r1.2 *** VisualIntegralDesc.h 2 Jun 2005 16:49:38 -0000 1.1 --- VisualIntegralDesc.h 2 Jun 2005 19:58:34 -0000 1.2 *************** *** 284,289 **** if (dist > 1.0) { ! std::cout << "VisualIntegralDesc::EuclideanDist: Invalid dist=" << dist << std::endl; ! throw new FD::GeneralException("VisualIntegralDesc::EuclideanDist: distance should not be greater than 1.0.",__FILE__,__LINE__); } --- 284,288 ---- if (dist > 1.0) { ! return 0.0; } |
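The guard now returns 0.0 instead of throwing when the accumulated distance creeps past 1.0, which reads as a similarity floor: a candidate farther than the normalization allows simply scores zero. A rough sketch of the convention this implies, assuming the descriptor normalizes each feature by its maximum value so the distance is meant to live in [0,1]; ClampedEuclideanSimilarity is a hypothetical stand-alone helper, not the class method itself:

    #include <cmath>

    double ClampedEuclideanSimilarity(const double *ref, const double *cand,
                                      unsigned int size, double maxValue)
    {
       double sum = 0.0;
       for (unsigned int i = 0; i < size; i++) {
          double d = (ref[i] - cand[i]) / maxValue;   // per-feature normalization
          sum += d * d;
       }
       double dist = std::sqrt(sum / (double)size);   // intended to stay in [0,1]
       if (dist > 1.0) {
          return 0.0;   // the boundary limit added in this revision
       }
       return 1.0 - dist;
    }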
From: Pierre M. <sid...@us...> - 2005-06-02 16:59:31
|
Update of /cvsroot/robotflow/RobotFlow/Probes/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv16485 Modified Files: Makefile.am Log Message: Added VisualROISelection.cc and OpenCV file list Index: Makefile.am =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Probes/src/Makefile.am,v retrieving revision 1.12 retrieving revision 1.13 diff -C2 -d -r1.12 -r1.13 *** Makefile.am 16 Aug 2004 16:57:25 -0000 1.12 --- Makefile.am 2 Jun 2005 16:59:22 -0000 1.13 *************** *** 8,11 **** --- 8,17 ---- lib_LTLIBRARIES = libProbes.la + if WITH_OPENCV + OPENCV_SOURCES = VisualROISelection.cc + else + OPENCV_SOURCES = + endif + if WITH_SDL IMAGEPROBE_SDL_SOURCE = ImageProbeSDL.cc *************** *** 20,30 **** $(IMAGEPROBE_SDL_SOURCE) \ SymbolKeypad.cc \ ! CheckBoxPanel.cc ! libProbes_la_LDFLAGS = -release $(LT_RELEASE) $(SDL_LIB) INCLUDES = -I../include $(OVERFLOW_INCLUDE) $(GNOME_INCLUDE) \ ! -I../../Vision/include $(SDL_INCLUDE) LDADD = $(SDL_LIB) --- 26,37 ---- $(IMAGEPROBE_SDL_SOURCE) \ SymbolKeypad.cc \ ! CheckBoxPanel.cc $(OPENCV_SOURCES) ! ! libProbes_la_LDFLAGS = -release $(LT_RELEASE) $(SDL_LIB) $(OPENCV_LIBS) INCLUDES = -I../include $(OVERFLOW_INCLUDE) $(GNOME_INCLUDE) \ ! -I../../Vision/include $(SDL_INCLUDE) $(OPENCV_INCLUDES) LDADD = $(SDL_LIB) |
From: Pierre M. <sid...@us...> - 2005-06-02 16:58:38
|
Update of /cvsroot/robotflow/RobotFlow/Probes/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv16024 Added Files: VisualROISelection.cc Log Message: New probe for selecting a region of interest in an image or video frame. --- NEW FILE: VisualROISelection.cc --- /* Copyright (C) 2004 Pierre Moisan (Pie...@US...) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #ifndef _VISUALROISELECTION_CC_ #define _VISUALROISELECTION_CC_ #include "Object.h" #include "ObjectRef.h" #include "Exception.h" #include "Image.h" #include "BufferedNode.h" #include <gnome.h> #include <pthread.h> #include "gdk-pixbuf/gdk-pixbuf.h" #include "libgnomecanvas/gnome-canvas-pixbuf.h" #include "cv.h" #include "VisualROI.h" using namespace std; using namespace FD; namespace RobotFlow { //forward declaration class VisualROISelection; DECLARE_NODE(VisualROISelection) /*Node * @name VisualROISelection * @category RobotFlow:Probes * @description Region of interest selection interface * * @parameter_name WIDTH * @parameter_description The width of the video frame * @parameter_type int * @parameter_value 320 * * @parameter_name HEIGHT * @parameter_description The height of the video frame * @parameter_type int * @parameter_value 240 * * @parameter_name NUM_CHANNELS * @parameter_type int * @parameter_value 3 * @parameter_description Number of channels of the video frame. 
* * @input_name IN_IMAGE * @input_type Image * @input_description Current frame * * @output_name OUT_ROI * @output_type VisualROI * @output_description The user defined region of interest * END*/ class VisualROISelection : public BufferedNode { friend void on_save_button_clicked (GtkButton *button, VisualROISelection *cSelection); friend void on_show_button_clicked (GtkButton *button, VisualROISelection *cSelection); friend gboolean pixbuf_event_function (GnomeCanvasItem *canvasitem, GdkEvent *event, VisualROISelection *cSelection); public: VisualROISelection(string nodeName, ParameterSet params) : BufferedNode(nodeName, params), m_saving(false), m_show(true), m_skip(0), m_pixbuf_item(NULL), m_last_count(-1), m_roi(NULL) { // Buffered node inputs m_inImageID = addInput("IN_IMAGE"); // Buffered node inputs m_outROIID = addOutput("OUT_ROI"); m_width = dereference_cast<int>(parameters.get("WIDTH")); m_height = dereference_cast<int>(parameters.get("HEIGHT")); m_numChannels = dereference_cast<int>(parameters.get("NUM_CHANNELS")); m_numBytesInFrame = m_width*m_height*m_numChannels; m_roiColor[0] = 0; m_roiColor[1] = 0; m_roiColor[2] = 255; //create pixbuf m_pixbuf = gdk_pixbuf_new (GDK_COLORSPACE_RGB, FALSE, 8, m_width, m_height); CvSize size; size.width = m_width; size.height = m_height; m_curFrame = cvCreateImage(size, IPL_DEPTH_8U, m_numChannels); } virtual ~VisualROISelection() { gdk_threads_enter(); gtk_object_destroy(GTK_OBJECT(window1)); cvReleaseImage(&m_curFrame); delete m_roi; gdk_threads_leave(); } void initialize() { BufferedNode::initialize(); gdk_threads_enter(); window1 = gtk_window_new (GTK_WINDOW_TOPLEVEL); gtk_object_set_data (GTK_OBJECT (window1), "window1", window1); gtk_window_set_title (GTK_WINDOW (window1), _("window1")); vbox1 = gtk_vbox_new (FALSE, 0); gtk_widget_ref (vbox1); gtk_object_set_data_full (GTK_OBJECT (window1), "vbox1", vbox1, (GtkDestroyNotify) gtk_widget_unref); gtk_widget_show (vbox1); gtk_container_add (GTK_CONTAINER (window1), vbox1); vbox2 = gtk_vbox_new (FALSE, 0); gtk_widget_ref (vbox2); gtk_object_set_data_full (GTK_OBJECT (window1), "vbox2", vbox2, (GtkDestroyNotify) gtk_widget_unref); gtk_widget_show (vbox2); gtk_box_pack_start (GTK_BOX (vbox1), vbox2, TRUE, TRUE, 0); scrolledwindow2 = gtk_scrolled_window_new (NULL, NULL); gtk_widget_ref (scrolledwindow2); gtk_object_set_data_full (GTK_OBJECT (window1), "scrolledwindow2", scrolledwindow2, (GtkDestroyNotify) gtk_widget_unref); gtk_widget_show (scrolledwindow2); gtk_box_pack_start (GTK_BOX (vbox2), scrolledwindow2, TRUE, TRUE, 0); gtk_widget_push_visual (gdk_rgb_get_visual()); gtk_widget_push_colormap (gdk_rgb_get_cmap()); canvas1 = gnome_canvas_new (); gtk_widget_pop_colormap (); gtk_widget_pop_visual (); gtk_widget_ref (canvas1); gtk_object_set_data_full (GTK_OBJECT (window1), "canvas1", canvas1, (GtkDestroyNotify) gtk_widget_unref); gtk_widget_show (canvas1); gtk_container_add (GTK_CONTAINER (scrolledwindow2), canvas1); gtk_widget_set_usize (canvas1, 320, 240); gtk_widget_set_events (canvas1, GDK_BUTTON1_MOTION_MASK); gtk_widget_set_extension_events (canvas1, GDK_EXTENSION_EVENTS_CURSOR); gnome_canvas_set_scroll_region (GNOME_CANVAS (canvas1), 0, 0, 100, 100); hbox1 = gtk_hbox_new (FALSE, 0); gtk_widget_ref (hbox1); gtk_object_set_data_full (GTK_OBJECT (window1), "hbox1", hbox1, (GtkDestroyNotify) gtk_widget_unref); gtk_widget_show (hbox1); gtk_box_pack_start (GTK_BOX (vbox2), hbox1, TRUE, TRUE, 0); //SAVE save_button = gtk_button_new_with_label (_("Save")); gtk_widget_ref (save_button); 
gtk_object_set_data_full (GTK_OBJECT (window1), "save_button", save_button, (GtkDestroyNotify) gtk_widget_unref); gtk_widget_show (save_button); gtk_box_pack_start (GTK_BOX (hbox1), save_button, FALSE, FALSE, 0); //SHOW BUTTON show_button = gtk_button_new_with_label (_("Show")); gtk_widget_ref (show_button); gtk_object_set_data_full (GTK_OBJECT (window1), "show_button", show_button, (GtkDestroyNotify) gtk_widget_unref); gtk_widget_show (show_button); gtk_box_pack_start (GTK_BOX (hbox1), show_button, FALSE, TRUE, 0); //creating pixbuf item m_pixbuf_item = gnome_canvas_item_new (gnome_canvas_root (GNOME_CANVAS(canvas1)), gnome_canvas_pixbuf_get_type(), "pixbuf",m_pixbuf,NULL); //update window size and position gtk_window_set_default_size (GTK_WINDOW(window1),min(m_width + 100,800), min(m_height + 200,600)); gtk_window_set_position (GTK_WINDOW(window1),GTK_WIN_POS_CENTER); //update the scrollbars gnome_canvas_set_scroll_region (GNOME_CANVAS(canvas1),0.0, 0.0, m_width, (double) m_height); //connecting signals gtk_signal_connect (GTK_OBJECT (save_button), "clicked", GTK_SIGNAL_FUNC (on_save_button_clicked), this); gtk_signal_connect (GTK_OBJECT (show_button), "clicked", GTK_SIGNAL_FUNC (on_show_button_clicked), this); gtk_signal_connect(GTK_OBJECT(m_pixbuf_item), "event", GTK_SIGNAL_FUNC(pixbuf_event_function), this); gtk_widget_show (window1); gdk_threads_leave(); } void display_pixbuf() { gdk_threads_enter(); if (!m_imageRef->isNil()) { // Getting image IplImage *my_image = m_curFrame; unsigned char *image_data = (unsigned char*) my_image->imageData; unsigned char *pixbuf_data = gdk_pixbuf_get_pixels(m_pixbuf); int num_channels = gdk_pixbuf_get_n_channels(m_pixbuf); //copying image data for (int i = 0; i < m_width * m_height; i++) { //RED or HUE pixbuf_data[2] = *image_data++; //GREEN or SATURATION pixbuf_data[1] = *image_data++; //BLUE or VALUE pixbuf_data[0] = *image_data++; //next pixbuf pixel pixbuf_data += num_channels; } //update canvas. 
gnome_canvas_request_redraw (GNOME_CANVAS(canvas1), 0, 0, m_width, m_height); } gdk_threads_leave(); } void calculate(int output_id, int count, Buffer &out) { try { m_imageRef = getInput(m_inImageID, count); // Verify input image sanity if (object_cast<Image>(m_imageRef).get_width() != m_width || object_cast<Image>(m_imageRef).get_height() != m_height || object_cast<Image>(m_imageRef).get_pixelsize() != m_numChannels) { throw new GeneralException ("VisualROISelection::calculate : image parameters do not correspond to given input.",__FILE__,__LINE__); } memcpy(m_curFrame->imageData, object_cast<Image>(m_imageRef).get_data(), m_numBytesInFrame); if (m_show && m_roi) { m_roi->DrawROI(m_curFrame, (const unsigned char *)m_roiColor); } if (m_saving) { // Set a new ROI delete m_roi; int hsX = (m_BBx2 - m_BBx1)/2; int hsY = (m_BBy2 - m_BBy1)/2; int xCen = m_BBx1+hsX+1; int yCen = m_BBy1+hsY+1; cout << "Creating a new ROI: xCen=" << xCen << " yCen=" << yCen << " hsX=" << hsX << " hsY=" << hsY << endl; m_roi = new VisualROI(e_VISUALROI_elliptical, xCen, yCen, hsX, hsY, 0); } /* if (m_last_count != count) { if (m_skip == 0) { display_pixbuf(); } else if (count % m_skip == 0) { display_pixbuf(); } m_last_count = count; } */ display_pixbuf(); if (m_saving && m_roi) { m_saving = false; VisualROI *outROI = new VisualROI(*m_roi); (*outputs[m_outROIID].buffer)[count] = ObjectRef(outROI); } else { (*outputs[m_outROIID].buffer)[count] = ObjectRef(nilObject); } }//try catch (BaseException *e) { throw e->add(new GeneralException("Exception in VisualROISelection::calculate:",__FILE__,__LINE__)); } } private: int m_inImageID; int m_outROIID; bool m_show; bool m_saving; int m_skip; int m_width; int m_height; int m_numChannels; int m_numBytesInFrame; int m_BBx1; int m_BBx2; int m_BBy1; int m_BBy2; unsigned char m_roiColor[3]; GtkWidget *window1; GtkWidget *vbox1; GtkWidget *vbox2; GtkWidget *scrolledwindow2; GtkWidget *canvas1; GtkWidget *hbox1; GtkWidget *save_button; GtkWidget *show_button; GnomeCanvasItem *m_pixbuf_item; GdkPixbuf *m_pixbuf; int m_last_count; ObjectRef m_imageRef; VisualROI *m_roi; IplImage *m_curFrame; }; void on_save_button_clicked (GtkButton *button, VisualROISelection *cSelection) { cSelection->m_saving = true; } void on_show_button_clicked (GtkButton *button, VisualROISelection *cSelection) { cSelection->m_show = !cSelection->m_show; } gboolean pixbuf_event_function( GnomeCanvasItem *canvasitem, GdkEvent *event, VisualROISelection *cSelection) { static double x,y; static GnomeCanvasItem *item = NULL; double item_x, item_y; double x1,y1,x2,y2; item_x = event->button.x; item_y = event->button.y; switch (event->type) { case GDK_BUTTON_PRESS: if(item) { gtk_object_destroy(GTK_OBJECT(item)); } if (!cSelection->m_imageRef->isNil()) { item = gnome_canvas_item_new ( gnome_canvas_root(GNOME_CANVAS(cSelection->canvas1)), gnome_canvas_ellipse_get_type(), "x1",item_x, "y1",item_y, "x2",item_x, "y2",item_y, "outline_color", "blue", NULL); x = item_x; y = item_y; } break; case GDK_MOTION_NOTIFY: if (event->motion.state & GDK_BUTTON1_MASK && item) { //resizing if (item_x > x) { gnome_canvas_item_set (item, "x2",item_x,NULL); } else { gnome_canvas_item_set (item, "x1",item_x,"x2",x,NULL); } if (item_y > y) { gnome_canvas_item_set (item, "y2",item_y,NULL); } else { gnome_canvas_item_set (item, "y1",item_y,"y2",y,NULL); } } break; case GDK_BUTTON_RELEASE: //destroying the rectangle if (item) { //getting image Image &my_image = object_cast<Image>(cSelection->m_imageRef); int width = my_image.get_width(); int 
height = my_image.get_height(); //getting rect boundaries gtk_object_get(GTK_OBJECT(item),"x1",&x1,"y1",&y1,"x2",&x2,"y2",&y2,NULL); cSelection->m_BBx1 = (int)max(x1,0.0); cSelection->m_BBy1 = (int)max(y1,0.0); cSelection->m_BBx2 = (int)min(x2,(double) width); cSelection->m_BBy2 = (int)min(y2,(double) height); gtk_object_destroy(GTK_OBJECT(item)); item = NULL; } break; case GDK_LEAVE_NOTIFY: break; default: break; } return TRUE; } } #endif |
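For readability, the bounding-box-to-ROI conversion buried in calculate() is repeated here as a stand-alone sketch; MakeROIFromSelection is a hypothetical helper, the node performs the same arithmetic inline when the Save button is pressed:

    // Convert a dragged selection rectangle (x1,y1)-(x2,y2) into the
    // centre/half-size form expected by VisualROI; the +1 matches the
    // node's inline computation, and the rotation angle is fixed at 0.
    VisualROI *MakeROIFromSelection(int x1, int y1, int x2, int y2)
    {
       int hsX  = (x2 - x1) / 2;
       int hsY  = (y2 - y1) / 2;
       int xCen = x1 + hsX + 1;
       int yCen = y1 + hsY + 1;
       return new VisualROI(e_VISUALROI_elliptical, xCen, yCen, hsX, hsY, 0);
    }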
From: Pierre M. <sid...@us...> - 2005-06-02 16:54:37
|
Update of /cvsroot/robotflow/RobotFlow/Vision/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv13975 Modified Files: Makefile.am Log Message: Added files for integral image features. Index: Makefile.am =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/Makefile.am,v retrieving revision 1.31 retrieving revision 1.32 diff -C2 -d -r1.31 -r1.32 *** Makefile.am 26 May 2005 19:51:18 -0000 1.31 --- Makefile.am 2 Jun 2005 16:54:28 -0000 1.32 *************** *** 9,12 **** --- 9,15 ---- OPENCV_SOURCES = ColorHistExtraction.cc \ DrawPFParticle.cc \ + IntegralColorExtraction.cc \ + IntegralEdgesOriExtraction.cc \ + IntegralLBPExtraction.cc \ MeanShiftTracker.cc \ PFGenericParticle.cc \ *************** *** 17,20 **** --- 20,24 ---- VisualFeatureDesc.cc \ VisualHistogramDesc.cc \ + VisualIntegralDesc.cc \ VisualROI.cc \ VisualTarget.cc \ *************** *** 86,92 **** ! libVision_la_LDFLAGS = -release $(LT_RELEASE) $(PIXBUF_LIBS) $(GNOME_LIB) $(JPEG_LIB) $(OPENCV_LIBS) ! INCLUDES = -I../include $(OVERFLOW_INCLUDE) $(PIXBUF_INCLUDE) $(JPEG_INCLUDE) $(OPENCV_INCLUDES) install-data-local: --- 90,96 ---- ! libVision_la_LDFLAGS = -release $(LT_RELEASE) $(PIXBUF_LIBS) $(GNOME_LIB) $(JPEG_LIB) $(OPENCV_LIBS) ! INCLUDES = -I../include $(OVERFLOW_INCLUDE) $(PIXBUF_INCLUDE) $(JPEG_INCLUDE) $(OPENCV_INCLUDES) install-data-local: |
From: Pierre M. <sid...@us...> - 2005-06-02 16:54:11
|
Update of /cvsroot/robotflow/RobotFlow/Vision/src In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv13695 Added Files: IntegralColorExtraction.cc IntegralEdgesOriExtraction.cc IntegralLBPExtraction.cc VisualIntegralDesc.cc Log Message: New integral image features extraction and descriptor. --- NEW FILE: IntegralColorExtraction.cc --- /* Copyright (C) 2005 Pierre Moisan (Pie...@US...) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "IntegralColorExtraction.h" using namespace FD; using namespace std; namespace RobotFlow { DECLARE_NODE(IntegralColorExtraction) DECLARE_TYPE(IntegralColorExtraction) /*Node * * @name IntegralColorExtraction * @category RobotFlow:Vision:FeaturesExtraction * @description Integral color features extraction. * * @parameter_name FRAME_WIDTH * @parameter_type int * @parameter_value 320 * @parameter_description Video frame width. * * @parameter_name FRAME_HEIGHT * @parameter_type int * @parameter_value 240 * @parameter_description Video frame height. * * @parameter_name NUM_CHANNELS * @parameter_type int * @parameter_value 3 * @parameter_description Number of channels in video frame. * * @parameter_name NUM_HORI_RECT * @parameter_type int * @parameter_value 2 * @parameter_description Number of horizontal rectangle to use for integral features. * * @parameter_name NUM_VERT_RECT * @parameter_type int * @parameter_value 2 * @parameter_description Number of vertical rectangle to use for integral features. * * @input_name IN_IMAGE * @input_type Image * @input_description Current frame to process. * * @input_name CURRENT_ROI * @input_type VisualROI * @input_description The current region of interest. * * @input_name PREPROCESS_FRAME * @input_type bool * @input_description Flag indicating to preprocess a new image. * * @output_name OUT_FEATURES * @output_type Vector<VisualFeatureDesc<double> *> * @output_description Output features descriptor. * * @output_name PREPROCESS_COMPLETED * @output_type int * @output_description Output to force preprocessing. 
* END*/ // // Default constructor for Object // IntegralColorExtraction::IntegralColorExtraction() : m_width(-1), m_height(-1), m_numChannels(-1), m_numHoriIntRect(0), m_numVertIntRect(0), m_maxValue(0), m_featVect(NULL) { } // // Constructor with complete intialisation // IntegralColorExtraction::IntegralColorExtraction(int i_width, int i_height, int i_numChannels, int i_numHoriIntRect, int i_numVertIntRect, double i_maxValue) : m_width(i_width), m_height(i_height), m_numChannels(i_numChannels), m_numHoriIntRect(i_numHoriIntRect), m_numVertIntRect(i_numVertIntRect), m_maxValue(i_maxValue), m_featVect(NULL) { Initialize(); } // // BufferedNode constructor // IntegralColorExtraction::IntegralColorExtraction(string nodeName, ParameterSet params) : VisualFeaturesExtraction<double>(nodeName, params), m_featVect(NULL) { m_imageInID = addInput("IN_IMAGE"); m_roiInID = addInput("CURRENT_ROI"); m_useNextImgInID = addInput("PREPROCESS_FRAME"); m_featuresOutID = addOutput("OUT_FEATURES"); m_ppCompletedOutID = addOutput("PREPROCESS_COMPLETED"); m_width = dereference_cast<int>(parameters.get("FRAME_WIDTH")); m_height = dereference_cast<int>(parameters.get("FRAME_HEIGHT")); m_numChannels = dereference_cast<int>(parameters.get("NUM_CHANNELS")); m_numHoriIntRect = dereference_cast<int>(parameters.get("NUM_HORI_RECT")); m_numVertIntRect = dereference_cast<int>(parameters.get("NUM_VERT_RECT")); m_maxValue = 255.0; Initialize(); } IntegralColorExtraction::~IntegralColorExtraction() { for (int c=0; c<m_numChannels; c++) { cvReleaseImage(&(m_sumImage[c])); cvReleaseImage(&(m_chImage[c])); } cvReleaseImage(&m_curImage); delete [] m_sumPixPtr; delete [] m_chPixPtr; delete [] m_sumImage; delete [] m_chImage; } // Modified BufferedNode request method to support cyclic node connection void IntegralColorExtraction::request(int output_id, const ParameterSet &req) { if (req.exist("LOOKAHEAD")) { outputs[output_id].lookAhead = max(outputs[output_id].lookAhead,dereference_cast<int> (req.get("LOOKAHEAD"))); } if (req.exist("LOOKBACK")) { outputs[output_id].lookBack = max(outputs[output_id].lookBack,dereference_cast<int> (req.get("LOOKBACK"))); } if (req.exist("INORDER")) { inOrder = true; } int outputLookAhead=0, outputLookBack=0; outputLookAhead=max(outputLookAhead, outputs[output_id].lookAhead); outputLookBack =max(outputLookBack, outputs[output_id].lookBack); // Every output usually requires these inputs ParameterSet nextImgReq; nextImgReq.add("LOOKAHEAD", ObjectRef(Int::alloc(inputsCache[m_useNextImgInID].lookAhead+outputLookAhead))); nextImgReq.add("LOOKBACK", ObjectRef(Int::alloc(inputsCache[m_useNextImgInID].lookBack+outputLookBack))); inputs[m_useNextImgInID].node->request(inputs[m_useNextImgInID].outputID,nextImgReq); if (output_id == m_featuresOutID) { ParameterSet myReq; myReq.add("LOOKAHEAD", ObjectRef(Int::alloc(inputsCache[m_roiInID].lookAhead+outputLookAhead))); myReq.add("LOOKBACK", ObjectRef(Int::alloc(inputsCache[m_roiInID].lookBack+outputLookBack))); inputs[m_roiInID].node->request(inputs[m_roiInID].outputID,myReq); } else if (output_id == m_ppCompletedOutID) { ParameterSet myReq; myReq.add("LOOKAHEAD", ObjectRef(Int::alloc(inputsCache[m_imageInID].lookAhead+outputLookAhead))); myReq.add("LOOKBACK", ObjectRef(Int::alloc(inputsCache[m_imageInID].lookBack+outputLookBack))); inputs[m_imageInID].node->request(inputs[m_imageInID].outputID, myReq); } else { throw new GeneralException ("IntegralColorExtraction::request : unknown output ID.",__FILE__,__LINE__); } } void IntegralColorExtraction::calculate(int 
output_id, int count, Buffer &out) { try { bool useNext = dereference_cast<bool>(getInput(m_useNextImgInID, count)); if (useNext) { RCPtr<Image> imageRef = getInput(m_imageInID, count); // Verify input image sanity if (imageRef->get_width() != m_width || imageRef->get_height() != m_height || imageRef->get_pixelsize() != m_numChannels) { throw new GeneralException ("IntegralColorExtraction::calculate : image parameters do not correspond to given input.",__FILE__,__LINE__); } // Preprocess input image Preprocess((const unsigned char *)(imageRef->get_data())); } if (output_id == m_featuresOutID) { ObjectRef roiRef = getInput(m_roiInID, count); if (!roiRef->isNil()) { RCPtr<VisualROI> roiPtr = roiRef; ExtractFeatures(roiPtr.get()); (*outputs[m_featuresOutID].buffer)[count] = m_featVect; } else { (*outputs[m_featuresOutID].buffer)[count] = ObjectRef(nilObject); } } else if (output_id == m_ppCompletedOutID) { // Preprocess image than output true RCPtr<Image> imageRef = getInput(m_imageInID, count); // Verify input image sanity if (imageRef->get_width() != m_width || imageRef->get_height() != m_height || imageRef->get_pixelsize() != m_numChannels) { throw new GeneralException ("IntegralColorExtraction::calculate : image parameters do not correspond to given input.",__FILE__,__LINE__); } // Preprocess input image Preprocess((const unsigned char *)(imageRef->get_data())); (*outputs[m_ppCompletedOutID].buffer)[count] = ObjectRef(Int::alloc(1)); } } catch (BaseException *e) { throw e->add(new GeneralException("Exception in IntegralColorExtraction::calculate:",__FILE__,__LINE__)); } } void IntegralColorExtraction::ExtractFeatures(VisualROI *i_roi) { try { CvPoint ulcLimit, lrcLimit, curULC, curLRC; int roiWidth, roiHeight, deltaX, deltaY; double numPixelsInSubROI; int c,i,j; double *featPtr = (*m_featVect)[0]->GetFeatures(); int cenX = i_roi->GetXCen(); int cenY = i_roi->GetYCen(); if (cenX < 0 || cenX >= m_width || cenY < 0 || cenY >= m_height) { // ROI center is out of image plane // Ignore current ROI (*m_featVect)[0]->SetValidity(false); return; } ulcLimit.x = cenX - i_roi->GetHSX(); if (ulcLimit.x < 0) { ulcLimit.x = 0; } else if (ulcLimit.x >= m_width) { ulcLimit.x = m_width-1; } ulcLimit.y = cenY - i_roi->GetHSY(); if (ulcLimit.y < 0) { ulcLimit.y = 0; } else if (ulcLimit.y >= m_height) { ulcLimit.y = m_height-1; } lrcLimit.x = cenX + i_roi->GetHSX(); if (lrcLimit.x < 0) { lrcLimit.x = 0; } else if (lrcLimit.x >= m_width) { lrcLimit.x = m_width-1; } lrcLimit.y = cenY + i_roi->GetHSY(); if (lrcLimit.y < 0) { lrcLimit.y = 0; } else if (lrcLimit.y >= m_height) { lrcLimit.y = m_height-1; } roiWidth = lrcLimit.x - ulcLimit.x; roiHeight = lrcLimit.y - ulcLimit.y; if (roiWidth==0 || roiHeight==0) { // Invalid ROI (*m_featVect)[0]->SetValidity(false); return; } else { (*m_featVect)[0]->SetValidity(true); } //cout << "ROI Info: ulc=(" << ulcLimit.x << "," << ulcLimit.y << ") lrc=(" << lrcLimit.x << "," << lrcLimit.y << endl; if (m_numHoriIntRect) { deltaX = roiWidth/m_numHoriIntRect; } else { deltaX = roiWidth; } if (m_numVertIntRect) { deltaY = roiHeight/m_numVertIntRect; } else { deltaY = roiHeight; } for (c=0; c<m_numChannels; c++) { m_sumPixPtr[c] = (int *)(m_sumImage[c]->imageData); } numPixelsInSubROI = deltaX*deltaY; curULC.y = ulcLimit.y; curLRC.y = curULC.y + deltaY; for (i=0; i<m_numVertIntRect; i++) { curULC.x = ulcLimit.x; curLRC.x = curULC.x + deltaX; for (j=0; j<m_numHoriIntRect; j++) { for (c=0; c<m_numChannels; c++) { // Compute mean color channel double tmpVal = 
(double)(m_sumPixPtr[c][curLRC.y*m_imgSumWidth+curLRC.x] - m_sumPixPtr[c][curLRC.y*m_imgSumWidth+curULC.x] - m_sumPixPtr[c][curULC.y*m_imgSumWidth+curLRC.x] + m_sumPixPtr[c][curULC.y*m_imgSumWidth+curULC.x])/numPixelsInSubROI; *featPtr++ = tmpVal; } curULC.x += deltaX; curLRC.x += deltaX; } curULC.y += deltaY; curLRC.y += deltaY; } } catch (BaseException *e) { throw e->add(new GeneralException("Exception in IntegralColorExtraction::ExtractFeatures:",__FILE__,__LINE__)); } } void IntegralColorExtraction::ExtractFeatures(IplImage *i_input, VisualROI *i_roi) { try { Preprocess((const unsigned char *)(i_input->imageData)); ExtractFeatures(i_roi); } catch (BaseException *e) { throw e->add(new GeneralException("Exception in IntegralColorExtraction::ExtractFeatures:",__FILE__,__LINE__)); } } void IntegralColorExtraction::Preprocess(const unsigned char *i_src) { int c, p; const unsigned char *p_srcPix = i_src; // First extract all channels of image for (c=0; c<m_numChannels; c++) { m_chPixPtr[c] = (unsigned char *)(m_chImage[c]->imageData); } for (p=0; p<m_numPixels; p++) { for (c=0; c<m_numChannels; c++) { *(m_chPixPtr[c])++ = *p_srcPix++; } } // Preprocessing consists of computing the sum of pixels // in order to have the integral images. for (c=0; c<m_numChannels; c++) { cvIntegral(m_chImage[c], m_sumImage[c], NULL, NULL); } } // // Private methods // void IntegralColorExtraction::Initialize() { m_numPixels = m_width*m_height; m_numBytesInFrame = m_numPixels*m_numChannels; m_numIntRect = m_numHoriIntRect*m_numVertIntRect; m_featVect = RCPtr<Vector<VisualFeatureDesc<double> *> >(Vector<VisualFeatureDesc<double> *>::alloc(1)); (*m_featVect)[0] = new VisualIntegralDesc<double>(e_VISUALINTDESC_EuclideanDist, m_numChannels, m_numIntRect, m_maxValue); m_chImage = new IplImage *[m_numChannels]; m_sumImage = new IplImage *[m_numChannels]; m_chPixPtr = new unsigned char*[m_numChannels]; m_sumPixPtr = new int *[m_numChannels]; CvSize imgSize; imgSize.width = m_width; imgSize.height = m_height; m_curImage = cvCreateImage(imgSize, IPL_DEPTH_8U, m_numChannels); // Sum images are of size (m_width+1)x(m_height+1) m_imgSumWidth = m_width+1; CvSize sumSize; sumSize.width = m_imgSumWidth; sumSize.height = m_height+1; for (int c=0; c<m_numChannels; c++) { m_chImage[c] = cvCreateImage(imgSize, IPL_DEPTH_8U, 1); m_sumImage[c] = cvCreateImage(sumSize, IPL_DEPTH_32S, 1); } } }//namespace RobotFlow --- NEW FILE: VisualIntegralDesc.cc --- /* Copyright (C) 2005 Pierre Moisan (Pie...@US...) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "VisualIntegralDesc.h" using namespace FD; using namespace std; namespace RobotFlow { DECLARE_TYPE(VisualIntegralDesc<double>) }//namespace RobotFlow --- NEW FILE: IntegralEdgesOriExtraction.cc --- /* Copyright (C) 2005 Pierre Moisan (Pie...@US...) 
This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "IntegralEdgesOriExtraction.h" using namespace FD; using namespace std; namespace RobotFlow { DECLARE_NODE(IntegralEdgesOriExtraction) DECLARE_TYPE(IntegralEdgesOriExtraction) /*Node * * @name IntegralEdgesOriExtraction * @category RobotFlow:Vision:FeaturesExtraction * @description Integral edges orientation features extraction. * * @parameter_name FRAME_WIDTH * @parameter_type int * @parameter_value 320 * @parameter_description Video frame width. * * @parameter_name FRAME_HEIGHT * @parameter_type int * @parameter_value 240 * @parameter_description Video frame height. * * @parameter_name NUM_CHANNELS * @parameter_type int * @parameter_value 3 * @parameter_description Number of channels in video frame. * * @parameter_name NUM_ORIENTATIONS * @parameter_type int * @parameter_value 4 * @parameter_description Number of edge orientations. * * @parameter_name NUM_CHANNELS * @parameter_type int * @parameter_value 3 * @parameter_description Number of channels in video frame. * * @parameter_name NUM_HORI_RECT * @parameter_type int * @parameter_value 2 * @parameter_description Number of horizontal rectangle to use for integral features. * * @parameter_name NUM_VERT_RECT * @parameter_type int * @parameter_value 2 * @parameter_description Number of vertical rectangle to use for integral features. * * @parameter_name MIN_EDGES_STRENGTH * @parameter_type float * @parameter_value 100.0 * @parameter_description Threshold to remove noisy/weak edges. * * @parameter_name MAX_EDGES_STRENGTH * @parameter_type float * @parameter_value 1000.0 * @parameter_description Limit on the edge strength. * * @input_name IN_IMAGE * @input_type Image * @input_description Current frame to process. * * @input_name CURRENT_ROI * @input_type VisualROI * @input_description The current region of interest. * * @input_name PREPROCESS_FRAME * @input_type bool * @input_description Flag indicating to preprocess a new image. * * @output_name OUT_FEATURES * @output_type Vector<VisualFeatureDesc<double> *> * @output_description Output features descriptor. * * @output_name PREPROCESS_COMPLETED * @output_type int * @output_description Output to force preprocessing. 
* END*/ static const double k_IEOE_pi = 3.14159265358979323846; static const double k_IEOE_2piInv = 1.0/(2.0*k_IEOE_pi); // // Default constructor for Object // IntegralEdgesOriExtraction::IntegralEdgesOriExtraction() : m_width(-1), m_height(-1), m_numChannels(-1), m_numOriBins(-1), m_edgesStrTresh(0.0), m_maxStrengthValue(0.0), m_featVect(NULL) { } // // Constructor with complete intialisation // IntegralEdgesOriExtraction::IntegralEdgesOriExtraction(int i_width, int i_height, int i_numChannels, int i_numOriBins, double i_edgesStrTresh, double i_maxStrengthValue) : m_width(i_width), m_height(i_height), m_numChannels(i_numChannels), m_numOriBins(i_numOriBins), m_edgesStrTresh(i_edgesStrTresh), m_maxStrengthValue(i_maxStrengthValue), m_featVect(NULL) { Initialize(); } // // BufferedNode constructor // IntegralEdgesOriExtraction::IntegralEdgesOriExtraction(string nodeName, ParameterSet params) : VisualFeaturesExtraction<double>(nodeName, params), m_featVect(NULL) { m_imageInID = addInput("IN_IMAGE"); m_roiInID = addInput("CURRENT_ROI"); m_useNextImgInID = addInput("PREPROCESS_FRAME"); m_featuresOutID = addOutput("OUT_FEATURES"); m_ppCompletedOutID = addOutput("PREPROCESS_COMPLETED"); m_width = dereference_cast<int>(parameters.get("FRAME_WIDTH")); m_height = dereference_cast<int>(parameters.get("FRAME_HEIGHT")); m_numChannels = dereference_cast<int>(parameters.get("NUM_CHANNELS")); m_numOriBins = dereference_cast<int>(parameters.get("NUM_ORIENTATIONS")); m_numHoriIntRect = dereference_cast<int>(parameters.get("NUM_HORI_RECT")); m_numVertIntRect = dereference_cast<int>(parameters.get("NUM_VERT_RECT")); m_edgesStrTresh = dereference_cast<float>(parameters.get("MIN_EDGES_STRENGTH")); m_maxStrengthValue = dereference_cast<float>(parameters.get("MAX_EDGES_STRENGTH")); Initialize(); } IntegralEdgesOriExtraction::~IntegralEdgesOriExtraction() { for (int b=0; b<m_numOriBins; b++) { cvReleaseImage(&(m_edgesOriSum[b])); cvReleaseImage(&(m_edgesOri[b])); } delete [] m_edgesOriSumPix; delete [] m_edgesOriPix; delete [] m_edgesOriSum; delete [] m_edgesOri; cvReleaseImage(&m_oriYImage); cvReleaseImage(&m_oriXImage); cvReleaseImage(&m_grayImage); cvReleaseImage(&m_curImage); } // Modified BufferedNode request method to support cyclic node connection void IntegralEdgesOriExtraction::request(int output_id, const ParameterSet &req) { if (req.exist("LOOKAHEAD")) { outputs[output_id].lookAhead = max(outputs[output_id].lookAhead,dereference_cast<int> (req.get("LOOKAHEAD"))); } if (req.exist("LOOKBACK")) { outputs[output_id].lookBack = max(outputs[output_id].lookBack,dereference_cast<int> (req.get("LOOKBACK"))); } if (req.exist("INORDER")) { inOrder = true; } int outputLookAhead=0, outputLookBack=0; outputLookAhead=max(outputLookAhead, outputs[output_id].lookAhead); outputLookBack =max(outputLookBack, outputs[output_id].lookBack); // Every output usually requires these inputs ParameterSet nextImgReq; nextImgReq.add("LOOKAHEAD", ObjectRef(Int::alloc(inputsCache[m_useNextImgInID].lookAhead+outputLookAhead))); nextImgReq.add("LOOKBACK", ObjectRef(Int::alloc(inputsCache[m_useNextImgInID].lookBack+outputLookBack))); inputs[m_useNextImgInID].node->request(inputs[m_useNextImgInID].outputID,nextImgReq); if (output_id == m_featuresOutID) { ParameterSet myReq; myReq.add("LOOKAHEAD", ObjectRef(Int::alloc(inputsCache[m_roiInID].lookAhead+outputLookAhead))); myReq.add("LOOKBACK", ObjectRef(Int::alloc(inputsCache[m_roiInID].lookBack+outputLookBack))); inputs[m_roiInID].node->request(inputs[m_roiInID].outputID,myReq); } else if 
(output_id == m_ppCompletedOutID) { ParameterSet myReq; myReq.add("LOOKAHEAD", ObjectRef(Int::alloc(inputsCache[m_imageInID].lookAhead+outputLookAhead))); myReq.add("LOOKBACK", ObjectRef(Int::alloc(inputsCache[m_imageInID].lookBack+outputLookBack))); inputs[m_imageInID].node->request(inputs[m_imageInID].outputID, myReq); } else { throw new GeneralException ("IntegralEdgesOriExtraction::request : unknown output ID.",__FILE__,__LINE__); } } void IntegralEdgesOriExtraction::calculate(int output_id, int count, Buffer &out) { try { bool useNext = dereference_cast<bool>(getInput(m_useNextImgInID, count)); if (useNext) { RCPtr<Image> imageRef = getInput(m_imageInID, count); // Verify input image sanity if (imageRef->get_width() != m_width || imageRef->get_height() != m_height || imageRef->get_pixelsize() != m_numChannels) { throw new GeneralException ("IntegralEdgesOriExtraction::calculate : image parameters do not correspond to given input.",__FILE__,__LINE__); } // Copy input image memcpy(m_curImage->imageData, imageRef->get_data(), m_numBytesInFrame); // Preprocess input image Preprocess(m_curImage); } if (output_id == m_featuresOutID) { ObjectRef roiRef = getInput(m_roiInID, count); if (!roiRef->isNil()) { RCPtr<VisualROI> roiPtr = roiRef; ExtractFeatures(roiPtr.get()); (*outputs[m_featuresOutID].buffer)[count] = m_featVect; } else { (*outputs[m_featuresOutID].buffer)[count] = ObjectRef(nilObject); } } else if (output_id == m_ppCompletedOutID) { // Preprocess image than output true RCPtr<Image> imageRef = getInput(m_imageInID, count); // Verify input image sanity if (imageRef->get_width() != m_width || imageRef->get_height() != m_height || imageRef->get_pixelsize() != m_numChannels) { throw new GeneralException ("IntegralEdgesOriExtraction::calculate : image parameters do not correspond to given input.",__FILE__,__LINE__); } // Copy input image memcpy(m_curImage->imageData, imageRef->get_data(), m_numBytesInFrame); // Preprocess input image Preprocess(m_curImage); (*outputs[m_ppCompletedOutID].buffer)[count] = ObjectRef(Int::alloc(1)); } } catch (BaseException *e) { throw e->add(new GeneralException("Exception in IntegralEdgesOriExtraction::calculate:",__FILE__,__LINE__)); } } void IntegralEdgesOriExtraction::ExtractFeatures(VisualROI *i_roi) { try { CvPoint ulcLimit, lrcLimit, curULC, curLRC; int roiWidth, roiHeight, deltaX, deltaY; double numPixelsInSubROI; int b,i,j; double *featPtr = (*m_featVect)[0]->GetFeatures(); int cenX = i_roi->GetXCen(); int cenY = i_roi->GetYCen(); if (cenX < 0 || cenX >= m_width || cenY < 0 || cenY >= m_height) { // ROI center is out of image plane // Ignore current ROI (*m_featVect)[0]->SetValidity(false); return; } ulcLimit.x = cenX - i_roi->GetHSX(); if (ulcLimit.x < 0) { ulcLimit.x = 0; } else if (ulcLimit.x >= m_width) { ulcLimit.x = m_width-1; } ulcLimit.y = cenY - i_roi->GetHSY(); if (ulcLimit.y < 0) { ulcLimit.y = 0; } else if (ulcLimit.y >= m_height) { ulcLimit.y = m_height-1; } lrcLimit.x = cenX + i_roi->GetHSX(); if (lrcLimit.x < 0) { lrcLimit.x = 0; } else if (lrcLimit.x >= m_width) { lrcLimit.x = m_width-1; } lrcLimit.y = cenY + i_roi->GetHSY(); if (lrcLimit.y < 0) { lrcLimit.y = 0; } else if (lrcLimit.y >= m_height) { lrcLimit.y = m_height-1; } roiWidth = lrcLimit.x - ulcLimit.x; roiHeight = lrcLimit.y - ulcLimit.y; if (roiWidth==0 || roiHeight==0) { // Invalid ROI (*m_featVect)[0]->SetValidity(false); return; } else { (*m_featVect)[0]->SetValidity(true); } //cout << "ROI Info: ulc=(" << ulcLimit.x << "," << ulcLimit.y << ") lrc=(" << lrcLimit.x << 
"," << lrcLimit.y << endl; if (m_numHoriIntRect) { deltaX = roiWidth/m_numHoriIntRect; } else { deltaX = roiWidth; } if (m_numVertIntRect) { deltaY = roiHeight/m_numVertIntRect; } else { deltaY = roiHeight; } numPixelsInSubROI = deltaX*deltaY; curULC.y = ulcLimit.y; curLRC.y = curULC.y + deltaY; for (i=0; i<m_numVertIntRect; i++) { curULC.x = ulcLimit.x; curLRC.x = curULC.x + deltaX; for (j=0; j<m_numHoriIntRect; j++) { // Compute orientation features for (b=0; b<m_numOriBins; b++) { *featPtr++ = (m_edgesOriSumPix[b][lrcLimit.y*m_imgSumWidth+lrcLimit.x] - m_edgesOriSumPix[b][lrcLimit.y*m_imgSumWidth+ulcLimit.x] - m_edgesOriSumPix[b][ulcLimit.y*m_imgSumWidth+lrcLimit.x] + m_edgesOriSumPix[b][ulcLimit.y*m_imgSumWidth+ulcLimit.x])/numPixelsInSubROI; } curULC.x += deltaX; curLRC.x += deltaX; } curULC.y += deltaY; curLRC.y += deltaY; } } catch (BaseException *e) { throw e->add(new GeneralException("Exception in IntegralEdgesOriExtraction::ExtractFeatures:",__FILE__,__LINE__)); } } void IntegralEdgesOriExtraction::ExtractFeatures(IplImage *i_input, VisualROI *i_roi) { try { Preprocess(i_input); ExtractFeatures(i_roi); } catch (BaseException *e) { throw e->add(new GeneralException("Exception in IntegralEdgesOriExtraction::ExtractFeatures:",__FILE__,__LINE__)); } } void IntegralEdgesOriExtraction::Preprocess(IplImage *i_srcImg) { int b, p; // Convert to graysacle if (m_numChannels == 3) { cvCvtColor(i_srcImg, m_grayImage, CV_BGR2GRAY); } else if (m_numChannels == 1) { // Copy input image memcpy(m_grayImage->imageData, i_srcImg->imageData, m_numBytesInFrame); } else { throw new GeneralException ("IntegralEdgesOriExtraction::Preprocess : only images with 1 or 3 channels are supported.",__FILE__,__LINE__); } cvSobel(m_grayImage, m_oriXImage, 1, 0, 3); cvSobel(m_grayImage, m_oriYImage, 0, 1, 3); signed short *p_oriXPixels = (signed short *)(m_oriXImage->imageData); signed short *p_oriYPixels = (signed short *)(m_oriYImage->imageData); // Each orientation is assigned a channel for integral image processing for (b=0; b<m_numOriBins; b++) { // Assign pointers to each orientation image m_edgesOriPix[b] = (float *)(m_edgesOri[b]->imageData); } for (p=0; p<m_numPixels; p++) { // Compute edge strength float strength = sqrt((*p_oriXPixels)*(*p_oriXPixels) + (*p_oriYPixels)*(*p_oriYPixels)); if (strength < m_edgesStrTresh) { strength = 0; } else if (strength > m_maxStrengthValue) { strength = m_maxStrengthValue; } // Compute edge orientation // Orientation will be between [0, 2PI] float angle = atan2(*p_oriYPixels, *p_oriXPixels) + k_IEOE_pi; // Determine the orientation image channel int angleChannel = cvRound((angle*k_IEOE_2piInv)*(float)(m_numOriBins-1)); if (angleChannel < 0) { angleChannel = 0; } else if (angleChannel >= m_numOriBins) { angleChannel = m_numOriBins-1; } // Assign edge strengh to appropriate angle channel image for (b=0; b<m_numOriBins; b++) { if (b == angleChannel) { *(m_edgesOriPix[b])++ = strength; } else { *(m_edgesOriPix[b])++ = 0.f; } } p_oriXPixels++; p_oriYPixels++; } // Compute integral images on all orientation channels for (b=0; b<m_numOriBins; b++) { cvIntegral(m_edgesOri[b], m_edgesOriSum[b], NULL, NULL); } // Assign pointers on each orientation integral image for (b=0; b<m_numOriBins; b++) { m_edgesOriSumPix[b] = (double *)(m_edgesOriSum[b]->imageData); } } // // Private methods // void IntegralEdgesOriExtraction::Initialize() { m_numPixels = m_width*m_height; m_numBytesInFrame = m_numPixels*m_numChannels; m_numIntRect = m_numHoriIntRect*m_numVertIntRect; m_featVect = 
RCPtr<Vector<VisualFeatureDesc<double> *> >(Vector<VisualFeatureDesc<double> *>::alloc(1)); (*m_featVect)[0] = new VisualIntegralDesc<double>(e_VISUALINTDESC_EuclideanDist, m_numOriBins, m_numIntRect, m_maxStrengthValue); CvSize imgSize; imgSize.width = m_width; imgSize.height = m_height; m_curImage = cvCreateImage(imgSize, IPL_DEPTH_8U, m_numChannels); m_grayImage = cvCreateImage( imgSize, IPL_DEPTH_8U, 1 ); m_oriXImage = cvCreateImage( imgSize, IPL_DEPTH_16S, 1 ); m_oriYImage = cvCreateImage( imgSize, IPL_DEPTH_16S, 1 ); CvSize sumImgSize; m_imgSumWidth = m_width+1; sumImgSize.width = m_imgSumWidth; sumImgSize.height = m_height+1; m_edgesOri = new IplImage*[m_numOriBins]; m_edgesOriSum = new IplImage*[m_numOriBins]; m_edgesOriPix = new float*[m_numOriBins]; m_edgesOriSumPix = new double*[m_numOriBins]; for (int b=0; b<m_numOriBins; b++) { m_edgesOri[b] = cvCreateImage( imgSize, IPL_DEPTH_32F, 1 ); m_edgesOriSum[b] = cvCreateImage( sumImgSize, IPL_DEPTH_64F, 1 ); } } }//namespace RobotFlow --- NEW FILE: IntegralLBPExtraction.cc --- /* Copyright (C) 2005 Pierre Moisan (Pie...@US...) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "IntegralLBPExtraction.h" using namespace FD; [...1064 lines suppressed...] // // Code adapted from // Cpplibs C++ libraries and PRAPI // Copyright (C) 2001 Topi Mäenpää and Jaakko Viertola // int IntegralLBPExtraction::CountOneBits(unsigned int i_val) { int count = 0; unsigned int base = 1; for (int i=0;i<m_numSamples;i++) { if (i_val & base) count++; base <<= 1; } return count; } }//namespace RobotFlow |
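All three extraction nodes lean on the same property of cvIntegral: once the summed-area table is built, the sum over any axis-aligned rectangle costs four lookups, so the mean of every sub-rectangle of the ROI is O(1) regardless of its size. A minimal sketch of that lookup as used in ExtractFeatures, assuming the IPL_DEPTH_32S sum image produced for the colour channels and sumWidth = frameWidth + 1 (the edge-orientation variant is identical with double-valued sums):

    // Mean pixel value over the rectangle with upper-left corner (ulcX, ulcY)
    // and lower-right corner (lrcX, lrcY), read from a cvIntegral sum image.
    double RectMeanFromIntegral(const int *sum, int sumWidth,
                                int ulcX, int ulcY, int lrcX, int lrcY)
    {
       double area = (double)(lrcX - ulcX) * (double)(lrcY - ulcY);
       double s = (double)(sum[lrcY * sumWidth + lrcX]
                         - sum[lrcY * sumWidth + ulcX]
                         - sum[ulcY * sumWidth + lrcX]
                         + sum[ulcY * sumWidth + ulcX]);
       return s / area;
    }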
From: Pierre M. <sid...@us...> - 2005-06-02 16:51:43
|
Update of /cvsroot/robotflow/RobotFlow/Vision/include In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv12372 Modified Files: Makefile.am Log Message: Added files for integral image features. Index: Makefile.am =================================================================== RCS file: /cvsroot/robotflow/RobotFlow/Vision/include/Makefile.am,v retrieving revision 1.11 retrieving revision 1.12 diff -C2 -d -r1.11 -r1.12 *** Makefile.am 26 May 2005 19:52:50 -0000 1.11 --- Makefile.am 2 Jun 2005 16:51:35 -0000 1.12 *************** *** 5,8 **** --- 5,11 ---- OPENCV_FILES = VisualFeaturesExtraction.h \ ColorHistExtraction.h \ + IntegralColorExtraction.h \ + IntegralEdgesOriExtraction.h \ + IntegralLBPExtraction.h \ MeanShiftTracker.h \ PFGenericParticle.h \ *************** *** 15,18 **** --- 18,22 ---- VisualFeatureDesc.h \ VisualHistogramDesc.h \ + VisualIntegralDesc.h \ VisualROI.h \ VisualTarget.h \ |
From: Pierre M. <sid...@us...> - 2005-06-02 16:49:48
|
Update of /cvsroot/robotflow/RobotFlow/Vision/include In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv11331 Added Files: IntegralColorExtraction.h IntegralEdgesOriExtraction.h IntegralLBPExtraction.h VisualIntegralDesc.h Log Message: New integral image features extraction and descriptor. --- NEW FILE: IntegralLBPExtraction.h --- /* Copyright (C) 2005 Pierre Moisan (Pie...@US...) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #ifndef _INTEGRALLBPEXTRACTION_H_ #define _INTEGRALLBPEXTRACTION_H_ #include "VisualFeaturesExtraction.h" #include "VisualIntegralDesc.h" #include "VisualTarget.h" #include <stdlib.h> #include <sys/timeb.h> namespace RobotFlow { // // Integral LBP (Lcoal Binary Pattern) Features Extraction for RobotFlow // class IntegralLBPExtraction : public VisualFeaturesExtraction<double> { public: // // Default constructor for Object // IntegralLBPExtraction(); // // Constructor with complete intialisation // IntegralLBPExtraction(int i_width, int i_height, int i_numChannels, int i_numHoriIntRect, int i_numVertIntRect, int i_numSamples, int i_predicate, bool i_doInterpolation, bool i_useUniform, int i_startAngle, double i_maxValue); // // BufferedNode constructor // IntegralLBPExtraction(std::string nodeName, FD::ParameterSet params); // // Constructor using input stream // IntegralLBPExtraction(std::istream &in) { readFrom(in); } virtual ~IntegralLBPExtraction(); // Default routine to print a IntegralLBPExtraction object to an output stream void printOn(std::ostream &out) const { throw new FD::GeneralException("Exception in IntegralLBPExtraction::printOn: method not yet implemented.",__FILE__,__LINE__); } // Default routine to read a IntegralLBPExtraction object from an input stream void readFrom(std::istream &in) { throw new FD::GeneralException("Exception in IntegralLBPExtraction::readFrom: method not yet implemented.",__FILE__,__LINE__); } virtual void request(int output_id, const FD::ParameterSet &req); void calculate(int output_id, int count, FD::Buffer &out); void Preprocess(IplImage *i_src); void ExtractFeatures(VisualROI *i_roi); void ExtractFeatures(IplImage *i_input, VisualROI *i_roi); VisualFeatureDesc<double> *GetDescriptor() { return (*m_featVect)[0]; } const VisualFeatureDesc<double> *GetCstDescriptor() const { return (const VisualIntegralDesc<double> *)(*m_featVect)[0]; } private: void Initialize(); void InitSamplePoints(); void ExtractLBPGeneralWithInterpolation(unsigned char *i_pixPtr); void ExtractLBPGeneralWithoutInterpolation(unsigned char *i_pixPtr); void ExtractLBPGeneralRIU2WithInterpolation(unsigned char *i_pixPtr); void ExtractLBPGeneralRIU2WithoutInterpolation(unsigned char *i_pixPtr); void ExtractLBP8WithInterpolation(unsigned char *i_pixPtr); void ExtractLBP8WithoutInterpolation(unsigned char *i_pixPtr); void ExtractLBP8RIU2WithInterpolation(unsigned char *i_pixPtr); void 
ExtractLBP8RIU2WithoutInterpolation(unsigned char *i_pixPtr); int ComputeBitTransitions(unsigned int i_val); int CountOneBits(unsigned int i_val); private: // Input IDs (for BufferedNode) int m_imageInID; int m_roiInID; int m_useNextImgInID; // Output IDs (for BufferedNode) int m_featuresOutID; int m_ppCompletedOutID; // Width of images int m_width; int m_imgSumWidth; // Height of images int m_height; // Number of channels in an image int m_numChannels; // Number of pixels in an image int m_numPixels; // Number of bytes in an image int m_numBytesInFrame; // Number of independant rectangular region to compute // the integral color features int m_numHoriIntRect; int m_numVertIntRect; int m_numIntRect; // The number of samples in the local neighborhood. int m_numSamples; // The current predicate (radius), i.e. the distance of the // neighborhood from its center. int m_predicate; // Interpolation flag bool m_doInterpolation; // Flag to use only uniform patterns i.e. patterns with 2 or less // bit transitions bool m_useUniform; // The angle of the first neighbor. int m_startAngle; // Maximum pixel channel value double m_maxValue; // Number of valid local binary patterns int m_numValidPattern; // Precalculated table of interpolation points. CvPoint *m_samplePoints; // Precalculated table of interpolation offsets. CvPoint2D32f *m_pointsOffsets; // Precalculated values for interpolation multiplication. double *m_BiLiMultipliers; // Temporary pixel pointers corresponding to the neighborhood samples unsigned char **m_tmpSamples; // Integral color descriptor for region of interest FD::RCPtr<FD::Vector<VisualFeatureDesc<double> *> > m_featVect; // Temporary image copy IplImage *m_curImage; // Grayscale version of current image IplImage *m_grayImage; // Each local pattern is an image IplImage **m_patternImage; // Pointer to channel image pixels unsigned char **m_patternPixPtr; int **m_sumPixPtr; // Sum of pixels (integral) image IplImage **m_sumImage; // Function pointer to the appropriate private extraction routine void (IntegralLBPExtraction::*m_extractionFct)(unsigned char *); }; } #endif --- NEW FILE: VisualIntegralDesc.h --- /* Copyright (C) 2005 Pierre Moisan (Pie...@US...) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. 
You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #ifndef _VISUALINTEGRALCOLORDESC_H_ #define _VISUALINTEGRALCOLORDESC_H_ #include <math.h> #include "VisualFeatureDesc.h" namespace RobotFlow { typedef enum { e_VISUALINTDESC_EuclideanDist = 0, // TODO: maybe add other similarity methods e_VISUALINTDESC_UnknownSimilarity } e_VISUALINTDESC_similarityType; // // Descriptor for integral image features // A typical integral image feature consists of a mean value // for a given region of interest in each channel of the image // template <class FeatType> class VisualIntegralDesc : public VisualFeatureDesc<FeatType> { public: VisualIntegralDesc() : VisualFeatureDesc<FeatType>(e_VISUALDESCRIPTOR_integral), m_simType(e_VISUALINTDESC_UnknownSimilarity), m_numClrChannels(0), m_numIntValues(0), m_featSize(0), m_valid(false) { SetSimilarityFct(); } VisualIntegralDesc(e_VISUALINTDESC_similarityType i_simType, unsigned int i_numClrChannels, unsigned int i_numIntValues, FeatType i_maxValue) : VisualFeatureDesc<FeatType>(e_VISUALDESCRIPTOR_integral), m_simType(i_simType), m_numClrChannels(i_numClrChannels), m_numIntValues(i_numIntValues), m_maxValue(i_maxValue), m_valid(true) { m_featSize = m_numClrChannels*m_numIntValues; m_intFeatures = new FeatType[m_featSize]; SetSimilarityFct(); } VisualIntegralDesc(const VisualIntegralDesc<FeatType> &i_ref) { try { this->SetType(i_ref.GetType()); m_simType = i_ref.m_simType; m_numClrChannels = i_ref.m_numClrChannels; m_numIntValues = i_ref.m_numIntValues; m_featSize = i_ref.m_featSize; m_maxValue = i_ref.m_maxValue; m_valid = i_ref.m_valid; m_intFeatures = new FeatType[m_featSize]; for (int i=0; i<m_featSize; i++) { m_intFeatures[i] = i_ref.m_intFeatures[i]; } SetSimilarityFct(); } catch (FD::BaseException *e) { throw e->add(new FD::GeneralException("Exception caught in VisualIntegralDesc::VisualIntegralDesc:",__FILE__,__LINE__)); } } ~VisualIntegralDesc() { delete [] m_intFeatures; } VisualIntegralDesc<FeatType> & operator =(const VisualIntegralDesc<FeatType> &i_ref) { // Avoid self assignment if (&i_ref == this) { return *this; } this->SetType(i_ref.GetType()); m_simType = i_ref.m_simType; m_numClrChannels = i_ref.m_numClrChannels; m_numIntValues = i_ref.m_numIntValues; m_featSize = i_ref.m_featSize; m_maxValue = i_ref.m_maxValue; m_valid = i_ref.m_valid; m_intFeatures = new FeatType[m_featSize]; for (int i=0; i<m_featSize; i++) { m_intFeatures[i] = i_ref.m_intFeatures[i]; } SetSimilarityFct(); return *this; } VisualIntegralDesc<FeatType>* clone() const { return new VisualIntegralDesc<FeatType>(*this); } // Default routine to print a VisualIntegralDesc object to an output stream void printOn(std::ostream &out) const { throw new FD::GeneralException("Exception in VisualIntegralDesc::printOn: cannot use base class routine.",__FILE__,__LINE__); } // Default routine to read a VisualIntegralDesc object from an input stream void readFrom(std::istream &in) { throw new FD::GeneralException("Exception in VisualIntegralDesc::readFrom: cannot use base class routine.",__FILE__,__LINE__); } double Similarity(const FeatType *i_candidate, unsigned int i_size) const { try { if (!i_candidate) { throw new FD::GeneralException("VisualIntegralDesc::Similarity: invalid (NULL) candidate features.",__FILE__,__LINE__); } if (i_size != m_featSize) { throw new FD::GeneralException("VisualIntegralDesc::Similarity: candidate features size 
differs from current features descriptor size.",__FILE__,__LINE__); } if (!m_similarityFct) { throw new FD::GeneralException("VisualIntegralDesc::Similarity: invalid or unknown similarity type.",__FILE__,__LINE__); } if (!m_valid) { // Invalid descriptor, output maximal distance return 0.0; } // Use appropriate function return (this->*m_similarityFct)(i_candidate); } catch (FD::BaseException *e) { throw e->add(new FD::GeneralException("Exception caught in VisualIntegralDesc::Similarity:",__FILE__,__LINE__)); } } void Adapt(const FeatType *i_candidate, unsigned int i_size, double i_rate) { try { if (!i_candidate) { throw new FD::GeneralException("VisualIntegralDesc::Adapt: invalid (NULL) candidate features.",__FILE__,__LINE__); } if (i_size != m_featSize) { throw new FD::GeneralException("VisualIntegralDesc::Adapt: candidate features size differs from current features descriptor size.",__FILE__,__LINE__); } if (i_rate < 0.0 || i_rate > 1.0) { throw new FD::GeneralException ("VisualHistogramDesc::Adapt : adaptation rate must be in the interval [0.0,1.0]",__FILE__,__LINE__); } if (i_rate == 0.0) { // Nothing to do return; } if (i_rate == 1.0) { SetFeatures(i_candidate, i_size); return; } int i; const FeatType *p_adaptFeat = i_candidate; double compRate = 1.0 - i_rate; for (i=0; i<m_featSize; i++) { m_intFeatures[i] = (FeatType)(compRate*(double)(m_intFeatures[i]) + i_rate*(double)(p_adaptFeat[i])); } } catch (FD::BaseException *e) { throw e->add(new FD::GeneralException("Exception caught in VisualIntegralDesc::Adapt:",__FILE__,__LINE__)); } } unsigned int GetSize() const { return m_featSize; } FeatType *GetFeatures() { return m_intFeatures; } const FeatType *GetCstFeatures() const { return (const FeatType *)m_intFeatures; } bool GetValidity() const { return m_valid; } void SetSize(unsigned int i_size) { throw new FD::GeneralException("Exception in VisualIntegralDesc::SetSize: cannot use base class routine.",__FILE__,__LINE__); } void SetFeatures(const FeatType *i_ref, unsigned int i_size) { try { if (!i_ref) { throw new FD::GeneralException("VisualIntegralDesc::SetFeatures: invalid (NULL) input features.",__FILE__,__LINE__); } if (i_size != m_featSize) { throw new FD::GeneralException("VisualIntegralDesc::SetFeatures: candidate features size differs from current features descriptor size.",__FILE__,__LINE__); } int i; const FeatType *p_inFeat = i_ref; for (i=0; i<m_featSize; i++) { m_intFeatures[i] = *p_inFeat++; } } catch (FD::BaseException *e) { throw e->add(new FD::GeneralException("Exception caught in VisualIntegralDesc::SetFeatures:",__FILE__,__LINE__)); } } void SetValidity(bool i_flag) { m_valid = i_flag; } private: void SetSimilarityFct() { if (m_simType == e_VISUALINTDESC_EuclideanDist) { m_similarityFct = &VisualIntegralDesc::EuclideanDist; } else { m_similarityFct = NULL; } } double EuclideanDist(const FeatType *i_candidate) const { FeatType diff; double dist = 0.0; const FeatType *p_curFeat = (const FeatType *)m_intFeatures; const FeatType *p_candFeat = i_candidate; for (int i=0; i<m_numIntValues; i++) { double clrDist = 0.0; for (int c=0; c<m_numClrChannels; c++) { diff = ((*p_curFeat) - (*p_candFeat))/m_maxValue; clrDist += (double)(diff*diff); p_curFeat++; p_candFeat++; } dist += sqrt(clrDist); } dist /= m_numIntValues; if (dist > 1.0) { std::cout << "VisualIntegralDesc::EuclideanDist: Invalid dist=" << dist << std::endl; throw new FD::GeneralException("VisualIntegralDesc::EuclideanDist: distance should not be greater than 1.0.",__FILE__,__LINE__); } return 1.0-dist; } private: 
// Similarity/Distance type to use e_VISUALINTDESC_similarityType m_simType; // Number of color channels of the color space used unsigned int m_numClrChannels; // Number of integral rectangle values/features unsigned int m_numIntValues; // The size is m_numIntValues*m_numClrChannels unsigned int m_featSize; // Maximum channel value FeatType m_maxValue; // Validity flag in cases where ROI was invalid for features extraction bool m_valid; // Integral features FeatType *m_intFeatures; // Function pointer to the appropriate private similarity routine double (VisualIntegralDesc::*m_similarityFct)(const FeatType *) const; }; } #endif --- NEW FILE: IntegralColorExtraction.h --- /* Copyright (C) 2005 Pierre Moisan (Pie...@US...) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #ifndef _INTEGRALCOLOREXTRACTION_H_ #define _INTEGRALCOLOREXTRACTION_H_ #include "VisualFeaturesExtraction.h" #include "VisualIntegralDesc.h" #include "VisualTarget.h" #include <stdlib.h> #include <sys/timeb.h> namespace RobotFlow { // // Integral Color Features Extraction for RobotFlow // class IntegralColorExtraction : public VisualFeaturesExtraction<double> { public: // // Default constructor for Object // IntegralColorExtraction(); // // Constructor with complete intialisation // IntegralColorExtraction(int i_width, int i_height, int i_numChannels, int i_numHoriIntRect, int i_numVertIntRect, double i_maxValue); // // BufferedNode constructor // IntegralColorExtraction(std::string nodeName, FD::ParameterSet params); // // Constructor using input stream // IntegralColorExtraction(std::istream &in) { readFrom(in); } virtual ~IntegralColorExtraction(); // Default routine to print a IntegralColorExtraction object to an output stream void printOn(std::ostream &out) const { throw new FD::GeneralException("Exception in IntegralColorExtraction::printOn: method not yet implemented.",__FILE__,__LINE__); } // Default routine to read a IntegralColorExtraction object from an input stream void readFrom(std::istream &in) { throw new FD::GeneralException("Exception in IntegralColorExtraction::readFrom: method not yet implemented.",__FILE__,__LINE__); } virtual void request(int output_id, const FD::ParameterSet &req); void calculate(int output_id, int count, FD::Buffer &out); void Preprocess(const unsigned char *i_src); void ExtractFeatures(VisualROI *i_roi); void ExtractFeatures(IplImage *i_input, VisualROI *i_roi); VisualFeatureDesc<double> *GetDescriptor() { return (*m_featVect)[0]; } const VisualFeatureDesc<double> *GetCstDescriptor() const { return (const VisualIntegralDesc<double> *)(*m_featVect)[0]; } private: void Initialize(); private: // Input IDs (for BufferedNode) int m_imageInID; int m_roiInID; int m_useNextImgInID; // Output IDs (for BufferedNode) int m_featuresOutID; int m_ppCompletedOutID; // Width of images int m_width; int m_imgSumWidth; // Height of images int m_height; // Number of 
channels in an image int m_numChannels; // Number of pixels in an image int m_numPixels; // Number of bytes in an image int m_numBytesInFrame; // Number of independant rectangular region to compute // the integral color features int m_numHoriIntRect; int m_numVertIntRect; int m_numIntRect; // Maximum pixel channel value double m_maxValue; // Integral color descriptor for region of interest FD::RCPtr<FD::Vector<VisualFeatureDesc<double> *> > m_featVect; // Temporary image copy IplImage *m_curImage; // Each channel image IplImage **m_chImage; // Pointer to channel image pixels unsigned char **m_chPixPtr; int **m_sumPixPtr; // Sum of pixels (integral) image IplImage **m_sumImage; }; } #endif --- NEW FILE: IntegralEdgesOriExtraction.h --- /* Copyright (C) 2005 Pierre Moisan (Pie...@US...) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #ifndef _INTEGRALEDGESORIEXTRACTION_H_ #define _INTEGRALEDGESORIEXTRACTION_H_ #include "VisualFeaturesExtraction.h" #include "VisualIntegralDesc.h" #include "VisualTarget.h" #include <stdlib.h> #include <sys/timeb.h> namespace RobotFlow { // // Integral Edges Orientation Features Extraction for RobotFlow // class IntegralEdgesOriExtraction : public VisualFeaturesExtraction<double> { public: // // Default constructor for Object // IntegralEdgesOriExtraction(); // // Constructor with complete intialisation // IntegralEdgesOriExtraction(int i_width, int i_height, int i_numChannels, int i_numOriBins, double i_edgesStrTresh, double i_maxStrengthValue); // // BufferedNode constructor // IntegralEdgesOriExtraction(std::string nodeName, FD::ParameterSet params); // // Constructor using input stream // IntegralEdgesOriExtraction(std::istream &in) { readFrom(in); } virtual ~IntegralEdgesOriExtraction(); // Default routine to print a IntegralEdgesOriExtraction object to an output stream void printOn(std::ostream &out) const { throw new FD::GeneralException("Exception in IntegralEdgesOriExtraction::printOn: method not yet implemented.",__FILE__,__LINE__); } // Default routine to read a IntegralEdgesOriExtraction object from an input stream void readFrom(std::istream &in) { throw new FD::GeneralException("Exception in IntegralEdgesOriExtraction::readFrom: method not yet implemented.",__FILE__,__LINE__); } virtual void request(int output_id, const FD::ParameterSet &req); void calculate(int output_id, int count, FD::Buffer &out); void Preprocess(IplImage *i_srcImg); void ExtractFeatures(VisualROI *i_roi); void ExtractFeatures(IplImage *i_input, VisualROI *i_roi); VisualFeatureDesc<double> *GetDescriptor() { return (*m_featVect)[0]; } const VisualFeatureDesc<double> *GetCstDescriptor() const { return (const VisualIntegralDesc<double> *)(*m_featVect)[0]; } private: void Initialize(); private: // Input IDs (for BufferedNode) int m_imageInID; int m_roiInID; int m_useNextImgInID; // Output IDs (for BufferedNode) int m_featuresOutID; int 
m_ppCompletedOutID; // Width of images int m_width; int m_imgSumWidth; // Height of images int m_height; // Number of channels in an image int m_numChannels; // Number of pixels in an image int m_numPixels; // Number of bytes in an image int m_numBytesInFrame; // Number of orientations to use int m_numOriBins; // Number of independant rectangular region to compute // the integral edges orientation features int m_numHoriIntRect; int m_numVertIntRect; int m_numIntRect; // Edge strength threshold to remove noisy edges double m_edgesStrTresh; // Maximum strength channel value double m_maxStrengthValue; // Integral color descriptor for region of interest FD::RCPtr<FD::Vector<VisualFeatureDesc<double> *> > m_featVect; // Temporary image copy IplImage *m_curImage; // Grayscale version of current image IplImage *m_grayImage; // Result from filtering With Sobel (X) IplImage *m_oriXImage; // Result from filtering With Sobel (Y) IplImage *m_oriYImage; // Multi-channel edges orientation map IplImage **m_edgesOri; // Multi-channel sum of edges orientation map IplImage **m_edgesOriSum; // Pixel/value pointers float **m_edgesOriPix; double **m_edgesOriSumPix; }; } #endif |
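
Editor's note: the classes above (IntegralColorExtraction, IntegralEdgesOriExtraction, IntegralLBPExtraction) all store per-rectangle mean values computed from integral images (summed-area tables), and VisualIntegralDesc compares two such descriptors with a normalized Euclidean distance turned into a similarity in [0,1]. The standalone sketch below illustrates only that core mechanism; it does not use the RobotFlow/FlowDesigner or OpenCV APIs, and the names buildIntegral, rectMean and similarity are illustrative, not part of the committed headers.

--- EXAMPLE (illustrative, not part of the commit): integral_mean_sketch.cpp ---
// Minimal sketch of integral-image (summed-area table) mean features
// and a normalized Euclidean similarity, in the spirit of the classes above.
#include <cmath>
#include <cstdio>
#include <vector>

// Summed-area table with one extra row/column of zeros so that any
// rectangle sum needs only four lookups.
static std::vector<double> buildIntegral(const std::vector<unsigned char> &img,
                                         int width, int height)
{
    std::vector<double> sum((width + 1) * (height + 1), 0.0);
    for (int y = 0; y < height; y++) {
        double rowSum = 0.0;
        for (int x = 0; x < width; x++) {
            rowSum += img[y * width + x];
            sum[(y + 1) * (width + 1) + (x + 1)] =
                sum[y * (width + 1) + (x + 1)] + rowSum;
        }
    }
    return sum;
}

// Mean pixel value inside [x0,x1) x [y0,y1), in O(1) per query.
static double rectMean(const std::vector<double> &sum, int width,
                       int x0, int y0, int x1, int y1)
{
    const int w = width + 1;
    double s = sum[y1 * w + x1] - sum[y0 * w + x1]
             - sum[y1 * w + x0] + sum[y0 * w + x0];
    return s / double((x1 - x0) * (y1 - y0));
}

// Normalized Euclidean distance between two mean-value descriptors,
// mapped to a similarity in [0,1] (1 = identical). This is a simplified
// version of what VisualIntegralDesc::EuclideanDist computes per channel.
static double similarity(const std::vector<double> &a,
                         const std::vector<double> &b, double maxValue)
{
    double dist = 0.0;
    for (size_t i = 0; i < a.size(); i++) {
        double d = (a[i] - b[i]) / maxValue;
        dist += d * d;
    }
    return 1.0 - std::sqrt(dist / a.size());
}

int main()
{
    const int W = 8, H = 8;
    std::vector<unsigned char> img(W * H);
    for (int i = 0; i < W * H; i++)
        img[i] = (unsigned char)(i % 256);

    std::vector<double> sum = buildIntegral(img, W, H);

    // Descriptor: mean of the left and right halves of the image,
    // i.e. two "independent rectangular regions" of one channel.
    std::vector<double> desc;
    desc.push_back(rectMean(sum, W, 0, 0, W / 2, H));
    desc.push_back(rectMean(sum, W, W / 2, 0, W, H));

    std::printf("means: %.2f %.2f, self-similarity: %.3f\n",
                desc[0], desc[1], similarity(desc, desc, 255.0));
    return 0;
}
--- END EXAMPLE ---

The point of the integral image is that once it is built in one pass, the mean of any rectangle (and hence a whole grid of m_numHoriIntRect x m_numVertIntRect region means) costs a constant four lookups per region, which is why these extraction nodes precompute m_sumImage per channel.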
From: Pierre M. <sid...@us...> - 2005-06-02 16:48:10
|
Update of /cvsroot/robotflow/RobotFlow/Vision/src
In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv10282

Modified Files:
	VisualTargetPFInterface.cc
Log Message:
Added a sigma parameter for likelihood computation.

Index: VisualTargetPFInterface.cc
===================================================================
RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/VisualTargetPFInterface.cc,v
retrieving revision 1.1
retrieving revision 1.2
diff -C2 -d -r1.1 -r1.2
*** VisualTargetPFInterface.cc	26 May 2005 19:49:37 -0000	1.1
--- VisualTargetPFInterface.cc	2 Jun 2005 16:47:47 -0000	1.2
***************
*** 47,50 ****
--- 47,55 ----
   * @parameter_description Flag indicating to use the ROI rotation angle in the particle's state.
   *
+  * @parameter_name LIKELIHOOD_SIGMA
+  * @parameter_type float
+  * @parameter_value 200.0
+  * @parameter_description Exponential sigma value to discriminate likelihood.
+  *
   * @input_name IN_TARGET
   * @input_type VisualTarget<double>
***************
*** 105,108 ****
--- 110,114 ----
  	m_useScale = dereference_cast<bool>(parameters.get("USE_SCALE"));
  	m_useAngle = dereference_cast<bool>(parameters.get("USE_ROTATION_ANGLE"));
+ 	m_lSigma = dereference_cast<float>(parameters.get("LIKELIHOOD_SIGMA"));
  
  	// First 2 elements of state is center position x,y
***************
*** 255,259 ****
  	// likelihood of the current features vector
  	float sim = m_curTarget->Similarity(featRef.get());
! 	float likelihood = exp(20.0*(sim-1.f));
  
  	//cout << "Likelihood = " << likelihood << endl;
--- 261,266 ----
  	// likelihood of the current features vector
  	float sim = m_curTarget->Similarity(featRef.get());
! 	//cout << "Similarity = " << sim << endl;
! 	float likelihood = exp(m_lSigma*(sim-1.f));
  
  	//cout << "Likelihood = " << likelihood << endl;
***************
*** 335,338 ****
--- 342,346 ----
  	bool m_useScale;
  	bool m_useAngle;
+ 	float m_lSigma;
  
  	unsigned int m_stateSize;
|
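
Editor's note: the change above replaces the hard-coded constant 20.0 with the LIKELIHOOD_SIGMA parameter (default 200.0) in the particle weighting likelihood = exp(sigma * (sim - 1)), where sim is the descriptor similarity in [0,1]. The short standalone sketch below just tabulates that expression for both values to show how sigma controls how sharply the likelihood discriminates between similar and dissimilar candidates; the function name likelihood() is illustrative and the snippet is not part of the node.

--- EXAMPLE (illustrative, not part of the commit): likelihood_sigma_sketch.cpp ---
#include <cmath>
#include <cstdio>

// Same shape as the node's weighting: a similarity of 1.0 maps to a
// likelihood of 1.0, and lower similarities decay exponentially,
// faster for larger sigma.
static float likelihood(float similarity, float sigma)
{
    return std::exp(sigma * (similarity - 1.0f));
}

int main()
{
    const float sigmas[] = { 20.0f, 200.0f };   // former hard-coded value vs. new default
    const float sims[]   = { 1.0f, 0.99f, 0.95f, 0.9f };

    for (float sigma : sigmas) {
        std::printf("sigma = %6.1f:", sigma);
        for (float s : sims)
            std::printf("  L(%.2f) = %.4g", s, likelihood(s, sigma));
        std::printf("\n");
    }
    return 0;
}
--- END EXAMPLE ---

With sigma = 200, a candidate at similarity 0.99 already drops to exp(-2) of the peak weight, whereas sigma = 20 barely separates it from a perfect match, which is the discrimination the parameter description refers to.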
From: Pierre M. <sid...@us...> - 2005-06-02 16:47:04
|
Update of /cvsroot/robotflow/RobotFlow/Vision/src
In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv9685

Modified Files:
	VisualTargetManager.cc
Log Message:
Changed all pointers to RCPtr for a more efficient use of memory.

Index: VisualTargetManager.cc
===================================================================
RCS file: /cvsroot/robotflow/RobotFlow/Vision/src/VisualTargetManager.cc,v
retrieving revision 1.5
retrieving revision 1.6
diff -C2 -d -r1.5 -r1.6
*** VisualTargetManager.cc	26 May 2005 19:45:09 -0000	1.5
--- VisualTargetManager.cc	2 Jun 2005 16:46:44 -0000	1.6
***************
*** 331,335 ****
  	if (!roiRef->isNil()) {
  		// Initialize target at current ROI
! 		m_refTarget->SetROI(&(object_cast<VisualROI>(roiRef)));
  
  		m_refTarget->SetValidity(true);
--- 331,336 ----
  	if (!roiRef->isNil()) {
  		// Initialize target at current ROI
! 		RCPtr<VisualROI> roiRefPtr = roiRef;
! 		m_refTarget->SetROI(roiRefPtr.get());
  
  		m_refTarget->SetValidity(true);
|
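
Editor's note: the commit's rationale is memory efficiency via reference-counted handles (FD::RCPtr), and the visible pattern is "copy the handle, then pass the raw pointer with .get()". The sketch below illustrates that pattern using std::shared_ptr as a stand-in for FD::RCPtr, whose exact semantics are not shown in this diff beyond the .get() accessor; the VisualROI and VisualTarget types here are mocked purely for illustration.

--- EXAMPLE (illustrative analogy, not the FlowDesigner API): rcptr_analogy.cpp ---
#include <cstdio>
#include <memory>

struct VisualROI {
    VisualROI()                  { std::puts("VisualROI constructed"); }
    VisualROI(const VisualROI &) { std::puts("VisualROI deep-copied"); }
};

struct VisualTarget {
    // The target only borrows the ROI for the duration of the call,
    // so a raw pointer taken from the shared handle is sufficient.
    void SetROI(const VisualROI *roi) { std::printf("SetROI(%p)\n", (const void *)roi); }
};

int main()
{
    std::shared_ptr<VisualROI> roiRef = std::make_shared<VisualROI>();
    VisualTarget target;

    // Analogous to: RCPtr<VisualROI> roiRefPtr = roiRef;
    //               m_refTarget->SetROI(roiRefPtr.get());
    std::shared_ptr<VisualROI> roiRefPtr = roiRef;   // copies the handle (bumps the count)
    target.SetROI(roiRefPtr.get());                  // no VisualROI object is copied here

    std::printf("reference count: %ld\n", (long)roiRef.use_count());
    return 0;
}
--- END EXAMPLE ---

The design point is that copying a reference-counted handle only increments a counter, so the same ROI object can be shared between the node's input buffer and the target without duplicating the image data it refers to.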