Hey guys,
I am having problems with this common algorithm for converting RGB color values (in the range of 0-255) to HSB (called HSV) values with hue from 0-360 and saturation 0-255, and V 0-255.
The algorithm works except when I have colors where R =255, G=0 and B = anything.
Can anyone see anything I am doing wrong here??
////////////////////////////////////////////////////////////////////////////////
// Calculates and stores global HSB values when a new RGB color is stored.
// This is needed for the fade function to work properly
///////////////////////////////////////////////////////////////////////////////
void calcHSV(){
float var_Min;
float var_Max;
float del_Max;
float var_R;
float var_G;
float var_B;
float del_R;
float del_G;
float del_B;
var_R = ((float)color[R] / 255.0 ); //RGB from 0 to 255
var_G = ((float)color[G] / 255.0 );
var_B = ((float)color[B] / 255.0 );
var_Min = min( min(var_R, var_G) , var_B ); //Min. value of RGB
var_Max = max( max(var_R, var_G) , var_B ); //Max. value of RGB
del_Max = var_Max - var_Min; //Delta RGB value
hsb[V] = (float)var_Max * 255.0;
if ( del_Max == 0 ) //This is a gray, no chroma...
{
hsb[H] = 0; //HSV results from 0 to 1
hsb[S] = 0;
}
else //Chromatic data...
{
hsb[S] = ((float)del_Max / (float)var_Max) * 255.0;
del_R = ( ( ( var_Max - var_R ) / 6.0 ) + ( del_Max / 2.0 ) ) / del_Max;
del_G = ( ( ( var_Max - var_G ) / 6.0 ) + ( del_Max / 2.0 ) ) / del_Max;
del_B = ( ( ( var_Max - var_B ) / 6.0 ) + ( del_Max / 2.0 ) ) / del_Max;
if ( var_R == var_Max ) hsb[H] = (del_B - del_G) * 360.0;
else if ( var_G == var_Max ) hsb[H] = (( 1 / 3.0 ) + del_R - del_B) * 360.0;
else if ( var_B == var_Max ) hsb[H] = (( 2 / 3.0 ) + del_G - del_R) * 360.0;
if ( hsb[H] < 0 ) ; hsb[H] += 360.0;
if ( hsb[H] > 1 ) ; hsb[H] -= 360.0;
}
}
This is the code I translated this from: