efl: move Ector_Color to be defined in Efl.Gfx as it makes more sense.

This creates some potential naming clashes, which is why I came up with
the efl_gfx_color*_type_set functions. We will have to think about this more
carefully, as it now makes sense to pass these colors directly to our color
API. Ideally, the default 8-bit interface would become just a convenience
wrapper around the more capable 16-bit one.
Author: Cedric BAIL
Date:   2016-01-07 11:16:33 -08:00
commit c451d5885b
parent bc67b215ac
5 changed files with 66 additions and 52 deletions
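Before the diffs, a usage sketch of the new entry points (illustrative, not part of the commit): it assumes a tree with this commit applied, so that Efl.h exposes the new Efl_Gfx_Color helpers under the Eo API. The EFL_EO_API_SUPPORT define and the main() harness are my own scaffolding.

/* Hypothetical standalone demo; depending on build flags, the Eo API
 * branch of Efl.h may need this define before the include. */
#define EFL_EO_API_SUPPORT 1
#include <stdio.h>
#include <Efl.h>

int main(void)
{
   Efl_Gfx_Color c;

   /* 8-bit convenience entry point: each channel is widened into the
    * top 8 bits of an unsigned short and the color is tagged bits8. */
   efl_gfx_color_type_set(&c, 255, 128, 0, 255);
   printf("r = 0x%04x, tagged 8-bit: %d\n", (unsigned) c.r,
          efl_gfx_color_type_get(&c) == EFL_GFX_COLOR_TYPE_BITS8);

   /* Full-precision entry point: channels are stored verbatim and the
    * color is tagged bits16. */
   efl_gfx_color16_type_set(&c, 0xffff, 0x8000, 0x0000, 0xffff);
   printf("r = 0x%04x, tagged 16-bit: %d\n", (unsigned) c.r,
          efl_gfx_color_type_get(&c) == EFL_GFX_COLOR_TYPE_BITS16);

   return 0;
}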

Changed file 1 of 5:

@@ -60,6 +60,7 @@ installed_eflheadersdir = $(includedir)/efl-@VMAJ@
 dist_installed_eflheaders_DATA = \
 lib/efl/Efl_Config.h \
 lib/efl/Efl_Model_Common.h \
+lib/efl/interfaces/efl_gfx.x \
 lib/efl/Efl.h
 installed_eflinterfacesdir = $(includedir)/efl-@VMAJ@/interfaces

Changed file 2 of 5:

@@ -1,28 +1,6 @@
 #ifndef ECTOR_UTIL_H
 # define ECTOR_UTIL_H
-#define ECTOR_COLOR_SET(value) (value << 8)
-#define ECTOR_COLOR16_SET(value) (value)
-typedef struct _Ector_Color Ector_Color;
-typedef enum _Ector_Color_Type
-{
-  ECTOR_COLOR,
-  ECTOR_COLOR16
-} Ector_Color_Type;
-struct _Ector_Color
-{
-   unsigned short r;
-   unsigned short g;
-   unsigned short b;
-   unsigned short a;
-   Ector_Color_Type type;
-};
 static inline void
 ector_color_argb_premul(int a, int *r, int *g, int *b)
 {
@@ -51,34 +29,4 @@ ector_color_multiply(unsigned int c1, unsigned int c2)
          (((((c1) & 0xff) * ((c2) & 0xff)) + 0xff) >> 8) );
 }
-static inline void
-ector_color_set(Ector_Color *color, unsigned char r, unsigned char g,
-                unsigned char b, unsigned char a)
-{
-   color->r = ECTOR_COLOR_SET(r);
-   color->g = ECTOR_COLOR_SET(g);
-   color->b = ECTOR_COLOR_SET(b);
-   color->a = ECTOR_COLOR_SET(a);
-   color->type = ECTOR_COLOR;
-}
-static inline void
-ector_color16_set(Ector_Color *color, unsigned short r, unsigned short g,
-                  unsigned short b, unsigned short a)
-{
-   color->r = ECTOR_COLOR16_SET(r);
-   color->g = ECTOR_COLOR16_SET(g);
-   color->b = ECTOR_COLOR16_SET(b);
-   color->a = ECTOR_COLOR16_SET(a);
-   color->type = ECTOR_COLOR16;
-}
-static inline Ector_Color_Type
-ector_color_type_get(Ector_Color *color)
-{
-   return color->type;
-}
 #endif

Changed file 3 of 5:

@@ -191,6 +191,19 @@ EAPI extern const Eo_Event_Description _EFL_GFX_PATH_CHANGED;
 #include "interfaces/efl_gfx_gradient_radial.eo.h"
 #include "interfaces/efl_gfx_filter.eo.h"
+#define EFL_GFX_COLOR_SET(value) (value << 8)
+#define EFL_GFX_COLOR16_SET(value) (value)
+static inline void efl_gfx_color_type_set(Efl_Gfx_Color *color,
+                                          unsigned char r, unsigned char g,
+                                          unsigned char b, unsigned char a);
+static inline void efl_gfx_color16_type_set(Efl_Gfx_Color *color,
+                                            unsigned short r, unsigned short g,
+                                            unsigned short b, unsigned short a);
+#include "interfaces/efl_gfx.x"
 #else
 #ifndef EFL_NOLEGACY_API_SUPPORT
Changed file 4 of 5:

@@ -0,0 +1,36 @@
+#ifndef EFL_GFX_X
+# define EFL_GFX_X
+static inline void
+efl_gfx_color_type_set(Efl_Gfx_Color *color,
+                       unsigned char r, unsigned char g,
+                       unsigned char b, unsigned char a)
+{
+   color->r = EFL_GFX_COLOR_SET(r);
+   color->g = EFL_GFX_COLOR_SET(g);
+   color->b = EFL_GFX_COLOR_SET(b);
+   color->a = EFL_GFX_COLOR_SET(a);
+   color->type = EFL_GFX_COLOR_TYPE_BITS8;
+}
+static inline void
+efl_gfx_color16_type_set(Efl_Gfx_Color *color,
+                         unsigned short r, unsigned short g,
+                         unsigned short b, unsigned short a)
+{
+   color->r = EFL_GFX_COLOR16_SET(r);
+   color->g = EFL_GFX_COLOR16_SET(g);
+   color->b = EFL_GFX_COLOR16_SET(b);
+   color->a = EFL_GFX_COLOR16_SET(a);
+   color->type = EFL_GFX_COLOR_TYPE_BITS16;
+}
+static inline Efl_Gfx_Color_Type
+efl_gfx_color_type_get(Efl_Gfx_Color *color)
+{
+   return color->type;
+}
+#endif
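A quick standalone sanity check of the channel encoding used above (a sketch in plain C, independent of EFL; the two macro definitions are copied verbatim from this commit):

#include <assert.h>

#define EFL_GFX_COLOR_SET(value) (value << 8)
#define EFL_GFX_COLOR16_SET(value) (value)

int main(void)
{
   /* An 8-bit 0xff lands in the top byte of the unsigned short:
    * 0xff00, leaving the low byte at zero. */
   assert(EFL_GFX_COLOR_SET(0xff) == 0xff00);

   /* A 16-bit value is stored verbatim. */
   assert(EFL_GFX_COLOR16_SET(0xffff) == 0xffff);
   return 0;
}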

Changed file 5 of 5:

@@ -9,6 +9,22 @@ enum Efl.Gfx.Render_Op {
   last
 }
+enum Efl.Gfx.Color_Type {
+  bits8, [[Color is encoded in the top 8 bits of the unsigned short as an unsigned char.]]
+  bits16, [[Color is encoded in the full 16 bits of the unsigned short.]]
+  last
+}
+struct Efl.Gfx.Color {
+  [[Define an RGBA color.]]
+  r: ushort; [[The red component.]]
+  g: ushort; [[The green component.]]
+  b: ushort; [[The blue component.]]
+  a: ushort; [[The alpha component.]]
+  type: Efl.Gfx.Color_Type; [[The encoding in use, bits8 or bits16.]]
+}
 interface Efl.Gfx.Base {
   legacy_prefix: null;
   eo_prefix: efl_gfx;
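For reference, the C types that Eolian would typically generate from the Efl.Gfx.Color_Type and Efl.Gfx.Color declarations above look roughly like this (a sketch of usual eolian output, not taken from the commit; only the BITS8/BITS16 names are confirmed by the inline code in efl_gfx.x):

typedef enum
{
  EFL_GFX_COLOR_TYPE_BITS8 = 0,
  EFL_GFX_COLOR_TYPE_BITS16,
  EFL_GFX_COLOR_TYPE_LAST
} Efl_Gfx_Color_Type;

typedef struct _Efl_Gfx_Color
{
  unsigned short r; /* carries the value in its top 8 bits when type is bits8 */
  unsigned short g;
  unsigned short b;
  unsigned short a;
  Efl_Gfx_Color_Type type;
} Efl_Gfx_Color;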