Fixed bug 3165 - define numbers don't match types in Swift

C.W. Betts

Swift is very strict with types, so much so that values of different signedness or size must be explicitly cast. Most of these defines are imported into Swift as 32-bit signed integers, while the corresponding struct fields are 32-bit unsigned integers. Appending a "u" suffix causes the defines to be imported as 32-bit unsigned integers instead.
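
A minimal Swift sketch of the mismatch (the variable names and values below are illustrative assumptions, not taken from the SDL headers):

// Before the fix, a macro such as 0x1FFF0000 is imported into Swift as Int32.
let signedMask: Int32 = 0x1FFF0000

// The value it gets combined with (e.g. a window position field) is UInt32.
let position: UInt32 = 0x1FFF0002

// Mixing the two does not compile; Swift demands an explicit cast:
// let undefined = (position & signedMask) == signedMask   // error

// With the trailing "u" the macro imports as UInt32, so no cast is needed:
let unsignedMask: UInt32 = 0x1FFF0000
let undefined = (position & unsignedMask) == unsignedMask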
Sam Lantinga 2016-10-01 13:35:36 -07:00
parent 894da7c3e2
commit 2803a1f6b3
4 changed files with 32 additions and 32 deletions


@@ -122,7 +122,7 @@ typedef enum
/**
* \brief Used to indicate that you don't care what the window position is.
*/
-#define SDL_WINDOWPOS_UNDEFINED_MASK 0x1FFF0000
+#define SDL_WINDOWPOS_UNDEFINED_MASK 0x1FFF0000u
#define SDL_WINDOWPOS_UNDEFINED_DISPLAY(X) (SDL_WINDOWPOS_UNDEFINED_MASK|(X))
#define SDL_WINDOWPOS_UNDEFINED SDL_WINDOWPOS_UNDEFINED_DISPLAY(0)
#define SDL_WINDOWPOS_ISUNDEFINED(X) \
@@ -131,7 +131,7 @@ typedef enum
/**
* \brief Used to indicate that the window position should be centered.
*/
-#define SDL_WINDOWPOS_CENTERED_MASK 0x2FFF0000
+#define SDL_WINDOWPOS_CENTERED_MASK 0x2FFF0000u
#define SDL_WINDOWPOS_CENTERED_DISPLAY(X) (SDL_WINDOWPOS_CENTERED_MASK|(X))
#define SDL_WINDOWPOS_CENTERED SDL_WINDOWPOS_CENTERED_DISPLAY(0)
#define SDL_WINDOWPOS_ISCENTERED(X) \