I've got a question regarding bitwise enums that I just can't seem to resolve. I've got a number of flags that are represented by a bitwise enum as in the following example:
enum
{
    EnumNone = 0,
    EnumOne = 1 << 0,
    EnumTwo = 1 << 1,
    EnumThree = 1 << 2,
    EnumFour = 1 << 3
};
typedef NSUInteger MyEnum;
All is fine with the above example. Based on my research and various helpful posts on Stack Overflow (this one, for example), I've concluded that, using the above example, I'm essentially given 32 options (or shifts, if you will), each option representing one bit in a 32-bit series of options, which tells me that I can go all the way to EnumThirtyTwo = 1 << 31.
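For illustration, combining and testing flags under this scheme is plain bitmask arithmetic (a minimal example using the names above):

MyEnum options = EnumOne | EnumThree;       // set two flags
BOOL hasThree = (options & EnumThree) != 0; // YES
BOOL hasFour  = (options & EnumFour) != 0;  // NO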
My question is this:
Suppose I have more than 32 flags, say 75 for example, to represent using a bitwise enum. How would that best be represented?
enum
{
    EnumNone = 0,
    EnumOne = 1 << 0,
    EnumTwo = 1 << 1,
    EnumThree = 1 << 2,
    EnumFour = 1 << 3,
    ...
    ...
    EnumSeventyFive = 1 << 75
};
typedef NSUInteger MyEnum;
Would it be a simple matter of changing the declaration of my enum type, say, to: typedef long int MyEnum; or typedef long MyEnum?
You can use a few simple macros/functions and a struct containing a char array of sufficient size; this gives you call-by-value semantics, i.e. just like real enums. E.g. something along the lines of (typed directly into the answer):
typedef struct
{
    char bits[10]; // enough for 80 bits...
} SeventyFiveFlags;

typedef enum
{
    EnumOne = 0,
    ...
    EnumSeventyFive = 74
} SeventyFiveFlagNames;

NS_INLINE BOOL testFlag(SeventyFiveFlags flags, SeventyFiveFlagNames bit)
{
    return (flags.bits[bit >> 3] & (1 << (bit & 0x7))) != 0;
}
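If you also need to set and clear bits, companion helpers in the same style might look like this (a sketch, not from the original answer; note they take a pointer so they can mutate the struct):

NS_INLINE void setFlag(SeventyFiveFlags *flags, SeventyFiveFlagNames bit)
{
    flags->bits[bit >> 3] |= (1 << (bit & 0x7));
}

NS_INLINE void clearFlag(SeventyFiveFlags *flags, SeventyFiveFlagNames bit)
{
    flags->bits[bit >> 3] &= ~(1 << (bit & 0x7));
}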
However, you can also use the bitstring(3) functions/macros if you are OK with call-by-reference semantics. These create (heap or stack) bit strings of any length. Use your enum to provide symbolic names for the bit numbers rather than masks, e.g.:
#include <bitstring.h>
typedef enum
{
    EnumOne = 0,
    ...
    EnumSeventyFive = 74,
    SeventyFiveFlagsSize = 75
} SeventyFiveFlagNames;
typedef bitstr_t *SeventyFiveFlags;
// local (stack) declaration & use
bitstr_t bit_decl(seventyFive, SeventyFiveFlagsSize); // bit_decl() expands to the array declaration
bit_nclear(seventyFive, EnumOne, EnumSeventyFive);    // set all flags false
if (bit_test(seventyFive, EnumFortyTwo))              // test a flag
You can always wrap this up as a class if heap-only allocation is acceptable.
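For the heap case, a minimal sketch using the documented bitstring(3) calls (error handling omitted):

// heap allocation & use
SeventyFiveFlags flags = bit_alloc(SeventyFiveFlagsSize); // zero-filled via calloc
bit_set(flags, EnumOne);        // set a flag
if (bit_test(flags, EnumOne))   // test it
{
    // ...
}
bit_clear(flags, EnumOne);      // clear it again
free(flags);                    // bit_alloc allocates with calloc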
Maybe I am talking about something irrelevant, but I think having too many flags in one enum is not good practice. With this many flags, there should be a way to group them, for example:
enum
{
    WidthNone = 0,
    WidthOne = 1 << 0,
    WidthTwo = 1 << 1,
    WidthThree = 1 << 2,
    WidthFour = 1 << 3
};
typedef NSUInteger widthRelated;

enum
{
    HeightNone = 0,
    HeightOne = 1 << 0,
    HeightTwo = 1 << 1,
    HeightThree = 1 << 2,
    HeightFour = 1 << 3
};
typedef NSUInteger heightRelated;
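Each group then fits comfortably within 32 bits, and a container can carry one field per group. A hypothetical sketch (the struct and field names here are invented for illustration):

typedef struct {
    widthRelated width;
    heightRelated height;
} LayoutFlags;

LayoutFlags flags = { WidthOne | WidthTwo, HeightThree };
BOOL isWide = (flags.width & WidthTwo) != 0; // YES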
Related
I have a native struct (which is quite large, so I have to use the new keyword to instantiate it; the code below is just an MCVE, and I can't change the struct as it comes from an external dependency):
struct NativeStruct
{
    char BrokerID[11];
    char InvestorID[13];
    char InstrumentID[31];
    char OrderRef[13];
    char UserID[16];
    char OrderPriceType;
    char Direction;
    double LimitPrice;
};
I want to convert NativeStruct to a managed object, so I defined a ref struct to mirror it, which also uses two enums, shown below:
public enum struct EnumOrderPriceTypeType
{
    AnyPrice = (Byte)'1',
    LimitPrice = (Byte)'2',
    BestPrice = (Byte)'3',
    LastPrice = (Byte)'4',
    LastPricePlusOneTicks = (Byte)'5',
    LastPricePlusTwoTicks = (Byte)'6',
    LastPricePlusThreeTicks = (Byte)'7',
    AskPrice1 = (Byte)'8',
    AskPrice1PlusOneTicks = (Byte)'9',
    AskPrice1PlusTwoTicks = (Byte)'A',
    AskPrice1PlusThreeTicks = (Byte)'B',
    BidPrice1 = (Byte)'C',
    BidPrice1PlusOneTicks = (Byte)'D',
    BidPrice1PlusTwoTicks = (Byte)'E',
    BidPrice1PlusThreeTicks = (Byte)'F'
};

public enum struct EnumDirectionType
{
    Buy = (Byte)'0',
    Sell = (Byte)'1'
};
[StructLayout(LayoutKind::Sequential)]
public ref struct ManagedStruct
{
    [MarshalAs(UnmanagedType::ByValTStr, SizeConst = 11)]
    String^ BrokerID;
    [MarshalAs(UnmanagedType::ByValTStr, SizeConst = 13)]
    String^ InvestorID;
    [MarshalAs(UnmanagedType::ByValTStr, SizeConst = 31)]
    String^ InstrumentID;
    [MarshalAs(UnmanagedType::ByValTStr, SizeConst = 13)]
    String^ OrderRef;
    [MarshalAs(UnmanagedType::ByValTStr, SizeConst = 16)]
    String^ UserID;

    EnumOrderPriceTypeType OrderPriceType;
    EnumDirectionType Direction;
    double LimitPrice;
};
Then I use StructureToPtr to copy the managed object to the native object, and use WriteLine to test whether the copy succeeded:
NativeStruct *native = new NativeStruct();
ManagedStruct^ managed = gcnew ManagedStruct();
managed->LimitPrice = 95.5;
managed->BrokerID = "666666";
Marshal::StructureToPtr(managed, IntPtr(native), false);

int i;
for (i = 0; i < 11; i++)
    Console::Write(native->BrokerID[i]);
Console::WriteLine();
Console::WriteLine(native->LimitPrice);
Console::WriteLine(L"Hello ");
Console::ReadLine();
My question is: why is LimitPrice not copied successfully? I have been battling this for a week; any help would be welcome. Thanks a lot.
Marshal::StructureToPtr() can only work correctly when the managed and the native struct are an exact match. By far the simplest way to verify this is to check the sizes of the structures; they must be identical. So add this code to your program:
auto nlen = sizeof(NativeStruct);
auto mlen = Marshal::SizeOf(ManagedStruct::typeid);
System::Diagnostics::Debug::Assert(nlen == mlen);
Kaboom. The native struct takes 96 bytes and the managed one takes 104. Consequences are dire: you corrupt memory, and that has a lot more unpleasant side effects than the LimitPrice member value getting copied to the wrong offset.
Two basic ways to troubleshoot this. You can simply populate all of the managed struct members with unique values and check for the first member of the native struct that has the wrong value; the member before it is the one that's wrong. Keep going until you no longer get the kaboom. Or you can write code that uses offsetof() on the native struct members and compares the results with Marshal::OffsetOf().
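For illustration, that offset-comparison approach might look like this for one field (a sketch using the field names from the structs above; offsetof and Marshal::OffsetOf are the standard calls):

size_t nativeOffset = offsetof(NativeStruct, LimitPrice);
int managedOffset = Marshal::OffsetOf(ManagedStruct::typeid, "LimitPrice").ToInt32();
Console::WriteLine(L"LimitPrice offset: native {0}, managed {1}",
                   (int)nativeOffset, managedOffset);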
Just to save you the trouble: the problem is the enum declarations. Their size in the native struct is 1 byte, but the managed versions take 4 bytes. Fix:
public enum struct EnumOrderPriceTypeType : Byte
and
public enum struct EnumDirectionType : Byte
Note the added : Byte to force the enum to take 1 byte of storage. It should be noted that copying the members one by one instead of using Marshal::StructureToPtr() is quicker, and would have saved you a week of trouble.
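For illustration, a hand-written managed-to-native copy of a few fields might look like this (a sketch, untested; StringToHGlobalAnsi/FreeHGlobal are the standard marshaling calls, and truncation/padding of the fixed-size char arrays is glossed over):

IntPtr p = Marshal::StringToHGlobalAnsi(managed->BrokerID);
strncpy(native->BrokerID, (const char*)(void*)p, sizeof(native->BrokerID) - 1);
Marshal::FreeHGlobal(p);
native->Direction = (char)managed->Direction; // 1-byte enum, no layout to get wrong
native->LimitPrice = managed->LimitPrice;     // plain value copy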
I am using a C++ library, so I converted my .m file to a .mm file. Now I am not able to set the enum value. Here is the enum:
typedef enum {
    ImageSourceTypeCamera,
    ImageSourceTypePhotoLibrary,
    ImageSourceTypeNone
} ImageSourceType;

// Property
@property (nonatomic, assign) ImageSourceType ImageSourceType;
Now when I use this line it gives me an error:
self.ImageSourceType = 2; // error: assigning to 'ImageSourceType' from incompatible type 'int'
Try:
self.ImageSourceType = ImageSourceTypePhotoLibrary;
To use numbers instead of names for enums, format the definition like so:
typedef enum {
    ImageSourceTypeCamera = 0,
    ImageSourceTypePhotoLibrary = 1,
    ImageSourceTypeNone = 2
} ImageSourceType;

self.ImageSourceType = 0; //ImageSourceTypeCamera
self.ImageSourceType = 1; //ImageSourceTypePhotoLibrary
//etc...

if (self.ImageSourceType == 0) {
    //this is the same as (self.ImageSourceType == ImageSourceTypeCamera)
}
Note that C++ is stricter than C about implicit int-to-enum conversion, so in a .mm file a numeric assignment may still need an explicit cast, e.g. self.ImageSourceType = (ImageSourceType)0;
I have a situation in Objective-C where a Java-style enum would be very helpful. I have a set of layer types, and each layer has its own persistence value associated with it (stored in seconds). I want to keep these layer types in an enum, since they're too similar to make into separate classes, like so:
typedef enum {
    ExplosionLayerType,
    FireworkLayerType,
    FireLayerType,
    FireJetLayerType
} FXLayerType;
In Java, I could easily associate these two values with something like this:
public enum FXLayerType {
    Explosion(3),
    Firework(6),
    Fire(7),
    FireJet(-1);

    private int persistence;

    FXLayerType(int persistence) {
        this.persistence = persistence;
    }
}
Is there a simple way to create a sort of lightweight class like this in Objective-C, or will I need to resort to more primitive methods?
EDIT:
Various people have suggested doing something like this:
typedef enum {
    ExplosionLayerType = 3,
    FireworkLayerType = 6
} FXLayerType;
This will not work for me, since I may have something like this (Java-style enum):
Explosion(3),
Firework(6),
Dust(3);
In Java, Dust and Explosion will be treated as unique values, but direct assignment with C enums will treat them as being exactly the same.
If you just want a primitive container for type and value, consider this approach:
typedef struct FXLayerValue {
    FXLayerType type;
    int value;
} FXLayerValue;
Then again, a class hierarchy may be worth consideration if things become complex or are better handled dynamically. Caveat: if you have a ton of objects to save and/or create, an objc type will be overkill and degrade performance.
Unfortunately, my Java-Fu isn't good enough to know all the lang differences for enums.
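For completeness, a hypothetical use of the FXLayerValue container above, showing how it sidesteps the Dust/Explosion collision from the question:

FXLayerValue a = { ExplosionLayerType, 3 };
FXLayerValue b = { FireworkLayerType, 3 }; // a different type may share a value
BOOL sameType  = (a.type == b.type);   // NO
BOOL sameValue = (a.value == b.value); // YES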
To emulate Java enums we need something which is comparable (can be an operand of == etc.), which can have fields, and which is lightweight. This suggests structs, and optionally pointers to structs. Here is an example of the latter:
FXLayerType.h:
typedef const struct { int persistence; } FXLayerType;
extern FXLayerType * const LayerTypeExplosion;
extern FXLayerType * const LayerTypeFirework;
extern FXLayerType * const LayerTypeDust;
FXLayerType.m:
#import "FXLayerType.h"
const FXLayerType _LayerTypeExplosion = { 3 };
const FXLayerType _LayerTypeFirework = { 6 };
const FXLayerType _LayerTypeDust = { 3 };
FXLayerType * const LayerTypeExplosion = &_LayerTypeExplosion;
FXLayerType * const LayerTypeFirework = &_LayerTypeFirework;
FXLayerType * const LayerTypeDust = &_LayerTypeDust;
So FXLayerType itself is a constant struct, while, as with Obj-C objects, we always use pointers to these structs. The implementation creates 3 constant structs and 3 constant pointers to them.
We can now write code such as:
FXLayerType *a, *b;
a = LayerTypeDust;
b = LayerTypeExplosion;
NSLog(@"%d, %d\n", a == b, a->persistence == b->persistence);
Which will output "0, 1" - a and b are different enums (0) but have the same persistence (1). Note here a and b are not constant pointers, only the enum "literals" are defined as constants.
As written this has the disadvantage that you cannot switch on an enum value. However, if that is needed, just add a second field, say tag, and init it with a unique value using a real enum, say FXLayerTypeTag. You can also remove the indirection if you are happy to always compare tags (e.g. a.tag == b.tag). This gives you:
FXLayerType.h:
typedef enum { ExplosionTag, FireworkTag, DustTag } FXLayerTypeTag;
typedef struct { FXLayerTypeTag tag; int persistence; } FXLayerType;
extern const FXLayerType LayerTypeExplosion;
extern const FXLayerType LayerTypeFirework;
extern const FXLayerType LayerTypeDust;
FXLayerType.m:
#import "FXLayerType.h"
const FXLayerType LayerTypeExplosion = { ExplosionTag, 3 };
const FXLayerType LayerTypeFirework = { FireworkTag, 6 };
const FXLayerType LayerTypeDust = { DustTag, 3 };
Use:
FXLayerType a, b;
a = LayerTypeDust;
b = LayerTypeExplosion;
NSLog(@"%d, %d\n", a.tag == b.tag, a.persistence == b.persistence);
A difference between the two designs is that the first passes around pointers while the second passes structures, which may be larger. You can combine them, to get switchable pointer-based enums; that is left as an exercise!
Both these designs also have the (dis)advantage that the number of enum "literals" can be extended at any time.
Actually you may assign values to the enum keys in C, as they are nothing but ints:
typedef enum {
    LayerTypeExplosion = 3,
    LayerTypeFirework = 6,
    LayerTypeFire = 7,
    LayerTypeFireJet = -1
} FXLayerType;
You may use them then simply as a restricted set of values, to be assigned to a variable of type FXLayerType.
FXLayerType myLayerType = LayerTypeFirework;
NSLog(@"Value of myLayerType = %i", myLayerType);
// => "Value of myLayerType = 6"
This is not 100% equivalent, but this might be an approach you could take in Objective-C. Basically, create several class-level convenience methods to construct the various configurations of FXLayerType.
@interface FXLayerType : NSObject
{
@private
    int persistence;
}

+ (FXLayerType *)fireworkLayerType;
+ (FXLayerType *)explosionLayerType;
+ (FXLayerType *)jetLayerType;
@end
@implementation FXLayerType

+ (FXLayerType *)explosionLayerTypeWithPersistence:(int)persistence
{
    FXLayerType *layerType = [[FXLayerType new] autorelease];
    layerType->persistence = persistence;
    return layerType;
}

+ (FXLayerType *)explosionLayerType
{
    return [self explosionLayerTypeWithPersistence:3];
}

+ (FXLayerType *)fireworkLayerType
{
    return [self explosionLayerTypeWithPersistence:6];
}

+ (FXLayerType *)jetLayerType
{
    return [self explosionLayerTypeWithPersistence:-1];
}

@end
Usage:
FXLayerType* aJetLayerType = [FXLayerType jetLayerType];
I have recently used the j2objc format for enums. It works rather nicely. Additionally, you can auto-generate your enums if you are trying to map directly from a Java object.
https://code.google.com/p/j2objc/wiki/Enums
However, I did remove the j2objc specific classes from my "Enums". I did not want the additional dependencies.
The struct-based answers look good on their face, but fail when you try to add Objective-C objects into the struct. Given that limitation, truly emulating the Java-style enum may be more work than it is worth.
tl;dr Version
How are the data types of an enum's constants guaranteed to be NSUInteger instead of unsigned int when declaring an enum thusly:
enum {
    NSNullCellType = 0,
    NSTextCellType = 1,
    NSImageCellType = 2
};
typedef NSUInteger NSCellType;
The typedef to NSUInteger does not appear to be tied to the enum declaration in any way.
Full Version
I was reading through Apple's 64-Bit Transition Guide for Cocoa for some guidance on enum values and I came away with a question. Here's a (lengthy) quote from the Enumeration Constants section, emphasis mine:
A problem with enumeration (enum) constants is that their data types are frequently indeterminate. In other words, enum constants are not predictably unsigned int. With conventionally constructed enumerations, the compiler actually sets the underlying type based on what it finds. The underlying type can be (signed) int or even long. Take the following example:
typedef enum {
    MyFlagError = -1,
    MyFlagLow = 0,
    MyFlagMiddle = 1,
    MyFlagHigh = 2
} MyFlagType;
The compiler looks at this declaration and, finding a negative value assigned to one of the member constants, declares the underlying type of the enumeration int. If the range of values for the members does not fit into an int or unsigned int, then the base type silently becomes 64-bit (long). The base type of quantities defined as enumerations can thus silently change size to accord with the values in the enumeration. This can happen whether you're compiling 32-bit or 64-bit. Needless to say, this situation presents obstacles for binary compatibility.
As a remedy for this problem, Apple has decided to be more explicit about the enumeration type in the Cocoa API. Instead of declaring arguments in terms of the enumeration, the header files now separately declare a type for the enumeration whose size can be specified. The members of the enumeration and their values are declared and assigned as before. For example, instead of this:
typedef enum {
    NSNullCellType = 0,
    NSTextCellType = 1,
    NSImageCellType = 2
} NSCellType;
there is now this:
enum {
    NSNullCellType = 0,
    NSTextCellType = 1,
    NSImageCellType = 2
};
typedef NSUInteger NSCellType;
The enumeration type is defined in terms of NSInteger or NSUInteger to make the base enumeration type 64-bit capable on 64-bit architectures.
My question is this: given that the typedef doesn't appear to be tied explicitly to the enum declaration, how does one know if their data types are unsigned int or NSUInteger?
Starting with Xcode 4.5, there is now NS_ENUM:
typedef NS_ENUM(NSUInteger, NSCellType) {
    NSNullCellType = 0,
    NSTextCellType = 1,
    NSImageCellType = 2
};
And you can consider NS_OPTIONS if you work with binary flags:
typedef NS_OPTIONS(NSUInteger, MyCellFlag) {
    MyTextCellFlag = 1 << 0,
    MyImageCellFlag = 1 << 1,
};
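Values declared with NS_OPTIONS combine and test like any bit mask, e.g.:

MyCellFlag flags = MyTextCellFlag | MyImageCellFlag;
BOOL showsImage = (flags & MyImageCellFlag) != 0; // YES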
I ran a test on the simulator; the intention of the test is to check the size of different integer types. For that, the result of sizeof was printed to the console. So I tested these enum values:
typedef enum {
    TLEnumCero = 0,
    TLEnumOne = 1,
    TLEnumTwo = 2
} TLEnum;

typedef enum {
    TLEnumNegativeMinusOne = -1,
    TLEnumNegativeCero = 0,
    TLEnumNegativeOne = 1,
    TLEnumNegativeTwo = 2
} TLEnumNegative;

typedef NS_ENUM(NSUInteger, TLUIntegerEnum) {
    TLUIntegerEnumZero = 0,
    TLUIntegerEnumOne = 1,
    TLUIntegerEnumTwo = 2
};

typedef NS_ENUM(NSInteger, TLIntegerEnum) {
    TLIntegerEnumMinusOne = -1,
    TLIntegerEnumZero = 0,
    TLIntegerEnumOne = 1,
    TLIntegerEnumTwo = 2
};
Test Code:
NSLog(@"sizeof enum: %ld", sizeof(TLEnum));
NSLog(@"sizeof enum negative: %ld", sizeof(TLEnumNegative));
NSLog(@"sizeof enum NSUInteger: %ld", sizeof(TLUIntegerEnum));
NSLog(@"sizeof enum NSInteger: %ld", sizeof(TLIntegerEnum));
Result for iPhone Retina (4-inch) Simulator:
sizeof enum: 4
sizeof enum negative: 4
sizeof enum NSUInteger: 4
sizeof enum NSInteger: 4
Result for iPhone Retina (4-inch 64 bit) Simulator:
sizeof enum: 4
sizeof enum negative: 4
sizeof enum NSUInteger: 8
sizeof enum NSInteger: 8
Conclusion
A generic enum is an int or unsigned int: 4 bytes on both 32-bit and 64-bit.
As we already know, NSUInteger and NSInteger are 4 bytes on a 32-bit compiler and 8 bytes on a 64-bit compiler for iOS.
These are two separate declarations. The typedef guarantees that, when you use that type, you always get an NSUInteger.
The problem with an enum is not that it's not large enough to hold the value. In fact, the only guarantee you get for an enum is that sizeof(enum Foo) is large enough to hold whatever values you've currently defined in that enum. But its size may change if you add another constant. That's why Apple does the separate typedef, to maintain binary stability of the API.
The data types of the enum's constants are not guaranteed to be NSUInteger, but they are guaranteed to be cast to NSUInteger every time you use them through NSCellType.
In other words, the declaration decrees that although the enum's values would currently fit into an unsigned int, the storage reserved for them when accessed through NSCellType should be an NSUInteger.
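A minimal illustration of that distinction:

// The constants keep whatever type the compiler picked for the anonymous enum,
// but a variable declared as NSCellType is always an NSUInteger:
NSCellType cellType = NSTextCellType;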
I'm experimenting with Obj-C blocks and trying to have a struct with two blocks in it, where one block changes what the other block does.
This is a really roundabout way to do something simple, and there may be better ways to do it, but the point of the exercise is for me to understand blocks. Here's the code; it doesn't work, so what am I missing/not understanding and/or doing wrong?
//enumerate my math operation options so i can have something more understandable
//than 0, 1, 2, etc... also makes it easier to add operations, as opTypeTotal
//will be 1 plus the index of the operation before it.
typedef enum
{
    opTypeAdd = 0,
    opTypeSubtract = 1,
    opTypeTotal
} opType;
//not sure if (struct someMathStruct)* is correct, probably is wrong
//the intent is to pass a pointer to someMathStruct, but the compiler
//won't know about its existance until a few lines later...
typedef (void)(^changeBlock)(opType,(struct someMathStruct)*);
typedef (void)(^mathBlock)(int,int,int*);
//hold two blocks, to be defined later at runtime
typedef struct someMathStruct {
    mathBlock doMath;
    changeBlock changeOperation;
} SomeMath;
//i want to declare an array of blocks of type mathBlock
//the intent is to have the array index to correspond with the opTypes enumerated above
//almost certain i'm doing this wrong
mathBlock *m[opTypeTotal] = malloc(sizeof(mathBlock *)*opTypeTotal);
//just two simple math operations as blocks
m[opTypeAdd] = ^(void)(int a,int b,int *result){*result = a+b;};
m[opTypeSubtract] = ^(void)(int a,int b,int *result){*result = a-b;};
//this block is what's supposed to change the other block in the struct
//it takes an opType, and a pointer to the SomeMath struct
//is this the right way to access the member variables of the struct?
changeBlock changeMe = ^(void)(opType a, SomeMath *b) {
//should make adding operations as easy as just adding cases
switch (a)
{
case opTypeAdd: *b.doMath=m[a]; break;
case opTypeSubtract:
default: *b.doMath=m[a]; //catch-all goes to subtracting
}
}
...
SomeMath mathFun;
int theTotal = 0; //a test int to work with
//do i need to copy the changeMe block?
//or can i just do what i'm doing here as the block itself isn't unique
mathFun.changeOperation = changeMe;
mathFun->changeOperation(opTypeAdd, &mathFun);
mathFun->doMath(theTotal,11,&theTotal); //result should be 11
mathFun->changeOperation(opTypeSubtract, &mathFun);
mathFun->doMath(theTotal,3,&theTotal); //result should be 8
NSLog(@"the result: %d",theTotal); //should output "the result: 8"
The code seems to work as you expect (the result is 8) once you fix the compilation errors:
Compile with: gcc -o test test.m -framework Foundation
#import <Foundation/Foundation.h>
#include <stdlib.h> // calloc/free

//enumerate my math operation options so i can have something more understandable
//than 0, 1, 2, etc... also makes it easier to add operations, as opTypeTotal
//will be 1 plus the index of the operation before it.
typedef enum
{
    opTypeAdd = 0,
    opTypeSubtract = 1,
    opTypeTotal
} opType;

struct someMathStruct; // Forward declare this as a type so we can use it in the
                       // changeBlock typedef
typedef void (^changeBlock)(opType, struct someMathStruct *);
typedef void (^mathBlock)(int, int, int *);

//hold two blocks, to be defined later at runtime
typedef struct someMathStruct {
    mathBlock doMath;
    changeBlock changeOperation;
} SomeMath;

int main()
{
    //i want to declare an array of blocks of type mathBlock
    //the intent is to have the array index correspond with the opTypes
    //enumerated above
    mathBlock *m = calloc(opTypeTotal, sizeof(mathBlock *));

    //just two simple math operations as blocks
    m[opTypeAdd] = ^(int a, int b, int *result){ *result = a + b; };
    m[opTypeSubtract] = ^(int a, int b, int *result){ *result = a - b; };

    changeBlock changeMe = ^(opType a, SomeMath *b) {
        //should make adding operations as easy as just adding cases
        switch (a)
        {
            case opTypeAdd: b->doMath = m[a]; break;
            case opTypeSubtract:
            default: b->doMath = m[a]; //catch-all goes to subtracting
        }
    };

    SomeMath mathFun;
    int theTotal = 0; //a test int to work with

    mathFun.changeOperation = changeMe;
    mathFun.changeOperation(opTypeAdd, &mathFun);
    mathFun.doMath(theTotal, 11, &theTotal); //result should be 11
    mathFun.changeOperation(opTypeSubtract, &mathFun);
    mathFun.doMath(theTotal, 3, &theTotal); //result should be 8
    NSLog(@"the result: %d", theTotal); //should output "the result: 8"
}
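One subtlety the fixed code glosses over (not part of the original answer): block literals start life on the stack, so if the m array had to outlive the scope that created the blocks, each block would need to be copied to the heap first. Under manual reference counting that looks roughly like this (Block_copy/Block_release come from <Block.h>):

#include <Block.h>

// Copy the stack block to the heap before storing it long-term:
m[opTypeAdd] = (mathBlock)Block_copy(^(int a, int b, int *result){ *result = a + b; });
// ...use as before, then balance the copy:
Block_release(m[opTypeAdd]);
free(m);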