motion-memorymanagement 0.0.1
Sign up to get free protection for your applications and to get access to all the features.
- data/.gitignore +20 -0
- data/Gemfile +4 -0
- data/LICENSE +42 -0
- data/MMRuntime/MMRuntime.xcodeproj/project.pbxproj +271 -0
- data/MMRuntime/MMRuntime/MANotificationCenterAdditions.h +18 -0
- data/MMRuntime/MMRuntime/MANotificationCenterAdditions.m +36 -0
- data/MMRuntime/MMRuntime/MAWeakArray.h +16 -0
- data/MMRuntime/MMRuntime/MAWeakArray.m +121 -0
- data/MMRuntime/MMRuntime/MAWeakDictionary.h +16 -0
- data/MMRuntime/MMRuntime/MAWeakDictionary.m +66 -0
- data/MMRuntime/MMRuntime/MAZeroingWeakProxy.h +31 -0
- data/MMRuntime/MMRuntime/MAZeroingWeakProxy.m +99 -0
- data/MMRuntime/MMRuntime/MAZeroingWeakRef.h +73 -0
- data/MMRuntime/MMRuntime/MAZeroingWeakRef.m +852 -0
- data/MMRuntime/MMRuntime/MAZeroingWeakRefNativeZWRNotAllowedTable.h +180 -0
- data/MMRuntime/MMRuntime/MMRuntime-Prefix.pch +7 -0
- data/MMRuntime/MMRuntime/MMRuntime.h +3 -0
- data/MMRuntime/MMRuntime/MMRuntime.m +11 -0
- data/README.md +33 -0
- data/Rakefile +2 -0
- data/demoapp/.gitignore +13 -0
- data/demoapp/Rakefile +12 -0
- data/demoapp/app/app_delegate.rb +5 -0
- data/demoapp/spec/main_spec.rb +39 -0
- data/lib/motion-memorymanagement.rb +23 -0
- data/lib/motion-memorymanagement/runtime_helpers.rb +5 -0
- data/lib/motion-memorymanagement/version.rb +5 -0
- data/motion-memorymanagement.gemspec +17 -0
- metadata +90 -0
//
//  MAWeakDictionary.m
//  ZeroingWeakRef
//
//  Created by Mike Ash on 7/13/10.
//

#import "MAWeakDictionary.h"

#import "MAZeroingWeakRef.h"


// A mutable dictionary whose values are held weakly: every stored object is
// wrapped in an MAZeroingWeakRef, so the dictionary never keeps its values
// alive. Entries whose targets have been deallocated are purged lazily the
// next time they are looked up through -objectForKey:.
@implementation MAWeakDictionary

- (id)init
{
    if((self = [super init]))
    {
        _dict = [[NSMutableDictionary alloc] init];
    }
    return self;
}

- (void)dealloc
{
    // manual reference counting — this class predates ARC
    [_dict release];
    [super dealloc];
}

// NOTE(review): may overcount, because entries whose targets have died but
// have not yet been looked up still occupy the backing store.
- (NSUInteger)count
{
    return [_dict count];
}

- (id)objectForKey: (id)aKey
{
    MAZeroingWeakRef *ref = [_dict objectForKey: aKey];
    id obj = [ref target];
    
    // clean out keys whose objects have gone away; a ref with a nil
    // target means the object was deallocated
    if(ref && !obj)
        [_dict removeObjectForKey: aKey];
    
    return obj;
}

- (NSEnumerator *)keyEnumerator
{
    // enumerate over a snapshot of the keys, because -objectForKey: mutates
    // the backing dictionary (lazy purge above), which would otherwise raise
    // a mutation-during-enumeration exception in apparently-correct code
    return [[_dict allKeys] objectEnumerator];
}

- (void)removeObjectForKey: (id)aKey
{
    [_dict removeObjectForKey: aKey];
}

- (void)setObject: (id)anObject forKey: (id)aKey
{
    // wrap the value in a zeroing weak ref so it is not retained
    [_dict setObject: [MAZeroingWeakRef refWithTarget: anObject]
              forKey: aKey];
}

@end
//
//  MAZeroingWeakProxy.h
//  ZeroingWeakRef
//
//  Created by Michael Ash on 7/17/10.
//  Copyright 2010 Michael Ash. All rights reserved.
//

#import <Foundation/Foundation.h>


@class MAZeroingWeakRef;

// An NSProxy that weakly references its target and forwards all messages to
// it. Once the target deallocates, forwarded messages become no-ops that
// return zero-filled values.
@interface MAZeroingWeakProxy : NSProxy
{
    MAZeroingWeakRef *_weakRef;    // zeroing weak reference to the real target
    Class _targetClass;            // cached so method signatures survive the target
}

// Convenience factory; returns an autoreleased proxy for the given target.
+ (id)proxyWithTarget: (id)target;

- (id)initWithTarget: (id)target;

// The proxied object, or nil if it has been deallocated.
- (id)zeroingProxyTarget;

#if NS_BLOCKS_AVAILABLE
// same caveats/restrictions as MAZeroingWeakRef cleanup block
- (void)setCleanupBlock: (void (^)(id target))block;
#endif

@end
//
//  MAZeroingWeakProxy.m
//  ZeroingWeakRef
//
//  Created by Michael Ash on 7/17/10.
//  Copyright 2010 Michael Ash. All rights reserved.
//

#import "MAZeroingWeakProxy.h"

#import "MAZeroingWeakRef.h"

@implementation MAZeroingWeakProxy

+ (id)proxyWithTarget: (id)target
{
    return [[[self alloc] initWithTarget: target] autorelease];
}

- (id)initWithTarget: (id)target
{
    // NSProxy declares no -init, so there is deliberately no [super init] here.
    // Stash the target's class so method signatures remain obtainable after
    // the target goes away.
    _targetClass = [target class];
    _weakRef = [[MAZeroingWeakRef alloc] initWithTarget: target];
    return self;
}

- (void)dealloc
{
    // manual reference counting — this class predates ARC
    [_weakRef release];
    [super dealloc];
}

- (id)zeroingProxyTarget
{
    return [_weakRef target];
}

#if NS_BLOCKS_AVAILABLE
- (void)setCleanupBlock: (void (^)(id target))block
{
    [_weakRef setCleanupBlock: block];
}
#endif

// Fast path: while the target is alive, the runtime redirects messages
// straight to it. Returns nil after the target dies, falling through to
// the full forwarding machinery below.
- (id)forwardingTargetForSelector: (SEL)sel
{
    return [_weakRef target];
}

- (NSMethodSignature *)methodSignatureForSelector: (SEL)sel
{
    // use the cached class so this keeps working after the target is gone
    return [_targetClass instanceMethodSignatureForSelector: sel];
}

// Only reached once the target has deallocated. Swallow the message and
// hand back a zero-filled return value of the appropriate size.
- (void)forwardInvocation: (NSInvocation *)inv
{
    NSUInteger returnLength = [[inv methodSignature] methodReturnLength];
    if(returnLength)
    {
        char zeroes[returnLength];
        bzero(zeroes, sizeof(zeroes));
        [inv setReturnValue: zeroes];
    }
}

- (BOOL)respondsToSelector: (SEL)sel
{
    id target = [_weakRef target];
    return target
        ? [target respondsToSelector: sel]
        : [_targetClass instancesRespondToSelector: sel];
}

- (BOOL)conformsToProtocol: (Protocol *)protocol
{
    id target = [_weakRef target];
    return target
        ? [target conformsToProtocol: protocol]
        : [_targetClass conformsToProtocol: protocol];
}

// NSProxy implements these for some incomprehensibly stupid reason, so they
// must be overridden explicitly to reach the target. After the target dies,
// messaging nil yields 0/NO.

- (NSUInteger)hash
{
    return [[_weakRef target] hash];
}

- (BOOL)isEqual: (id)obj
{
    return [[_weakRef target] isEqual: obj];
}

@end
//
//  MAZeroingWeakRef.h
//  ZeroingWeakRef
//
//  Created by Michael Ash on 7/5/10.
//

#import <Foundation/Foundation.h>


// A zeroing weak reference for pre-ARC code: holds a target without
// retaining it, and -target becomes nil once the target deallocates.
@interface MAZeroingWeakRef : NSObject
{
    id _target;          // not retained; zeroed when the target dies
    BOOL _nativeZWR;     // whether the runtime's native weak support is in use
#if NS_BLOCKS_AVAILABLE
    void (^_cleanupBlock)(id target);
#endif
}

// Whether this build can create weak references to CF objects
// (depends on COREFOUNDATION_HACK_LEVEL).
+ (BOOL)canRefCoreFoundationObjects;

// Convenience factory; returns an autoreleased weak ref.
+ (id)refWithTarget: (id)target;

- (id)initWithTarget: (id)target;

#if NS_BLOCKS_AVAILABLE
// ON 10.7:
// cleanup block runs while the target's memory is still
// allocated but after all dealloc methods have run
// (it runs at associated object cleanup time)
// you can use the target's pointer value but don't
// manipulate its contents!

// ON 10.6 AND BELOW:
// cleanup block runs while the global ZWR lock is held
// so make it short and sweet!
// use GCD or something to schedule execution later
// if you need to do something that may take a while
//
// it is unsafe to call -target on the weak ref from
// inside the cleanup block, which is why the target
// is passed in as a parameter
// note that you must not resurrect the target at this point!
- (void)setCleanupBlock: (void (^)(id target))block;
#endif

// The referenced object, or nil if it has been deallocated.
- (id)target;

@end

#ifndef __has_feature
#define __has_feature(feature) 0
#endif

// Helper macros for capturing a weak self (or any variable) in blocks.
// Under ARC-with-weak they compile down to real __weak variables; otherwise
// they fall back to MAZeroingWeakRef.
#define MAWeakVar(var) __weak_ ## var

#if __has_feature(objc_arc_weak)

#define MAWeakDeclare(var) __weak __typeof__((var)) MAWeakVar(var) = var
#define MAWeakImport(var) __typeof__((MAWeakVar(var))) var = MAWeakVar(var)
#define MAWeakImportReturn(var) MAWeakImport(var); do { if(var == nil) return; } while(NO)

#else

#define MAWeakDeclare(var) __typeof__((var)) MAWeakVar(var) = (id)[MAZeroingWeakRef refWithTarget:var]
#define MAWeakImport(var) __typeof__((MAWeakVar(var))) var = [(MAZeroingWeakRef *)MAWeakVar(var) target]
#define MAWeakImportReturn(var) MAWeakImport(var); do { if(var == nil) return; } while(NO)

#endif

#define MAWeakSelfDeclare() MAWeakDeclare(self)
#define MAWeakSelfImport() MAWeakImport(self)
#define MAWeakSelfImportReturn() MAWeakImportReturn(self)
@@ -0,0 +1,852 @@
|
|
1
|
+
//
|
2
|
+
// MAZeroingWeakRef.m
|
3
|
+
// ZeroingWeakRef
|
4
|
+
//
|
5
|
+
// Created by Michael Ash on 7/5/10.
|
6
|
+
//
|
7
|
+
|
8
|
+
#import "MAZeroingWeakRef.h"
|
9
|
+
|
10
|
+
#import "MAZeroingWeakRefNativeZWRNotAllowedTable.h"
|
11
|
+
|
12
|
+
#if __APPLE__
|
13
|
+
#import <CommonCrypto/CommonDigest.h>
|
14
|
+
|
15
|
+
#import <dlfcn.h>
|
16
|
+
#import <libkern/OSAtomic.h>
|
17
|
+
#import <objc/runtime.h>
|
18
|
+
#import <mach/mach.h>
|
19
|
+
#import <mach/port.h>
|
20
|
+
#import <pthread.h>
|
21
|
+
#else
|
22
|
+
#import <pthread.h>
|
23
|
+
#endif
|
24
|
+
|
25
|
+
|
26
|
+
/*
|
27
|
+
The COREFOUNDATION_HACK_LEVEL macro allows you to control how much horrible CF
|
28
|
+
hackery is enabled. The following levels are defined:
|
29
|
+
|
30
|
+
3 - Completely insane hackery allows weak references to CF objects, deallocates
|
31
|
+
them asynchronously in another thread to eliminate resurrection-related race
|
32
|
+
condition and crash.
|
33
|
+
|
34
|
+
2 - Full hackery allows weak references to CF objects by doing horrible
|
35
|
+
things with the private CF class table. Extremely small risk of resurrection-
|
36
|
+
related race condition leading to a crash.
|
37
|
+
|
38
|
+
1 - Mild hackery allows foolproof identification of CF objects and will assert
|
39
|
+
if trying to make a ZWR to one.
|
40
|
+
|
41
|
+
0 - No hackery, checks for an "NSCF" prefix in the class name to identify CF
|
42
|
+
objects and will assert if trying to make a ZWR to one
|
43
|
+
*/
|
44
|
+
#ifndef COREFOUNDATION_HACK_LEVEL
|
45
|
+
#define COREFOUNDATION_HACK_LEVEL 0
|
46
|
+
#endif
|
47
|
+
|
48
|
+
/*
|
49
|
+
The KVO_HACK_LEVEL macro allows similar control over the amount of KVO hackery.
|
50
|
+
|
51
|
+
1 - Use the private _isKVOA method to check for a KVO dynamic subclass.
|
52
|
+
|
53
|
+
0 - No hackery, uses the KVO overridden -class to check.
|
54
|
+
*/
|
55
|
+
#ifndef KVO_HACK_LEVEL
|
56
|
+
#define KVO_HACK_LEVEL 0
|
57
|
+
#endif
|
58
|
+
|
59
|
+
/*
|
60
|
+
The USE_BLOCKS_BASED_LOCKING macro allows control on the code structure used
|
61
|
+
during lock checking. You want to disable blocks if you want your app to work
|
62
|
+
on iOS 3.x devices. iOS 4.x and above can use blocks.
|
63
|
+
|
64
|
+
1 - Use blocks for lock checks.
|
65
|
+
|
66
|
+
0 - Don't use blocks for lock checks.
|
67
|
+
*/
|
68
|
+
#ifndef USE_BLOCKS_BASED_LOCKING
|
69
|
+
#define USE_BLOCKS_BASED_LOCKING 1
|
70
|
+
#endif
|
71
|
+
|
72
|
+
#if KVO_HACK_LEVEL >= 1
|
73
|
+
@interface NSObject (KVOPrivateMethod)
|
74
|
+
|
75
|
+
- (BOOL)_isKVOA;
|
76
|
+
|
77
|
+
@end
|
78
|
+
#endif
|
79
|
+
|
80
|
+
|
81
|
+
@interface NSObject (MAZeroingWeakRefSwizzled)
|
82
|
+
- (void)MAZeroingWeakRef_KVO_original_release;
|
83
|
+
- (void)MAZeroingWeakRef_KVO_original_dealloc;
|
84
|
+
- (void)MAZeroingWeakRef_KVO_original_addObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath context:(void *)context;
|
85
|
+
- (void)MAZeroingWeakRef_KVO_original_removeObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath;
|
86
|
+
- (void)MAZeroingWeakRef_KVO_original_removeObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath context:(void *)context;
|
87
|
+
@end
|
88
|
+
|
89
|
+
|
90
|
+
static void EnsureCustomSubclass(id obj);
|
91
|
+
|
92
|
+
@interface MAZeroingWeakRef ()
|
93
|
+
|
94
|
+
- (void)_zeroTarget;
|
95
|
+
- (void)_executeCleanupBlockWithTarget: (id)target;
|
96
|
+
|
97
|
+
@end
|
98
|
+
|
99
|
+
|
100
|
+
static id (*objc_loadWeak_fptr)(id *location);
|
101
|
+
static id (*objc_storeWeak_fptr)(id *location, id obj);
|
102
|
+
|
103
|
+
// Helper used with the native (10.7+) weak-reference runtime path: it is
// associated with the weak ref's target, so when the target deallocates this
// helper is destroyed and fires the owning ref's cleanup block.
@interface _MAZeroingWeakRefCleanupHelper : NSObject
{
    MAZeroingWeakRef *_ref;    // stored via objc_storeWeak so we don't retain it
    id _target;                // pointer value only; target memory is being torn down
}

- (id)initWithRef: (MAZeroingWeakRef *)ref target: (id)target;

@end

@implementation _MAZeroingWeakRefCleanupHelper

- (id)initWithRef: (MAZeroingWeakRef *)ref target: (id)target
{
    if((self = [self init]))
    {
        // hold the ref through the runtime's weak store so this helper
        // does not keep the MAZeroingWeakRef itself alive
        objc_storeWeak_fptr(&_ref, ref);
        _target = target;
    }
    return self;
}

- (void)dealloc
{
    // runs during the target's destruction; load the ref (nil if it is
    // already gone) and let it run its cleanup block, then clear the slot
    MAZeroingWeakRef *ref = objc_loadWeak_fptr(&_ref);
    [ref _executeCleanupBlockWithTarget: _target];
    objc_storeWeak_fptr(&_ref, nil);
    
    [super dealloc];
}

@end
|
135
|
+
|
136
|
+
|
137
|
+
@implementation MAZeroingWeakRef
|
138
|
+
|
139
|
+
#if COREFOUNDATION_HACK_LEVEL >= 2
|
140
|
+
|
141
|
+
typedef struct __CFRuntimeClass { // Version 0 struct
|
142
|
+
CFIndex version;
|
143
|
+
const char *className;
|
144
|
+
void (*init)(CFTypeRef cf);
|
145
|
+
CFTypeRef (*copy)(CFAllocatorRef allocator, CFTypeRef cf);
|
146
|
+
void (*finalize)(CFTypeRef cf);
|
147
|
+
Boolean (*equal)(CFTypeRef cf1, CFTypeRef cf2);
|
148
|
+
CFHashCode (*hash)(CFTypeRef cf);
|
149
|
+
CFStringRef (*copyFormattingDesc)(CFTypeRef cf, CFDictionaryRef formatOptions); // str with retain
|
150
|
+
CFStringRef (*copyDebugDesc)(CFTypeRef cf); // str with retain
|
151
|
+
void (*reclaim)(CFTypeRef cf);
|
152
|
+
} CFRuntimeClass;
|
153
|
+
|
154
|
+
extern CFRuntimeClass * _CFRuntimeGetClassWithTypeID(CFTypeID typeID);
|
155
|
+
|
156
|
+
typedef void (*CFFinalizeFptr)(CFTypeRef);
|
157
|
+
static CFFinalizeFptr *gCFOriginalFinalizes;
|
158
|
+
static size_t gCFOriginalFinalizesSize;
|
159
|
+
|
160
|
+
#endif
|
161
|
+
|
162
|
+
#if COREFOUNDATION_HACK_LEVEL >= 1
|
163
|
+
|
164
|
+
extern Class *__CFRuntimeObjCClassTable;
|
165
|
+
|
166
|
+
#endif
|
167
|
+
|
168
|
+
static pthread_mutex_t gMutex;
|
169
|
+
|
170
|
+
#if __APPLE__
|
171
|
+
static CFMutableDictionaryRef gObjectWeakRefsMap; // maps (non-retained) objects to CFMutableSetRefs containing weak refs
|
172
|
+
#else
|
173
|
+
static NSMapTable *gObjectWeakRefsMap;
|
174
|
+
#endif
|
175
|
+
|
176
|
+
static NSMutableSet *gCustomSubclasses;
|
177
|
+
static NSMutableDictionary *gCustomSubclassMap; // maps regular classes to their custom subclasses
|
178
|
+
|
179
|
+
#if COREFOUNDATION_HACK_LEVEL >= 3
|
180
|
+
static CFMutableSetRef gCFWeakTargets;
|
181
|
+
static NSOperationQueue *gCFDelayedDestructionQueue;
|
182
|
+
#endif
|
183
|
+
|
184
|
+
+ (void)initialize
|
185
|
+
{
|
186
|
+
if(self == [MAZeroingWeakRef class])
|
187
|
+
{
|
188
|
+
pthread_mutexattr_t mutexattr;
|
189
|
+
pthread_mutexattr_init(&mutexattr);
|
190
|
+
pthread_mutexattr_settype(&mutexattr, PTHREAD_MUTEX_RECURSIVE);
|
191
|
+
pthread_mutex_init(&gMutex, &mutexattr);
|
192
|
+
pthread_mutexattr_destroy(&mutexattr);
|
193
|
+
|
194
|
+
#if __APPLE__
|
195
|
+
gObjectWeakRefsMap = CFDictionaryCreateMutable(NULL, 0, NULL, &kCFTypeDictionaryValueCallBacks);
|
196
|
+
#else
|
197
|
+
gObjectWeakRefsMap = [[NSMapTable mapTableWithWeakToStrongObjects] retain];
|
198
|
+
#endif
|
199
|
+
gCustomSubclasses = [[NSMutableSet alloc] init];
|
200
|
+
gCustomSubclassMap = [[NSMutableDictionary alloc] init];
|
201
|
+
|
202
|
+
// see if the 10.7 ZWR runtime functions are available
|
203
|
+
// nothing special about objc_allocateClassPair, it just
|
204
|
+
// seems like a reasonable and safe choice for finding
|
205
|
+
// the runtime functions
|
206
|
+
|
207
|
+
// Farcaller: ZWR is disabled, as it doesn't work with RubyMotion reliably
|
208
|
+
objc_loadWeak_fptr = NULL;
|
209
|
+
objc_storeWeak_fptr = NULL;
|
210
|
+
|
211
|
+
#if COREFOUNDATION_HACK_LEVEL >= 3
|
212
|
+
gCFWeakTargets = CFSetCreateMutable(NULL, 0, NULL);
|
213
|
+
gCFDelayedDestructionQueue = [[NSOperationQueue alloc] init];
|
214
|
+
#endif
|
215
|
+
}
|
216
|
+
}
|
217
|
+
|
218
|
+
#if USE_BLOCKS_BASED_LOCKING
|
219
|
+
#define BLOCK_QUALIFIER __block
|
220
|
+
static void WhileLocked(void (^block)(void))
|
221
|
+
{
|
222
|
+
pthread_mutex_lock(&gMutex);
|
223
|
+
block();
|
224
|
+
pthread_mutex_unlock(&gMutex);
|
225
|
+
}
|
226
|
+
#define WhileLocked(block) WhileLocked(^block)
|
227
|
+
#else
|
228
|
+
#define BLOCK_QUALIFIER
|
229
|
+
#define WhileLocked(block) do { \
|
230
|
+
pthread_mutex_lock(&gMutex); \
|
231
|
+
block \
|
232
|
+
pthread_mutex_unlock(&gMutex); \
|
233
|
+
} while(0)
|
234
|
+
#endif
|
235
|
+
|
236
|
+
static void AddWeakRefToObject(id obj, MAZeroingWeakRef *ref)
|
237
|
+
{
|
238
|
+
#if __APPLE__
|
239
|
+
CFMutableSetRef set = (void *)CFDictionaryGetValue(gObjectWeakRefsMap, obj);
|
240
|
+
if(!set)
|
241
|
+
{
|
242
|
+
set = CFSetCreateMutable(NULL, 0, NULL);
|
243
|
+
CFDictionarySetValue(gObjectWeakRefsMap, obj, set);
|
244
|
+
CFRelease(set);
|
245
|
+
}
|
246
|
+
CFSetAddValue(set, ref);
|
247
|
+
#else
|
248
|
+
NSHashTable *set = [gObjectWeakRefsMap objectForKey:obj];
|
249
|
+
if (!set)
|
250
|
+
{
|
251
|
+
set = [NSHashTable hashTableWithWeakObjects];
|
252
|
+
[gObjectWeakRefsMap setObject:set forKey:obj];
|
253
|
+
}
|
254
|
+
[set addObject:ref];
|
255
|
+
#endif
|
256
|
+
}
|
257
|
+
|
258
|
+
static void RemoveWeakRefFromObject(id obj, MAZeroingWeakRef *ref)
|
259
|
+
{
|
260
|
+
#if __APPLE__
|
261
|
+
CFMutableSetRef set = (void *)CFDictionaryGetValue(gObjectWeakRefsMap, obj);
|
262
|
+
CFSetRemoveValue(set, ref);
|
263
|
+
#else
|
264
|
+
NSHashTable *set = [gObjectWeakRefsMap objectForKey:obj];
|
265
|
+
[set removeObject:ref];
|
266
|
+
#endif
|
267
|
+
}
|
268
|
+
|
269
|
+
static void ClearWeakRefsForObject(id obj)
|
270
|
+
{
|
271
|
+
#if __APPLE__
|
272
|
+
CFMutableSetRef set = (void *)CFDictionaryGetValue(gObjectWeakRefsMap, obj);
|
273
|
+
if(set)
|
274
|
+
{
|
275
|
+
NSSet *setCopy = [[NSSet alloc] initWithSet: (NSSet *)set];
|
276
|
+
[setCopy makeObjectsPerformSelector: @selector(_zeroTarget)];
|
277
|
+
[setCopy makeObjectsPerformSelector: @selector(_executeCleanupBlockWithTarget:) withObject: obj];
|
278
|
+
[setCopy release];
|
279
|
+
CFDictionaryRemoveValue(gObjectWeakRefsMap, obj);
|
280
|
+
}
|
281
|
+
#else
|
282
|
+
NSHashTable *set = [gObjectWeakRefsMap objectForKey:obj];
|
283
|
+
if (set)
|
284
|
+
{
|
285
|
+
NSArray *setContents = [set allObjects];
|
286
|
+
[setContents makeObjectsPerformSelector:@selector(_zeroTarget)];
|
287
|
+
[setContents makeObjectsPerformSelector:@selector(_executeCleanupBlockWithTarget:) withObject:obj];
|
288
|
+
[gObjectWeakRefsMap removeObjectForKey:obj];
|
289
|
+
}
|
290
|
+
#endif
|
291
|
+
}
|
292
|
+
|
293
|
+
static Class GetCustomSubclass(id obj)
|
294
|
+
{
|
295
|
+
Class class = object_getClass(obj);
|
296
|
+
while(class && ![gCustomSubclasses containsObject: class])
|
297
|
+
class = class_getSuperclass(class);
|
298
|
+
return class;
|
299
|
+
}
|
300
|
+
|
301
|
+
static Class GetRealSuperclass(id obj)
|
302
|
+
{
|
303
|
+
Class class = GetCustomSubclass(obj);
|
304
|
+
NSCAssert1(class, @"Coudn't find ZeroingWeakRef subclass in hierarchy starting from %@, should never happen", object_getClass(obj));
|
305
|
+
return class_getSuperclass(class);
|
306
|
+
}
|
307
|
+
|
308
|
+
static void CustomSubclassRelease(id self, SEL _cmd)
|
309
|
+
{
|
310
|
+
Class superclass = GetRealSuperclass(self);
|
311
|
+
IMP superRelease = class_getMethodImplementation(superclass, @selector(release));
|
312
|
+
WhileLocked({
|
313
|
+
((void (*)(id, SEL))superRelease)(self, _cmd);
|
314
|
+
});
|
315
|
+
}
|
316
|
+
|
317
|
+
static void CustomSubclassDealloc(id self, SEL _cmd)
|
318
|
+
{
|
319
|
+
ClearWeakRefsForObject(self);
|
320
|
+
Class superclass = GetRealSuperclass(self);
|
321
|
+
IMP superDealloc = class_getMethodImplementation(superclass, @selector(dealloc));
|
322
|
+
((void (*)(id, SEL))superDealloc)(self, _cmd);
|
323
|
+
}
|
324
|
+
|
325
|
+
static Class CustomSubclassClassForCoder(id self, SEL _cmd)
|
326
|
+
{
|
327
|
+
Class class = GetCustomSubclass(self);
|
328
|
+
Class superclass = class_getSuperclass(class);
|
329
|
+
IMP superClassForCoder = class_getMethodImplementation(superclass, @selector(classForCoder));
|
330
|
+
Class classForCoder = ((id (*)(id, SEL))superClassForCoder)(self, _cmd);
|
331
|
+
if(classForCoder == class)
|
332
|
+
classForCoder = superclass;
|
333
|
+
return classForCoder;
|
334
|
+
}
|
335
|
+
|
336
|
+
static void KVOSubclassRelease(id self, SEL _cmd)
|
337
|
+
{
|
338
|
+
IMP originalRelease = class_getMethodImplementation(object_getClass(self), @selector(MAZeroingWeakRef_KVO_original_release));
|
339
|
+
WhileLocked({
|
340
|
+
((void (*)(id, SEL))originalRelease)(self, _cmd);
|
341
|
+
});
|
342
|
+
}
|
343
|
+
|
344
|
+
static void KVOSubclassDealloc(id self, SEL _cmd)
|
345
|
+
{
|
346
|
+
ClearWeakRefsForObject(self);
|
347
|
+
IMP originalDealloc = class_getMethodImplementation(object_getClass(self), @selector(MAZeroingWeakRef_KVO_original_dealloc));
|
348
|
+
((void (*)(id, SEL))originalDealloc)(self, _cmd);
|
349
|
+
}
|
350
|
+
|
351
|
+
static void KVOSubclassRemoveObserverForKeyPath(id self, SEL _cmd, id observer, NSString *keyPath)
|
352
|
+
{
|
353
|
+
WhileLocked({
|
354
|
+
IMP originalIMP = class_getMethodImplementation(object_getClass(self), @selector(MAZeroingWeakRef_KVO_original_removeObserver:forKeyPath:));
|
355
|
+
((void (*)(id, SEL, id, NSString *))originalIMP)(self, _cmd, observer, keyPath);
|
356
|
+
|
357
|
+
EnsureCustomSubclass(self);
|
358
|
+
});
|
359
|
+
}
|
360
|
+
|
361
|
+
static void KVOSubclassRemoveObserverForKeyPathContext(id self, SEL _cmd, id observer, NSString *keyPath, void *context)
|
362
|
+
{
|
363
|
+
WhileLocked({
|
364
|
+
IMP originalIMP = class_getMethodImplementation(object_getClass(self), @selector(MAZeroingWeakRef_KVO_original_removeObserver:forKeyPath:context:));
|
365
|
+
((void (*)(id, SEL, id, NSString *, void *))originalIMP)(self, _cmd, observer, keyPath, context);
|
366
|
+
|
367
|
+
EnsureCustomSubclass(self);
|
368
|
+
});
|
369
|
+
}
|
370
|
+
|
371
|
+
#if COREFOUNDATION_HACK_LEVEL >= 3
|
372
|
+
|
373
|
+
static void CallCFReleaseLater(CFTypeRef cf)
|
374
|
+
{
|
375
|
+
mach_port_t thread = mach_thread_self(); // must "release" this
|
376
|
+
|
377
|
+
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
|
378
|
+
SEL sel = @selector(releaseLater:fromThread:);
|
379
|
+
NSInvocation *inv = [NSInvocation invocationWithMethodSignature: [MAZeroingWeakRef methodSignatureForSelector: sel]];
|
380
|
+
[inv setTarget: [MAZeroingWeakRef class]];
|
381
|
+
[inv setSelector: sel];
|
382
|
+
[inv setArgument: &cf atIndex: 2];
|
383
|
+
[inv setArgument: &thread atIndex: 3];
|
384
|
+
|
385
|
+
NSInvocationOperation *op = [[NSInvocationOperation alloc] initWithInvocation: inv];
|
386
|
+
[gCFDelayedDestructionQueue addOperation: op];
|
387
|
+
[op release];
|
388
|
+
[pool release];
|
389
|
+
}
|
390
|
+
|
391
|
+
static const void *kPCThreadExited = &kPCThreadExited;
|
392
|
+
static const void *kPCError = NULL;
|
393
|
+
|
394
|
+
static const void *GetPC(mach_port_t thread)
|
395
|
+
{
|
396
|
+
#if defined(__x86_64__)
|
397
|
+
x86_thread_state64_t state;
|
398
|
+
unsigned int count = x86_THREAD_STATE64_COUNT;
|
399
|
+
thread_state_flavor_t flavor = x86_THREAD_STATE64;
|
400
|
+
#define PC_REGISTER __rip
|
401
|
+
#elif defined(__i386__)
|
402
|
+
i386_thread_state_t state;
|
403
|
+
unsigned int count = i386_THREAD_STATE_COUNT;
|
404
|
+
thread_state_flavor_t flavor = i386_THREAD_STATE;
|
405
|
+
#define PC_REGISTER __eip
|
406
|
+
#elif defined(__arm__)
|
407
|
+
arm_thread_state_t state;
|
408
|
+
unsigned int count = ARM_THREAD_STATE_COUNT;
|
409
|
+
thread_state_flavor_t flavor = ARM_THREAD_STATE;
|
410
|
+
#define PC_REGISTER __pc
|
411
|
+
#elif defined(__ppc__)
|
412
|
+
ppc_thread_state_t state;
|
413
|
+
unsigned int count = PPC_THREAD_STATE_COUNT;
|
414
|
+
thread_state_flavor_t flavor = PPC_THREAD_STATE;
|
415
|
+
#define PC_REGISTER __srr0
|
416
|
+
#elif defined(__ppc64__)
|
417
|
+
ppc_thread_state64_t state;
|
418
|
+
unsigned int count = PPC_THREAD_STATE64_COUNT;
|
419
|
+
thread_state_flavor_t flavor = PPC_THREAD_STATE64;
|
420
|
+
#define PC_REGISTER __srr0
|
421
|
+
#else
|
422
|
+
#error don't know how to get PC for the current architecture!
|
423
|
+
#endif
|
424
|
+
|
425
|
+
kern_return_t ret = thread_get_state(thread, flavor, (thread_state_t)&state, &count);
|
426
|
+
if(ret == KERN_SUCCESS)
|
427
|
+
return (void *)state.PC_REGISTER;
|
428
|
+
else if(ret == KERN_INVALID_ARGUMENT)
|
429
|
+
return kPCThreadExited;
|
430
|
+
else
|
431
|
+
return kPCError;
|
432
|
+
}
|
433
|
+
|
434
|
+
static void CustomCFFinalize(CFTypeRef cf)
|
435
|
+
{
|
436
|
+
WhileLocked({
|
437
|
+
if(CFSetContainsValue(gCFWeakTargets, cf))
|
438
|
+
{
|
439
|
+
if(CFGetRetainCount(cf) == 1)
|
440
|
+
{
|
441
|
+
ClearWeakRefsForObject((id)cf);
|
442
|
+
CFSetRemoveValue(gCFWeakTargets, cf);
|
443
|
+
CFRetain(cf);
|
444
|
+
CallCFReleaseLater(cf);
|
445
|
+
}
|
446
|
+
}
|
447
|
+
else
|
448
|
+
{
|
449
|
+
void (*fptr)(CFTypeRef) = gCFOriginalFinalizes[CFGetTypeID(cf)];
|
450
|
+
if(fptr)
|
451
|
+
fptr(cf);
|
452
|
+
}
|
453
|
+
});
|
454
|
+
}
|
455
|
+
|
456
|
+
#elif COREFOUNDATION_HACK_LEVEL >= 2
|
457
|
+
|
458
|
+
static void CustomCFFinalize(CFTypeRef cf)
|
459
|
+
{
|
460
|
+
WhileLocked({
|
461
|
+
if(CFGetRetainCount(cf) == 1)
|
462
|
+
{
|
463
|
+
ClearWeakRefsForObject((id)cf);
|
464
|
+
void (*fptr)(CFTypeRef) = gCFOriginalFinalizes[CFGetTypeID(cf)];
|
465
|
+
if(fptr)
|
466
|
+
fptr(cf);
|
467
|
+
}
|
468
|
+
});
|
469
|
+
}
|
470
|
+
#endif
|
471
|
+
|
472
|
+
static BOOL IsTollFreeBridged(Class class, id obj)
|
473
|
+
{
|
474
|
+
#if COREFOUNDATION_HACK_LEVEL >= 1
|
475
|
+
CFTypeID typeID = CFGetTypeID(obj);
|
476
|
+
Class tfbClass = __CFRuntimeObjCClassTable[typeID];
|
477
|
+
return class == tfbClass;
|
478
|
+
#else
|
479
|
+
NSString *className = NSStringFromClass(class);
|
480
|
+
return [className hasPrefix:@"NSCF"] || [className hasPrefix:@"__NSCF"];
|
481
|
+
#endif
|
482
|
+
}
|
483
|
+
|
484
|
+
static BOOL IsConstantObject(id obj)
|
485
|
+
{
|
486
|
+
unsigned int retainCount = [obj retainCount];
|
487
|
+
return retainCount == UINT_MAX || retainCount == INT_MAX;
|
488
|
+
}
|
489
|
+
|
490
|
+
#if COREFOUNDATION_HACK_LEVEL >= 3
|
491
|
+
void _CFRelease(CFTypeRef cf);
|
492
|
+
|
493
|
+
+ (void)releaseLater: (CFTypeRef)cf fromThread: (mach_port_t)thread
|
494
|
+
{
|
495
|
+
BOOL retry = YES;
|
496
|
+
|
497
|
+
while(retry)
|
498
|
+
{
|
499
|
+
BLOCK_QUALIFIER const void *pc;
|
500
|
+
// ensure that the PC is outside our inner code when fetching it,
|
501
|
+
// so we don't have to check for all the nested calls
|
502
|
+
WhileLocked({
|
503
|
+
pc = GetPC(thread);
|
504
|
+
});
|
505
|
+
|
506
|
+
if(pc != kPCError)
|
507
|
+
{
|
508
|
+
if(pc == kPCThreadExited || pc < (void *)CustomCFFinalize || pc > (void *)IsTollFreeBridged)
|
509
|
+
{
|
510
|
+
Dl_info info;
|
511
|
+
int success = dladdr(pc, &info);
|
512
|
+
if(success)
|
513
|
+
{
|
514
|
+
if(info.dli_saddr != _CFRelease)
|
515
|
+
{
|
516
|
+
retry = NO; // success!
|
517
|
+
CFRelease(cf);
|
518
|
+
mach_port_mod_refs(mach_task_self(), thread, MACH_PORT_RIGHT_SEND, -1 ); // "release"
|
519
|
+
}
|
520
|
+
}
|
521
|
+
}
|
522
|
+
}
|
523
|
+
}
|
524
|
+
}
|
525
|
+
#endif
|
526
|
+
|
527
|
+
static BOOL IsKVOSubclass(id obj)
|
528
|
+
{
|
529
|
+
#if KVO_HACK_LEVEL >= 1
|
530
|
+
return [obj respondsToSelector: @selector(_isKVOA)] && [obj _isKVOA];
|
531
|
+
#else
|
532
|
+
return [obj class] == class_getSuperclass(object_getClass(obj));
|
533
|
+
#endif
|
534
|
+
}
|
535
|
+
|
536
|
+
// The native ZWR capability table is conceptually a set of SHA1 hashes.
|
537
|
+
// Hashes are used instead of class names because the table is large and
|
538
|
+
// contains a lot of private classes. Embedding private class names in
|
539
|
+
// the binary is likely to cause problems with app review. Manually
|
540
|
+
// removing all private classes from the table is a lot of work. Using
|
541
|
+
// hashes allows for reasonably quick checks and no private API names.
|
542
|
+
// It's implemented as a tree of tables, where each individual table
|
543
|
+
// maps to a single byte. The top level of the tree is a 256-entry table.
|
544
|
+
// Table entries are a NULL pointer for leading bytes which aren't present
|
545
|
+
// at all. Other table entries can either contain a pointer to another
|
546
|
+
// table (in which case the process continues recursively), or they can
|
547
|
+
// contain a pointer to a single hash. In this second case, this indicates
|
548
|
+
// that this hash is the only one present in the table with that prefix
|
549
|
+
// and so a simple comparison can be used to check for membership at
|
550
|
+
// that point.
|
551
|
+
#if __APPLE__
|
552
|
+
static BOOL HashPresentInTable(unsigned char *hash, int length, struct _NativeZWRTableEntry *table)
|
553
|
+
{
|
554
|
+
while(length)
|
555
|
+
{
|
556
|
+
struct _NativeZWRTableEntry entry = table[hash[0]];
|
557
|
+
if(entry.ptr == NULL)
|
558
|
+
{
|
559
|
+
return NO;
|
560
|
+
}
|
561
|
+
else if(!entry.isTable)
|
562
|
+
{
|
563
|
+
return memcmp(entry.ptr, hash + 1, length - 1) == 0;
|
564
|
+
}
|
565
|
+
else
|
566
|
+
{
|
567
|
+
hash++;
|
568
|
+
length--;
|
569
|
+
table = entry.ptr;
|
570
|
+
}
|
571
|
+
}
|
572
|
+
return NO;
|
573
|
+
}
|
574
|
+
#endif
|
575
|
+
|
576
|
+
static BOOL CanNativeZWRClass(Class c)
|
577
|
+
{
|
578
|
+
#if __APPLE__
|
579
|
+
if(!c)
|
580
|
+
return YES;
|
581
|
+
|
582
|
+
const char *name = class_getName(c);
|
583
|
+
unsigned char hash[CC_SHA1_DIGEST_LENGTH];
|
584
|
+
CC_SHA1(name, strlen(name), hash);
|
585
|
+
|
586
|
+
if(HashPresentInTable(hash, CC_SHA1_DIGEST_LENGTH, _MAZeroingWeakRefClassNativeWeakReferenceNotAllowedTable))
|
587
|
+
return NO;
|
588
|
+
else
|
589
|
+
return CanNativeZWRClass(class_getSuperclass(c));
|
590
|
+
#else
|
591
|
+
return NO;
|
592
|
+
#endif
|
593
|
+
}
|
594
|
+
|
595
|
+
static BOOL CanNativeZWR(id obj)
|
596
|
+
{
|
597
|
+
return CanNativeZWRClass(object_getClass(obj));
|
598
|
+
}
|
599
|
+
|
600
|
+
static Class CreatePlainCustomSubclass(Class class)
|
601
|
+
{
|
602
|
+
NSString *newName = [NSString stringWithFormat: @"%s_MAZeroingWeakRefSubclass", class_getName(class)];
|
603
|
+
const char *newNameC = [newName UTF8String];
|
604
|
+
|
605
|
+
Class subclass = objc_allocateClassPair(class, newNameC, 0);
|
606
|
+
|
607
|
+
Method release = class_getInstanceMethod(class, @selector(release));
|
608
|
+
Method dealloc = class_getInstanceMethod(class, @selector(dealloc));
|
609
|
+
Method classForCoder = class_getInstanceMethod(class, @selector(classForCoder));
|
610
|
+
class_addMethod(subclass, @selector(release), (IMP)CustomSubclassRelease, method_getTypeEncoding(release));
|
611
|
+
class_addMethod(subclass, @selector(dealloc), (IMP)CustomSubclassDealloc, method_getTypeEncoding(dealloc));
|
612
|
+
class_addMethod(subclass, @selector(classForCoder), (IMP)CustomSubclassClassForCoder, method_getTypeEncoding(classForCoder));
|
613
|
+
|
614
|
+
objc_registerClassPair(subclass);
|
615
|
+
|
616
|
+
return subclass;
|
617
|
+
}
|
618
|
+
|
619
|
+
// Patches a KVO-generated subclass in place (KVO objects are already
// isa-swizzled, so we can't stack another subclass on top). Each original
// implementation is first preserved under a MAZeroingWeakRef_KVO_original_*
// selector — the replacements call through to these — and only then is the
// public selector redirected. The add-before-replace order is load-bearing.
static void PatchKVOSubclass(Class class)
{
    Method origRemoveObserver = class_getInstanceMethod(class, @selector(removeObserver:forKeyPath:));
    Method origRelease = class_getInstanceMethod(class, @selector(release));
    Method origDealloc = class_getInstanceMethod(class, @selector(dealloc));
    
    // Stash the originals under private selectors.
    class_addMethod(class,
                    @selector(MAZeroingWeakRef_KVO_original_removeObserver:forKeyPath:),
                    method_getImplementation(origRemoveObserver),
                    method_getTypeEncoding(origRemoveObserver));
    class_addMethod(class, @selector(MAZeroingWeakRef_KVO_original_release), method_getImplementation(origRelease), method_getTypeEncoding(origRelease));
    class_addMethod(class, @selector(MAZeroingWeakRef_KVO_original_dealloc), method_getImplementation(origDealloc), method_getTypeEncoding(origDealloc));
    
    // Redirect the public selectors to the ZWR-aware replacements.
    class_replaceMethod(class,
                        @selector(removeObserver:forKeyPath:),
                        (IMP)KVOSubclassRemoveObserverForKeyPath,
                        method_getTypeEncoding(origRemoveObserver));
    class_replaceMethod(class, @selector(release), (IMP)KVOSubclassRelease, method_getTypeEncoding(origRelease));
    class_replaceMethod(class, @selector(dealloc), (IMP)KVOSubclassDealloc, method_getTypeEncoding(origDealloc));
    
    // -removeObserver:forKeyPath:context: only exists on 10.7/iOS 5 and
    // later, so swizzle it only when the runtime actually provides it.
    Method origRemoveObserverContext = class_getInstanceMethod(class, @selector(removeObserver:forKeyPath:context:));
    if(origRemoveObserverContext)
    {
        class_addMethod(class,
                        @selector(MAZeroingWeakRef_KVO_original_removeObserver:forKeyPath:context:),
                        method_getImplementation(origRemoveObserverContext),
                        method_getTypeEncoding(origRemoveObserverContext));
        class_replaceMethod(class,
                            @selector(removeObserver:forKeyPath:context:),
                            (IMP)KVOSubclassRemoveObserverForKeyPathContext,
                            method_getTypeEncoding(origRemoveObserverContext));
    }
}
|
655
|
+
|
656
|
+
// Records a freshly created custom subclass in the global bookkeeping:
// the superclass->subclass cache and the set of all dynamic subclasses.
// Callers are expected to hold the global lock.
static void RegisterCustomSubclass(Class subclass, Class superclass)
{
    [gCustomSubclasses addObject: subclass];
    [gCustomSubclassMap setObject: subclass forKey: (id <NSCopying>) superclass];
}
|
661
|
+
|
662
|
+
// Decides how to make `obj` trackable for manual weak references, and
// returns the class that should be associated with it:
//   - toll-free bridged CF objects: hook the CF finalizer (requires
//     COREFOUNDATION_HACK_LEVEL >= 2) and return the class unchanged;
//   - KVO-swizzled objects: patch the existing KVO subclass in place;
//   - everything else: create a plain dynamic subclass.
static Class CreateCustomSubclass(Class class, id obj)
{
    if(IsTollFreeBridged(class, obj))
    {
#if COREFOUNDATION_HACK_LEVEL >= 2
        CFTypeID typeID = CFGetTypeID(obj);
        CFRuntimeClass *cfclass = _CFRuntimeGetClassWithTypeID(typeID);
        
        // Grow the saved-finalizer table if this type ID is beyond it.
        if(typeID >= gCFOriginalFinalizesSize)
        {
            gCFOriginalFinalizesSize = typeID + 1;
            gCFOriginalFinalizes = realloc(gCFOriginalFinalizes, gCFOriginalFinalizesSize * sizeof(*gCFOriginalFinalizes));
        }
        
        // Atomically install the custom finalizer, remembering whatever
        // was there; loop until the CAS wins against concurrent patchers.
        do {
            gCFOriginalFinalizes[typeID] = cfclass->finalize;
        } while(!OSAtomicCompareAndSwapPtrBarrier(gCFOriginalFinalizes[typeID], CustomCFFinalize, (void *)&cfclass->finalize));
#else
        NSCAssert2(0, @"Cannot create zeroing weak reference to object of type %@ with COREFOUNDATION_HACK_LEVEL set to %d", class, COREFOUNDATION_HACK_LEVEL);
#endif
        return class;
    }
    
    if(IsKVOSubclass(obj))
    {
        // KVO already subclassed it; patch that subclass rather than
        // stacking a second dynamic subclass on top.
        PatchKVOSubclass(class);
        return class;
    }
    
    return CreatePlainCustomSubclass(class);
}
|
694
|
+
|
695
|
+
// Ensures `obj` is backed by a ZWR-aware class, creating (and caching) a
// custom subclass on demand. Objects that are already converted, and
// constant objects, are left untouched. Caller holds the global lock.
static void EnsureCustomSubclass(id obj)
{
    if(GetCustomSubclass(obj) || IsConstantObject(obj))
        return;
    
    Class class = object_getClass(obj);
    Class subclass = [gCustomSubclassMap objectForKey: class];
    if(subclass == nil)
    {
        subclass = CreateCustomSubclass(class, obj);
        RegisterCustomSubclass(subclass, class);
    }
    
    // Only isa-swizzle when the subclass sits directly below the current
    // class. CreateCustomSubclass can hand back something higher up the
    // hierarchy (the patched-in-place cases), and then no swap is needed.
    if(class_getSuperclass(subclass) == class)
        object_setClass(obj, subclass);
}
|
714
|
+
|
715
|
+
// Wires `ref` up to `target` using the manual (non-native) mechanism:
// converts the target's class if needed and appends the ref to the
// target's weak-ref list, all under the global lock.
static void RegisterRef(MAZeroingWeakRef *ref, id target)
{
    WhileLocked({
        EnsureCustomSubclass(target);
        AddWeakRefToObject(target, ref);
#if COREFOUNDATION_HACK_LEVEL >= 3
        // Toll-free bridged targets are additionally tracked in a CF set.
        if(IsTollFreeBridged(object_getClass(target), target))
            CFSetAddValue(gCFWeakTargets, target);
#endif
    });
}
|
726
|
+
|
727
|
+
// Detaches `ref` from its target's weak-ref list (if the target hasn't
// already been zeroed out from under it).
static void UnregisterRef(MAZeroingWeakRef *ref)
{
    WhileLocked({
        id target = ref->_target;
        if(target != nil)
            RemoveWeakRefFromObject(target, ref);
    });
}
|
736
|
+
|
737
|
+
// CF objects can be weakly referenced either through the runtime's native
// weak support or through the CF finalizer hack (hack level >= 2).
+ (BOOL)canRefCoreFoundationObjects
{
    if(objc_storeWeak_fptr)
        return YES;
    return COREFOUNDATION_HACK_LEVEL >= 2;
}
|
741
|
+
|
742
|
+
// Convenience factory: an autoreleased weak reference to `target`.
+ (id)refWithTarget: (id)target
{
    id ref = [[self alloc] initWithTarget: target];
    return [ref autorelease];
}
|
746
|
+
|
747
|
+
// Designated initializer. Prefers the runtime's native zeroing weak
// support when it exists and the target's class permits it; otherwise
// falls back to manual tracking via isa-swizzling.
- (id)initWithTarget: (id)target
{
    self = [self init];
    if(self != nil)
    {
        BOOL useNative = objc_storeWeak_fptr && CanNativeZWR(target);
        if(useNative)
        {
            objc_storeWeak_fptr(&_target, target);
            _nativeZWR = YES;
        }
        else
        {
            // Deliberately unretained: the pointer is zeroed by the
            // machinery when the target deallocates.
            _target = target;
            RegisterRef(self, target);
        }
    }
    return self;
}
|
764
|
+
|
765
|
+
// Tears down the weak reference: native refs are cleared through the
// runtime, manual refs are removed from the target's tracking list.
// (Manual retain/release file — [super dealloc] is required.)
- (void)dealloc
{
    BOOL wasNative = objc_storeWeak_fptr && _nativeZWR;
    if(wasNative)
        objc_storeWeak_fptr(&_target, nil);
    else
        UnregisterRef(self);
    
#if NS_BLOCKS_AVAILABLE
    [_cleanupBlock release];
#endif
    [super dealloc];
}
|
777
|
+
|
778
|
+
// Debug description, e.g. "<MAZeroingWeakRef: 0x... -> <target>>";
// shows (null) once the target has been zeroed.
- (NSString *)description
{
    id currentTarget = [self target];
    return [NSString stringWithFormat: @"<%@: %p -> %@>", [self class], self, currentTarget];
}
|
782
|
+
|
783
|
+
#if NS_BLOCKS_AVAILABLE
// Installs a block to run when the target deallocates. Copy-then-release
// order makes assigning the same block we already hold safe. For native
// ZWRs the runtime gives us no callback, so a helper object is associated
// with the target and fires the cleanup from its own -dealloc.
- (void)setCleanupBlock: (void (^)(id target))block
{
    void (^copiedBlock)(id) = [block copy];
    [_cleanupBlock release];
    _cleanupBlock = copiedBlock;
    
    if(objc_loadWeak_fptr && _nativeZWR)
    {
        // The pool keeps this code from artificially extending the
        // target object's lifetime via autoreleased references.
        NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
        
        id target = [self target];
        if(target != nil) @synchronized(target)
        {
            // Key is the address of this static — unique per process.
            static void *associatedKey = &associatedKey;
            NSMutableSet *cleanupHelpers = objc_getAssociatedObject(target, associatedKey);
            if(cleanupHelpers == nil)
            {
                cleanupHelpers = [NSMutableSet set];
                objc_setAssociatedObject(target, associatedKey, cleanupHelpers, OBJC_ASSOCIATION_RETAIN);
            }
            
            _MAZeroingWeakRefCleanupHelper *helper = [[_MAZeroingWeakRefCleanupHelper alloc] initWithRef: self target: target];
            [cleanupHelpers addObject: helper];
            [helper release];
        }
        
        [pool release];
    }
}
#endif
|
818
|
+
|
819
|
+
// Returns the current target, or nil once it has been deallocated. The
// manual path retains under the lock and autoreleases outside it, so the
// returned object stays valid for the caller even if the last other
// strong reference disappears concurrently.
- (id)target
{
    if(objc_loadWeak_fptr && _nativeZWR)
        return objc_loadWeak_fptr(&_target);
    
    BLOCK_QUALIFIER id ret;
    WhileLocked({
        ret = [_target retain];
    });
    return [ret autorelease];
}
|
834
|
+
|
835
|
+
// Invoked by the dealloc machinery when the target dies; just drops the
// unretained pointer. (Used only on the manual, non-native path.)
- (void)_zeroTarget
{
    _target = nil;
}
|
839
|
+
|
840
|
+
// Fires the cleanup block (if one is set) with the dying target, then
// releases and clears it so it can run at most once. Run-then-clear
// order is kept as-is deliberately. No-op when blocks are unavailable.
- (void)_executeCleanupBlockWithTarget: (id)target
{
#if NS_BLOCKS_AVAILABLE
    if(_cleanupBlock)
    {
        _cleanupBlock(target);
        [_cleanupBlock release];
        _cleanupBlock = nil;
    }
#endif
}
|
851
|
+
|
852
|
+
@end
|