@@ -91,9 +91,9 @@ bool _play_video(String p_path, float p_volume, String p_audio_track, String p_s

     [_instance.avPlayer addObserver:_instance forKeyPath:@"status" options:0 context:nil];
     [[NSNotificationCenter defaultCenter] addObserver:_instance
-             selector:@selector(playerItemDidReachEnd:)
-                 name:AVPlayerItemDidPlayToEndTimeNotification
-               object:[_instance.avPlayer currentItem]];
+             selector:@selector(playerItemDidReachEnd:)
+                 name:AVPlayerItemDidPlayToEndTimeNotification
+               object:[_instance.avPlayer currentItem]];

     [_instance.avPlayer addObserver:_instance forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:0];

@@ -104,13 +104,11 @@ bool _play_video(String p_path, float p_volume, String p_audio_track, String p_s
     AVMediaSelectionGroup *audioGroup = [_instance.avAsset mediaSelectionGroupForMediaCharacteristic: AVMediaCharacteristicAudible];

     NSMutableArray *allAudioParams = [NSMutableArray array];
-    for (id track in audioGroup.options)
-    {
+    for (id track in audioGroup.options) {
         NSString* language = [[track locale] localeIdentifier];
         NSLog(@"subtitle lang: %@", language);

-        if ([language isEqualToString:[NSString stringWithUTF8String:p_audio_track.utf8()]])
-        {
+        if ([language isEqualToString:[NSString stringWithUTF8String:p_audio_track.utf8()]]) {
             AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
             [audioInputParams setVolume:p_volume atTime:kCMTimeZero];
             [audioInputParams setTrackID:[track trackID]];
@@ -122,23 +120,21 @@ bool _play_video(String p_path, float p_volume, String p_audio_track, String p_s
             [_instance.avPlayer.currentItem selectMediaOption:track inMediaSelectionGroup: audioGroup];
             [_instance.avPlayer.currentItem setAudioMix:audioMix];

-            break;
-        }
+            break;
+        }
     }

     AVMediaSelectionGroup *subtitlesGroup = [_instance.avAsset mediaSelectionGroupForMediaCharacteristic: AVMediaCharacteristicLegible];
     NSArray *useableTracks = [AVMediaSelectionGroup mediaSelectionOptionsFromArray:subtitlesGroup.options withoutMediaCharacteristics:[NSArray arrayWithObject:AVMediaCharacteristicContainsOnlyForcedSubtitles]];

-    for (id track in useableTracks)
-    {
+    for (id track in useableTracks) {
         NSString* language = [[track locale] localeIdentifier];
         NSLog(@"subtitle lang: %@", language);

-        if ([language isEqualToString:[NSString stringWithUTF8String:p_subtitle_track.utf8()]])
-        {
-            [_instance.avPlayer.currentItem selectMediaOption:track inMediaSelectionGroup: subtitlesGroup];
-            break;
-        }
+        if ([language isEqualToString:[NSString stringWithUTF8String:p_subtitle_track.utf8()]]) {
+            [_instance.avPlayer.currentItem selectMediaOption:track inMediaSelectionGroup: subtitlesGroup];
+            break;
+        }
     }

     video_playing = true;
@@ -246,14 +242,12 @@ static void clear_touches() {

 // Implement this to override the default layer class (which is [CALayer class]).
 // We do this so that our view will be backed by a layer that is capable of OpenGL ES rendering.
-+ (Class) layerClass
-{
++ (Class) layerClass {
     return [CAEAGLLayer class];
 }

 //The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
-- (id)initWithCoder:(NSCoder*)coder
-{
+- (id)initWithCoder:(NSCoder*)coder {
     active = FALSE;
     if((self = [super initWithCoder:coder]))
     {
@@ -262,8 +256,7 @@ static void clear_touches() {
     return self;
 }

--(id)initGLES
-{
+-(id)initGLES {
     // Get our backing layer
     CAEAGLLayer *eaglLayer = (CAEAGLLayer*) self.layer;

@@ -277,8 +270,7 @@ static void clear_touches() {
     // Create our EAGLContext, and if successful make it current and create our framebuffer.
     context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];

-    if(!context || ![EAGLContext setCurrentContext:context] || ![self createFramebuffer])
-    {
+    if(!context || ![EAGLContext setCurrentContext:context] || ![self createFramebuffer]) {
         [self release];
         return nil;
     }
@@ -288,14 +280,12 @@ static void clear_touches() {
     return self;
 }

--(id<GLViewDelegate>)delegate
-{
+-(id<GLViewDelegate>)delegate {
     return delegate;
 }

 // Update the delegate, and if it needs a -setupView: call, set our internal flag so that it will be called.
--(void)setDelegate:(id<GLViewDelegate>)d
-{
+-(void)setDelegate:(id<GLViewDelegate>)d {
     delegate = d;
     delegateSetup = ![delegate respondsToSelector:@selector(setupView:)];
 }
@@ -306,8 +296,7 @@ static void clear_touches() {
 // This is the perfect opportunity to also update the framebuffer so that it is
 // the same size as our display area.

--(void)layoutSubviews
-{
+-(void)layoutSubviews {
     //printf("HERE\n");
     [EAGLContext setCurrentContext:context];
     [self destroyFramebuffer];
@@ -317,8 +306,7 @@ static void clear_touches() {

 }

-- (BOOL)createFramebuffer
-{
+- (BOOL)createFramebuffer {
     // Generate IDs for a framebuffer object and a color renderbuffer
     UIScreen* mainscr = [UIScreen mainScreen];
     printf("******** screen size %i, %i\n", (int)mainscr.currentMode.size.width, (int)mainscr.currentMode.size.height);
@@ -345,8 +333,7 @@ static void clear_touches() {
     glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
     glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);

-    if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
-    {
+    if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES) {
         NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
         return NO;
     }
@@ -366,8 +353,7 @@ static void clear_touches() {
 }

 // Clean up any buffers we have allocated.
-- (void)destroyFramebuffer
-{
+- (void)destroyFramebuffer {
     glDeleteFramebuffersOES(1, &viewFramebuffer);
     viewFramebuffer = 0;
     glDeleteRenderbuffersOES(1, &viewRenderbuffer);
@@ -380,8 +366,7 @@ static void clear_touches() {
     }
 }

-- (void)startAnimation
-{
+- (void)startAnimation {
     if (active)
         return;
     active = TRUE;
@@ -397,19 +382,16 @@ static void clear_touches() {

         // Setup DisplayLink in main thread
         [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
-    }
-    else {
+    } else {
         animationTimer = [NSTimer scheduledTimerWithTimeInterval:animationInterval target:self selector:@selector(drawView) userInfo:nil repeats:YES];
     }

-    if (video_playing)
-    {
+    if (video_playing) {
         _unpause_video();
     }
 }

-- (void)stopAnimation
-{
+- (void)stopAnimation {
     if (!active)
         return;
     active = FALSE;
@@ -418,22 +400,19 @@ static void clear_touches() {
     if (useCADisplayLink) {
         [displayLink invalidate];
         displayLink = nil;
-    }
-    else {
+    } else {
         [animationTimer invalidate];
         animationTimer = nil;
     }

     clear_touches();

-    if (video_playing)
-    {
+    if (video_playing) {
         // save position
     }
 }

-- (void)setAnimationInterval:(NSTimeInterval)interval
-{
+- (void)setAnimationInterval:(NSTimeInterval)interval {
     animationInterval = interval;
     if ( (useCADisplayLink && displayLink) || ( !useCADisplayLink && animationTimer ) ) {
         [self stopAnimation];
@@ -442,8 +421,7 @@ static void clear_touches() {
 }

 // Updates the OpenGL view when the timer fires
-- (void)drawView
-{
+- (void)drawView {
     if (useCADisplayLink) {
         // Pause the CADisplayLink to avoid recursion
         [displayLink setPaused: YES];
@@ -464,8 +442,7 @@ static void clear_touches() {
     [EAGLContext setCurrentContext:context];

     // If our drawing delegate needs to have the view setup, then call -setupView: and flag that it won't need to be called again.
-    if(!delegateSetup)
-    {
+    if(!delegateSetup) {
         [delegate setupView:self];
         delegateSetup = YES;
     }
@@ -484,8 +461,7 @@ static void clear_touches() {
 #endif
 }

-- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
-{
+- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
     NSArray* tlist = [[event allTouches] allObjects];
     for (unsigned int i=0; i< [tlist count]; i++) {

@@ -502,8 +478,7 @@ static void clear_touches() {
     };
 }

-- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
-{
+- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {

     NSArray* tlist = [[event allTouches] allObjects];
     for (unsigned int i=0; i< [tlist count]; i++) {
@@ -524,8 +499,7 @@ static void clear_touches() {

 }

-- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
-{
+- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
     NSArray* tlist = [[event allTouches] allObjects];
     for (unsigned int i=0; i< [tlist count]; i++) {

@@ -582,8 +556,7 @@ static void clear_touches() {
     printf("inserting text with character %i\n", character[0]);
 };

-- (void)audioRouteChangeListenerCallback:(NSNotification*)notification
-{
+- (void)audioRouteChangeListenerCallback:(NSNotification*)notification {
     printf("*********** route changed!\n");
     NSDictionary *interuptionDict = notification.userInfo;

@@ -591,12 +564,12 @@ static void clear_touches() {

     switch (routeChangeReason) {

-        case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
+        case AVAudioSessionRouteChangeReasonNewDeviceAvailable: {
             NSLog(@"AVAudioSessionRouteChangeReasonNewDeviceAvailable");
             NSLog(@"Headphone/Line plugged in");
-            break;
+        }; break;

-        case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
+        case AVAudioSessionRouteChangeReasonOldDeviceUnavailable: {
             NSLog(@"AVAudioSessionRouteChangeReasonOldDeviceUnavailable");
             NSLog(@"Headphone/Line was pulled. Resuming video play....");
             if (_is_video_playing()) {
@@ -606,19 +579,18 @@ static void clear_touches() {
                     NSLog(@"resumed play");
                 });
             };
-            break;
+        }; break;

-        case AVAudioSessionRouteChangeReasonCategoryChange:
+        case AVAudioSessionRouteChangeReasonCategoryChange: {
             // called at start - also when other audio wants to play
             NSLog(@"AVAudioSessionRouteChangeReasonCategoryChange");
-            break;
+        }; break;
     }
 }


 // When created via code however, we get initWithFrame
--(id)initWithFrame:(CGRect)frame
-{
+-(id)initWithFrame:(CGRect)frame {
     self = [super initWithFrame:frame];
     _instance = self;
     printf("after init super %p\n", self);
@@ -633,7 +605,7 @@ static void clear_touches() {
     printf("******** adding observer for sound routing changes\n");
     [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(audioRouteChangeListenerCallback:)
               name:AVAudioSessionRouteChangeNotification
-             object:nil];
+             object:nil];

     //self.autoresizesSubviews = YES;
     //[self setAutoresizingMask:UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleWidth];
@@ -641,21 +613,19 @@ static void clear_touches() {
     return self;
 }

-// -(BOOL)automaticallyForwardAppearanceAndRotationMethodsToChildViewControllers {
-//     return YES;
-// }
+//- (BOOL)automaticallyForwardAppearanceAndRotationMethodsToChildViewControllers {
+//     return YES;
+//}

-// - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation{
-//     return YES;
-// }
+//- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation{
+//     return YES;
+//}

 // Stop animating and release resources when they are no longer needed.
-- (void)dealloc
-{
+- (void)dealloc {
     [self stopAnimation];

-    if([EAGLContext currentContext] == context)
-    {
+    if([EAGLContext currentContext] == context) {
         [EAGLContext setCurrentContext:nil];
     }

@@ -665,25 +635,24 @@ static void clear_touches() {
     [super dealloc];
 }

-- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
-        change:(NSDictionary *)change context:(void *)context {
+- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {

     if (object == _instance.avPlayerItem && [keyPath isEqualToString:@"status"]) {
-        if (_instance.avPlayerItem.status == AVPlayerStatusFailed || _instance.avPlayer.status == AVPlayerStatusFailed) {
-            _stop_video();
-            video_found_error = true;
-        }
+        if (_instance.avPlayerItem.status == AVPlayerStatusFailed || _instance.avPlayer.status == AVPlayerStatusFailed) {
+            _stop_video();
+            video_found_error = true;
+        }

-        if(_instance.avPlayer.status == AVPlayerStatusReadyToPlay &&
-                _instance.avPlayerItem.status == AVPlayerItemStatusReadyToPlay &&
-                CMTIME_COMPARE_INLINE(video_current_time, ==, kCMTimeZero)) {
+        if(_instance.avPlayer.status == AVPlayerStatusReadyToPlay &&
+                _instance.avPlayerItem.status == AVPlayerItemStatusReadyToPlay &&
+                CMTIME_COMPARE_INLINE(video_current_time, ==, kCMTimeZero)) {

-            //NSLog(@"time: %@", video_current_time);
+            //NSLog(@"time: %@", video_current_time);

-            [_instance.avPlayer seekToTime:video_current_time];
-            video_current_time = kCMTimeZero;
+            [_instance.avPlayer seekToTime:video_current_time];
+            video_current_time = kCMTimeZero;
         }
-    }
+    }

     if (object == _instance.avPlayer && [keyPath isEqualToString:@"rate"]) {
         NSLog(@"Player playback rate changed: %.5f", _instance.avPlayer.rate);
@@ -699,40 +668,40 @@ static void clear_touches() {
 }

 - (void)playerItemDidReachEnd:(NSNotification *)notification {
-    _stop_video();
+    _stop_video();
 }

 /*
 - (void)moviePlayBackDidFinish:(NSNotification*)notification {

-    NSNumber* reason = [[notification userInfo] objectForKey:MPMoviePlayerPlaybackDidFinishReasonUserInfoKey];
-    switch ([reason intValue]) {
-        case MPMovieFinishReasonPlaybackEnded:
-            //NSLog(@"Playback Ended");
-            break;
-        case MPMovieFinishReasonPlaybackError:
-            //NSLog(@"Playback Error");
-            video_found_error = true;
-            break;
-        case MPMovieFinishReasonUserExited:
-            //NSLog(@"User Exited");
-            video_found_error = true;
-            break;
-        default:
-            //NSLog(@"Unsupported reason!");
-            break;
-    }
-
-    MPMoviePlayerController *player = [notification object];
-
-    [[NSNotificationCenter defaultCenter]
-        removeObserver:self
-        name:MPMoviePlayerPlaybackDidFinishNotification
-        object:player];
-
-    [_instance.moviePlayerController stop];
-    [_instance.moviePlayerController.view removeFromSuperview];
+    NSNumber* reason = [[notification userInfo] objectForKey:MPMoviePlayerPlaybackDidFinishReasonUserInfoKey];
+    switch ([reason intValue]) {
+        case MPMovieFinishReasonPlaybackEnded:
+            //NSLog(@"Playback Ended");
+            break;
+        case MPMovieFinishReasonPlaybackError:
+            //NSLog(@"Playback Error");
+            video_found_error = true;
+            break;
+        case MPMovieFinishReasonUserExited:
+            //NSLog(@"User Exited");
+            video_found_error = true;
+            break;
+        default:
+            //NSLog(@"Unsupported reason!");
+            break;
+    }
+
+    MPMoviePlayerController *player = [notification object];
+
+    [[NSNotificationCenter defaultCenter]
+        removeObserver:self
+        name:MPMoviePlayerPlaybackDidFinishNotification
+        object:player];
+
+    [_instance.moviePlayerController stop];
+    [_instance.moviePlayerController.view removeFromSuperview];

     //[[MPMusicPlayerController applicationMusicPlayer] setVolume: video_previous_volume];
     video_playing = false;