// react-native/React/Base/RCTTouchHandler.m


/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
#import "RCTTouchHandler.h"
#import <UIKit/UIGestureRecognizerSubclass.h>
#import "RCTAssert.h"
#import "RCTBridge.h"
#import "RCTEventDispatcher.h"
#import "RCTLog.h"
#import "RCTTouchEvent.h"
#import "RCTUIManager.h"
#import "RCTUtils.h"
#import "UIView+React.h"
@interface RCTTouchHandler () <UIGestureRecognizerDelegate>
@end
// TODO: this class behaves a lot like a module, and could be implemented as a
// module if we were to assume that modules and RootViews had a 1:1 relationship
@implementation RCTTouchHandler
{
  __weak RCTEventDispatcher *_eventDispatcher;

  /**
   * Arrays managed in parallel, tracking the native touch object, the native
   * view that was touched, and the React touch data dictionary. We must track
   * these ourselves because `UIKit` destroys the touch targets if touches are
   * canceled, and we have no other way to recover this info.
   */
  NSMutableOrderedSet<UITouch *> *_nativeTouches;
  NSMutableArray<NSMutableDictionary *> *_reactTouches;
  NSMutableArray<UIView *> *_touchViews;

  uint16_t _coalescingKey;
}
- (instancetype)initWithBridge:(RCTBridge *)bridge
{
  RCTAssertParam(bridge);

  if ((self = [super initWithTarget:nil action:NULL])) {
    _eventDispatcher = [bridge moduleForClass:[RCTEventDispatcher class]];

    _nativeTouches = [NSMutableOrderedSet new];
    _reactTouches = [NSMutableArray new];
    _touchViews = [NSMutableArray new];

    // `cancelsTouchesInView` and `delaysTouches*` are needed so this recognizer
    // can be used as a top-level, event-delegating recognizer. Otherwise,
    // lower-level components not built with RCT will fail to recognize gestures.
    self.cancelsTouchesInView = NO;
    self.delaysTouchesBegan = NO; // This is the default value.
    self.delaysTouchesEnded = NO;

    self.delegate = self;
  }

  return self;
}
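
// Illustrative usage (hypothetical call site, not part of this file): a root
// view would typically create the handler once and attach it to itself:
//
//   RCTTouchHandler *touchHandler = [[RCTTouchHandler alloc] initWithBridge:bridge];
//   [touchHandler attachToView:rootView];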
RCT_NOT_IMPLEMENTED(- (instancetype)initWithTarget:(id)target action:(SEL)action)
- (void)attachToView:(UIView *)view
{
  RCTAssert(self.view == nil, @"RCTTouchHandler already has attached view.");

  [view addGestureRecognizer:self];
}
- (void)detachFromView:(UIView *)view
{
  RCTAssertParam(view);
  RCTAssert(self.view == view, @"RCTTouchHandler attached to another view.");

  [view removeGestureRecognizer:self];
}
#pragma mark - Bookkeeping for touch indices
- (void)_recordNewTouches:(NSSet<UITouch *> *)touches
{
  for (UITouch *touch in touches) {
    RCTAssert(![_nativeTouches containsObject:touch],
              @"Touch is already recorded. This is a critical bug.");

    // Find the closest React-managed touchable view
    UIView *targetView = touch.view;
    while (targetView) {
      if (targetView.reactTag && targetView.userInteractionEnabled) {
        break;
      }
      targetView = targetView.superview;
    }

    NSNumber *reactTag = [targetView reactTagAtPoint:[touch locationInView:targetView]];
    if (!reactTag || !targetView.userInteractionEnabled) {
      continue;
    }

    // Get a new, unique touch identifier for the React touch
    const NSUInteger RCTMaxTouches = 11; // This is the maximum supported by iDevices
    NSInteger touchID = ([_reactTouches.lastObject[@"identifier"] integerValue] + 1) % RCTMaxTouches;

    // Scan the existing touches to skip past identifiers that are already in use
    for (NSDictionary *reactTouch in _reactTouches) {
      NSInteger usedID = [reactTouch[@"identifier"] integerValue];
      if (usedID == touchID) {
        // ID has already been used, try the next value
        touchID++;
      } else if (usedID > touchID) {
        // If usedID > touchID, touchID must be unique, so we can stop looking
        break;
      }
    }

    // Create touch
    NSMutableDictionary *reactTouch = [[NSMutableDictionary alloc] initWithCapacity:RCTMaxTouches];
    reactTouch[@"target"] = reactTag;
    reactTouch[@"identifier"] = @(touchID);

    // Add to arrays
    [_touchViews addObject:targetView];
    [_nativeTouches addObject:touch];
    [_reactTouches addObject:reactTouch];
  }
}
- (void)_recordRemovedTouches:(NSSet<UITouch *> *)touches
{
  for (UITouch *touch in touches) {
    NSUInteger index = [_nativeTouches indexOfObject:touch];
    if (index == NSNotFound) {
      continue;
    }

    [_touchViews removeObjectAtIndex:index];
    [_nativeTouches removeObjectAtIndex:index];
    [_reactTouches removeObjectAtIndex:index];
  }
}
- (void)_updateReactTouchAtIndex:(NSInteger)touchIndex
{
  UITouch *nativeTouch = _nativeTouches[touchIndex];
  CGPoint windowLocation = [nativeTouch locationInView:nativeTouch.window];

  // `pageX/Y` are relative to the root view (the view this recognizer is
  // attached to); `locationX/Y` are relative to the touched view itself.
  CGPoint rootViewLocation = [nativeTouch.window convertPoint:windowLocation toView:self.view];

  UIView *touchView = _touchViews[touchIndex];
  CGPoint touchViewLocation = [nativeTouch.window convertPoint:windowLocation toView:touchView];

  NSMutableDictionary *reactTouch = _reactTouches[touchIndex];
  reactTouch[@"pageX"] = @(RCTSanitizeNaNValue(rootViewLocation.x, @"touchEvent.pageX"));
  reactTouch[@"pageY"] = @(RCTSanitizeNaNValue(rootViewLocation.y, @"touchEvent.pageY"));
  reactTouch[@"locationX"] = @(RCTSanitizeNaNValue(touchViewLocation.x, @"touchEvent.locationX"));
  reactTouch[@"locationY"] = @(RCTSanitizeNaNValue(touchViewLocation.y, @"touchEvent.locationY"));
  reactTouch[@"timestamp"] = @(nativeTouch.timestamp * 1000); // in ms, for JS

  // TODO: force for a 'normal' touch is usually 1.0;
  // should we expose a `normalTouchForce` constant somewhere (which would
  // have a value of `1.0 / nativeTouch.maximumPossibleForce`)?
  if (RCTForceTouchAvailable()) {
    reactTouch[@"force"] = @(RCTSanitizeNaNValue(nativeTouch.force / nativeTouch.maximumPossibleForce, @"touchEvent.force"));
  }
}
/**
* Constructs information about touch events to send across the serialized
* boundary. This data should be compliant with W3C `Touch` objects. This data
* alone isn't sufficient to construct W3C `Event` objects. To construct that,
* there must be a simple receiver on the other side of the bridge that
* organizes the touch objects into `Event`s.
*
* We send the data as an array of `Touch`es, the type of action
* (start/end/move/cancel) and the indices that represent "changed" `Touch`es
* from that array.
*/
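// Illustrative shape of one dispatched event (values are made-up examples):
//
//   eventName:      @"touchMove"
//   reactTouches:   @[ @{ @"target": @4, @"identifier": @0,
//                         @"pageX": @120.5, @"pageY": @304.0,
//                         @"locationX": @8.5, @"locationY": @12.0,
//                         @"timestamp": @1234567.0 } ]
//   changedIndexes: @[ @0 ]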
- (void)_updateAndDispatchTouches:(NSSet<UITouch *> *)touches
                        eventName:(NSString *)eventName
{
  // Update touches
  NSMutableArray<NSNumber *> *changedIndexes = [NSMutableArray new];
  for (UITouch *touch in touches) {
    NSInteger index = [_nativeTouches indexOfObject:touch];
    if (index == NSNotFound) {
      continue;
    }

    [self _updateReactTouchAtIndex:index];
    [changedIndexes addObject:@(index)];
  }

  if (changedIndexes.count == 0) {
    return;
  }

  // Deep copy the touches because they will be accessed from another thread
  // TODO: would it be safer to do this in the bridge or executor, rather than trusting the caller?
  NSMutableArray<NSDictionary *> *reactTouches =
    [[NSMutableArray alloc] initWithCapacity:_reactTouches.count];
  for (NSDictionary *touch in _reactTouches) {
    [reactTouches addObject:[touch copy]];
  }

  BOOL canBeCoalesced = [eventName isEqualToString:@"touchMove"];

  // We increment `_coalescingKey` twice, once before and once after creating
  // the event, to make sure this `_coalescingKey` is never reused by another
  // (preceding or following) event, even though coalescing only happens (and
  // makes sense) on events of the same type.
  if (!canBeCoalesced) {
    _coalescingKey++;
  }

  RCTTouchEvent *event = [[RCTTouchEvent alloc] initWithEventName:eventName
                                                         reactTag:self.view.reactTag
                                                     reactTouches:reactTouches
                                                   changedIndexes:changedIndexes
                                                    coalescingKey:_coalescingKey];
  if (!canBeCoalesced) {
    _coalescingKey++;
  }

  [_eventDispatcher sendEvent:event];
}
#pragma mark - Gesture Recognizer Delegate Callbacks
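/**
 * YES if every touch in `touches` has ended or been cancelled, i.e. none of
 * them is still in the began, moved, or stationary phase.
 */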
static BOOL RCTAllTouchesAreCancelledOrEnded(NSSet<UITouch *> *touches)
{
  for (UITouch *touch in touches) {
    if (touch.phase == UITouchPhaseBegan ||
        touch.phase == UITouchPhaseMoved ||
        touch.phase == UITouchPhaseStationary) {
      return NO;
    }
  }
  return YES;
}
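/**
 * YES if at least one touch in `touches` began or moved as part of this event.
 */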
static BOOL RCTAnyTouchesChanged(NSSet<UITouch *> *touches)
{
  for (UITouch *touch in touches) {
    if (touch.phase == UITouchPhaseBegan ||
        touch.phase == UITouchPhaseMoved) {
      return YES;
    }
  }
  return NO;
}
#pragma mark - `UIResponder`-ish touch-delivery methods
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
  [super touchesBegan:touches withEvent:event];

  // "start" has to record new touches *before* extracting the event.
  // "end"/"cancel" need to remove the touch *after* extracting the event.
  [self _recordNewTouches:touches];
  [self _updateAndDispatchTouches:touches eventName:@"touchStart"];

  // Advance the recognizer's state machine: the first touch moves us from
  // `Possible` to `Began`; any further new touches report as `Changed`.
  if (self.state == UIGestureRecognizerStatePossible) {
    self.state = UIGestureRecognizerStateBegan;
  } else if (self.state == UIGestureRecognizerStateBegan) {
    self.state = UIGestureRecognizerStateChanged;
  }
}
- (void)touchesMoved:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
  [super touchesMoved:touches withEvent:event];
  [self _updateAndDispatchTouches:touches eventName:@"touchMove"];
  self.state = UIGestureRecognizerStateChanged;
}
- (void)touchesEnded:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
  [super touchesEnded:touches withEvent:event];
  [self _updateAndDispatchTouches:touches eventName:@"touchEnd"];

  if (RCTAllTouchesAreCancelledOrEnded(event.allTouches)) {
    self.state = UIGestureRecognizerStateEnded;
  } else if (RCTAnyTouchesChanged(event.allTouches)) {
    self.state = UIGestureRecognizerStateChanged;
  }

  [self _recordRemovedTouches:touches];
}
- (void)touchesCancelled:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event
{
  [super touchesCancelled:touches withEvent:event];
  [self _updateAndDispatchTouches:touches eventName:@"touchCancel"];

  if (RCTAllTouchesAreCancelledOrEnded(event.allTouches)) {
    self.state = UIGestureRecognizerStateCancelled;
  } else if (RCTAnyTouchesChanged(event.allTouches)) {
    self.state = UIGestureRecognizerStateChanged;
  }

  [self _recordRemovedTouches:touches];
}
- (BOOL)canPreventGestureRecognizer:(__unused UIGestureRecognizer *)preventedGestureRecognizer
{
  return NO;
}
- (BOOL)canBePreventedByGestureRecognizer:(UIGestureRecognizer *)preventingGestureRecognizer
{
  // We fail in favour of other external gesture recognizers.
  // iOS will ask the `delegate`'s opinion about this gesture recognizer a little bit later.
  return ![preventingGestureRecognizer.view isDescendantOfView:self.view];
}
- (void)reset
{
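  // UIKit calls `reset` when the recognizer finishes or gets cancelled. If we
  // are still tracking touches at that point, synthesize a "touchCancel" so JS
  // is not left with orphaned touch records.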
  if (_nativeTouches.count != 0) {
    [self _updateAndDispatchTouches:_nativeTouches.set eventName:@"touchCancel"];

    [_nativeTouches removeAllObjects];
    [_reactTouches removeAllObjects];
    [_touchViews removeAllObjects];
  }
}
#pragma mark - Other
- (void)cancel
{
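  // Toggling `enabled` off and back on makes UIKit cancel the recognizer's
  // in-flight touches (which in turn triggers `reset` above); this is the
  // standard way to cancel a gesture recognizer programmatically.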
  self.enabled = NO;
  self.enabled = YES;
}
#pragma mark - UIGestureRecognizerDelegate
- (BOOL)gestureRecognizer:(__unused UIGestureRecognizer *)gestureRecognizer shouldRequireFailureOfGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer
{
  // Same condition for `failure of` as for `be prevented by`.
  return [self canBePreventedByGestureRecognizer:otherGestureRecognizer];
}
@end