
Drag, scale and rotate multiple UIImageViews

I've got it so far that I can move, scale and rotate all the objects, but the multiple objects all move together. I want them to move separately. I guess I have to change the objectAtIndex to 1, but then it just crashes.

I've used the following code:

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {

    NSArray *allTouches = [touches allObjects];

    UITouch* t;
    if([[event allTouches] count]==1){
        if (CGRectContainsPoint([Birdie frame], [[allTouches objectAtIndex:0] locationInView:theimageView]) && CGRectContainsPoint([imageViewauto frame], [[allTouches objectAtIndex:0] locationInView:theimageView])) {
            t=[[[event allTouches] allObjects] objectAtIndex:0];
            touch1=[t locationInView:nil];
        }
    }else{
        t=[[[event allTouches] allObjects] objectAtIndex:0];
        touch1=[t locationInView:nil];
        t=[[[event allTouches] allObjects] objectAtIndex:1];
        touch2=[t locationInView:nil];
    }
}

-(double)distance:(CGPoint)point1 toPoint:(CGPoint)point2
{
    double deltaX, deltaY;
    deltaX = point1.x - point2.x;
    deltaY = point1.y - point2.y;
    return sqrt(deltaX * deltaX + deltaY * deltaY);
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {

    CGPoint currentTouch1;
    CGPoint currentTouch2;
    NSArray *allTouches = [touches allObjects];
    UITouch* t;
    float scale,rotation;

    if([[event allTouches] count]==1){
        t=[[[event allTouches] allObjects] objectAtIndex:0];
        if (CGRectContainsPoint([Birdie frame], [[allTouches objectAtIndex:0] locationInView:theimageView]) && CGRectContainsPoint([imageViewauto frame], [[allTouches objectAtIndex:0] locationInView:theimageView]))
        { 
            touch2=[t locationInView:nil];
            Birdie.center=CGPointMake(Birdie.center.x+touch2.x-touch1.x,Birdie.center.y+touch2.y-touch1.y);
            imageViewauto.center=CGPointMake(imageViewauto.center.x+touch2.x-touch1.x,imageViewauto.center.y+touch2.y-touch1.y);
            touch1=touch2;
        }
    }
    else if([[event allTouches] count]==2)
    {
        t=[[[event allTouches] allObjects] objectAtIndex:0];
        currentTouch1=[t locationInView:nil];

        t=[[[event allTouches] allObjects] objectAtIndex:1];
        currentTouch2=[t locationInView:nil];

        double distance1 =  [self distance:currentTouch1 toPoint:currentTouch2];
        double distance2 = [self distance:touch1 toPoint:touch2];

        if (distance2 == 0)
        {
            //handle the case where distance is zero
        }
        else {
            scale = distance1 / distance2;
        }

        rotation=atan2(currentTouch2.y-currentTouch1.y, currentTouch2.x-currentTouch1.x)-atan2(touch2.y-touch1.y,touch2.x-touch1.x);
        if(isnan(scale)){
            scale=1.0f;
        }
        NSLog(@"rotation %f",rotation);

        NSLog(@"scale %f",scale);

        if (CGRectContainsPoint([Birdie frame], [[allTouches objectAtIndex:0] locationInView:theimageView]) && CGRectContainsPoint([imageViewauto frame], [[allTouches objectAtIndex:0] locationInView:theimageView]))
        {

            Birdie.transform=CGAffineTransformScale(Birdie.transform, scale,scale);
            Birdie.transform=CGAffineTransformRotate(Birdie.transform, rotation);

            imageViewauto.transform=CGAffineTransformScale(imageViewauto.transform, scale,scale);
            imageViewauto.transform=CGAffineTransformRotate(imageViewauto.transform, rotation);
        }
        else // In case of scaling or rotating the background imageView
        {
            imageView.transform=CGAffineTransformScale(imageView.transform, scale,scale);
            imageView.transform=CGAffineTransformRotate(imageView.transform, rotation);
        }

        touch1=currentTouch1;
        touch2=currentTouch2;
    }
}


First of all, an easy way to handle drag, scale and rotate is to use gesture recognizers, like the ones below:

    // Pinch to scale
    UIPinchGestureRecognizer *pinchRecognizer = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(scale:)];
    [pinchRecognizer setDelegate:self];
    [self.view addGestureRecognizer:pinchRecognizer];

    // Two-finger rotation
    UIRotationGestureRecognizer *rotationRecognizer = [[UIRotationGestureRecognizer alloc] initWithTarget:self action:@selector(rotate:)];
    [rotationRecognizer setDelegate:self];
    [self.view addGestureRecognizer:rotationRecognizer];

    // One-finger pan to drag
    UIPanGestureRecognizer *panRecognizer = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(move:)];
    [panRecognizer setMinimumNumberOfTouches:1];
    [panRecognizer setMaximumNumberOfTouches:1];
    [panRecognizer setDelegate:self];
    [self.view addGestureRecognizer:panRecognizer];

    // Single tap
    UITapGestureRecognizer *tapRecognizer = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(tapped:)];
    [tapRecognizer setNumberOfTapsRequired:1];
    [tapRecognizer setDelegate:self];
    [self.view addGestureRecognizer:tapRecognizer];
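
Because each recognizer's delegate is set to self, you can also allow pinch and rotation to be recognized at the same time by implementing the UIGestureRecognizerDelegate method below; this is a minimal sketch that simply returns YES unconditionally:

// UIGestureRecognizerDelegate: lets pinch and rotation (and pan) run together,
// so a two-finger gesture can scale and rotate in one go.
- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer {
    return YES;
}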

To handle multiple UIImageViews you can use the positions of the touched points; the following UIGestureRecognizer methods give you those positions:

- (CGPoint)locationInView:(UIView*)view;   
- (CGPoint)locationOfTouch:(NSUInteger)touchIndex inView:(UIView*)view;

Depending on the number of touches, you can use either the first or the second method, and then check whether the touched point lies inside the frame of the UIImageView in question. For example, for scaling you can do something like this:

-(void)scale:(id)sender {

    UIView *pinchView = [(UIPinchGestureRecognizer *)sender view];

    CGPoint first_point  = [sender locationOfTouch:0 inView:pinchView];
    CGPoint second_point = [sender locationOfTouch:1 inView:pinchView];

    // Only scale my_image_view when both fingers are inside its frame
    if (CGRectContainsPoint(my_image_view.frame, first_point) && CGRectContainsPoint(my_image_view.frame, second_point)) {
        [self.view bringSubviewToFront:pinchView];

        if ([(UIPinchGestureRecognizer *)sender state] == UIGestureRecognizerStateEnded) {
            lastScale = 1.0;
            return;
        }

        // Apply only the incremental scale since the last callback
        CGFloat scale = 1.0 - (lastScale - [(UIPinchGestureRecognizer *)sender scale]);

        CGAffineTransform currentTransform = my_image_view.transform;
        CGAffineTransform newTransform = CGAffineTransformScale(currentTransform, scale, scale);
        [my_image_view setTransform:newTransform];

        lastScale = [(UIPinchGestureRecognizer *)sender scale];
    }
}
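
A rotation handler can reuse the same frame check. The sketch below assumes the same my_image_view placeholder and a _lastRotation instance variable; the touch positions are only read while at least two fingers are down:

-(void)rotate:(id)sender {

    UIRotationGestureRecognizer *recognizer = (UIRotationGestureRecognizer *)sender;
    UIView *rotationView = [recognizer view];

    // Reset the accumulated rotation when the gesture finishes
    if (recognizer.state == UIGestureRecognizerStateEnded) {
        _lastRotation = 0.0;
        return;
    }

    // Two fingers are needed before both touch locations can be read
    if ([recognizer numberOfTouches] < 2) {
        return;
    }

    CGPoint first_point  = [recognizer locationOfTouch:0 inView:rotationView];
    CGPoint second_point = [recognizer locationOfTouch:1 inView:rotationView];

    // Only rotate the image view when both fingers are inside its frame
    if (CGRectContainsPoint(my_image_view.frame, first_point) && CGRectContainsPoint(my_image_view.frame, second_point)) {
        CGFloat rotation = [recognizer rotation] - _lastRotation;
        my_image_view.transform = CGAffineTransformRotate(my_image_view.transform, rotation);
        _lastRotation = [recognizer rotation];
    }
}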


This is working for me (the handlers below live in a UIView subclass, so they act on self):

-(void)scale:(id)sender {

    if([(UIPinchGestureRecognizer*)sender state] == UIGestureRecognizerStateBegan) {
        _lastScale = 1.0;
    }

    CGFloat scale = 1.0 - (_lastScale - [(UIPinchGestureRecognizer*)sender scale]);

    CGAffineTransform currentTransform = self.transform;
    CGAffineTransform newTransform = CGAffineTransformScale(currentTransform, scale, scale);

    [self setTransform:newTransform];


    _lastScale = [(UIPinchGestureRecognizer*)sender scale];
    [self showOverlayWithFrame:self.frame];
}


-(void)rotate:(id)sender {

    if([(UIRotationGestureRecognizer*)sender state] == UIGestureRecognizerStateEnded) {

        _lastRotation = 0.0;

        return;
    }
    CGFloat rotation = 0.0 - (_lastRotation - [(UIRotationGestureRecognizer*)sender rotation]);

    CGAffineTransform currentTransform = self.transform;
    CGAffineTransform newTransform = CGAffineTransformRotate(currentTransform,rotation);

    [self setTransform:newTransform];
    _lastRotation = [(UIRotationGestureRecognizer*)sender rotation];

    [self showOverlayWithFrame:self.frame];
}


-(void)move:(id)sender {

    CGPoint translatedPoint = [(UIPanGestureRecognizer*)sender translationInView:self];

     if([(UIPanGestureRecognizer*)sender state] == UIGestureRecognizerStateBegan) {
         _firstX = [self center].x;
         _firstY = [self center].y;
    }

    translatedPoint = CGPointMake(_firstX+translatedPoint.x, _firstY+translatedPoint.y);

    [self setCenter:translatedPoint];


    [self showOverlayWithFrame:self.frame];

    NSLog(@"after move x=%f y=%f", _firstX, _firstY);
}
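
The handlers above act on self, so they belong to a UIView subclass. For the original question (several UIImageViews that should move independently), one way is to give every image view its own set of recognizers and then transform [sender view] inside the handlers. Below is a minimal sketch; attachRecognizersTo: is a hypothetical helper and imageViews is assumed to be an array of the UIImageViews you want to manipulate:

// Hypothetical setup helper: give every image view its own recognizers.
// [sender view] inside a handler then identifies the touched image view,
// so each one can be moved, scaled and rotated independently.
- (void)attachRecognizersTo:(NSArray *)imageViews {
    for (UIImageView *iv in imageViews) {
        iv.userInteractionEnabled = YES;   // UIImageView defaults to NO
        iv.multipleTouchEnabled = YES;     // required for pinch and rotation

        UIPanGestureRecognizer *pan = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(move:)];
        pan.delegate = self;
        [iv addGestureRecognizer:pan];

        UIPinchGestureRecognizer *pinch = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(scale:)];
        pinch.delegate = self;
        [iv addGestureRecognizer:pinch];

        UIRotationGestureRecognizer *rotation = [[UIRotationGestureRecognizer alloc] initWithTarget:self action:@selector(rotate:)];
        rotation.delegate = self;
        [iv addGestureRecognizer:rotation];
    }
}

// In the handlers, act on [sender view] instead of self; e.g. for panning:
- (void)move:(id)sender {
    UIPanGestureRecognizer *pan = (UIPanGestureRecognizer *)sender;
    UIView *touchedView = [pan view];
    CGPoint translation = [pan translationInView:touchedView.superview];
    touchedView.center = CGPointMake(touchedView.center.x + translation.x,
                                     touchedView.center.y + translation.y);
    [pan setTranslation:CGPointZero inView:touchedView.superview];
}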