Unity 2D touch controls

I have a script called PlayerCharacter that controls the player in a Unity 2D platformer. It works fine as it is.

    using UnityEngine;
    using System.Collections;
    using System.Collections.Generic;

    [RequireComponent(typeof(Rigidbody2D))]
    [RequireComponent(typeof(BoxCollider2D))]
    public class PlayerCharacter : MonoBehaviour
    {
        public float speed = 1.0f;
        public string axisName = "Horizontal";
        private Animator anim;
        public string jumpButton = "Fire1";
        public float jumpPower = 10.0f;
        public float minJumpDelay = 0.5f;
        public Transform[] groundChecks;
        private float jumpTime = 0.0f;
        private Transform currentPlatform = null;
        private Vector3 lastPlatformPosition = Vector3.zero;
        private Vector3 currentPlatformDelta = Vector3.zero;

        // Use this for initialization
        void Start()
        {
            anim = gameObject.GetComponent<Animator>();
        }

        // Update is called once per frame
        void Update()
        {
            // Left and right movement
            anim.SetFloat("Speed", Mathf.Abs(Input.GetAxis(axisName)));
            if (Input.GetAxis(axisName) < 0)
            {
                Vector3 newScale = transform.localScale;
                newScale.x = -1.0f;
                transform.localScale = newScale;
                Debug.Log("Move to left");
            }
            else if (Input.GetAxis(axisName) > 0)
            {
                Vector3 newScale = transform.localScale;
                newScale.x = 1.0f;
                transform.localScale = newScale;
                Debug.Log("Move to Right");
            }
            transform.position += transform.right * Input.GetAxis(axisName) * speed * Time.deltaTime;

            // Jump logic
            bool grounded = false;
            foreach (Transform groundCheck in groundChecks)
            {
                grounded |= Physics2D.Linecast(transform.position, groundCheck.position, 1 << LayerMask.NameToLayer("Ground"));
            }
            anim.SetBool("Grounded", grounded);
            if (jumpTime > 0)
            {
                jumpTime -= Time.deltaTime;
            }
            if (Input.GetButton(jumpButton) && anim.GetBool("Grounded"))
            {
                anim.SetBool("Jump", true);
                rigidbody2D.AddForce(transform.up * jumpPower);
                jumpTime = minJumpDelay;
            }
            if (anim.GetBool("Grounded") && jumpTime <= 0)
            {
                anim.SetBool("Jump", false);
            }

            // Moving platform logic
            // Check what platform we are on
            List<Transform> platforms = new List<Transform>();
            bool onSamePlatform = false;
            foreach (Transform groundCheck in groundChecks)
            {
                RaycastHit2D hit = Physics2D.Linecast(transform.position, groundCheck.position, 1 << LayerMask.NameToLayer("Ground"));
                if (hit.transform != null)
                {
                    platforms.Add(hit.transform);
                    if (currentPlatform == hit.transform)
                    {
                        onSamePlatform = true;
                    }
                }
            }
            if (!onSamePlatform)
            {
                foreach (Transform platform in platforms)
                {
                    currentPlatform = platform;
                    lastPlatformPosition = currentPlatform.position;
                }
            }
        }

        void LateUpdate()
        {
            if (currentPlatform != null)
            {
                // Determine how far the platform has moved
                currentPlatformDelta = currentPlatform.position - lastPlatformPosition;
                lastPlatformPosition = currentPlatform.position;
            }
            if (currentPlatform != null)
            {
                // Move with the platform
                transform.position += currentPlatformDelta;
            }
        }
    }

The problem comes when I try to adapt the script to a touch controller. I have searched Google many times and modified the script as best I could, but it still does not work (by the way, I'm new to Unity). Then I found a tutorial about building a touch controller with GUI textures (TouchControls), which I found easy to follow. Here is that script:

    using UnityEngine;
    using System.Collections;

    [RequireComponent(typeof(Rigidbody2D))]
    [RequireComponent(typeof(BoxCollider2D))]
    public class TouchControls : MonoBehaviour
    {
        // GUI textures
        public GUITexture guiLeft;
        public GUITexture guiRight;
        public GUITexture guiJump;

        private Animator anim;

        // Movement variables
        public float moveSpeed = 5f;
        public float jumpForce = 50f;
        public float maxJumpVelocity = 2f;

        // Movement flags
        private bool moveLeft, moveRight, doJump = false;

        void Start()
        {
            anim = gameObject.GetComponent<Animator>();
        }

        // Update is called once per frame
        void Update()
        {
            // Check to see if the screen is being touched
            if (Input.touchCount > 0)
            {
                // Get the touch info
                Touch t = Input.GetTouch(0);

                // Did the touch action just begin?
                if (t.phase == TouchPhase.Began)
                {
                    // Are we touching the left arrow?
                    if (guiLeft.HitTest(t.position, Camera.main))
                    {
                        Debug.Log("Touching Left Control");
                        moveLeft = true;
                    }
                    // Are we touching the right arrow?
                    if (guiRight.HitTest(t.position, Camera.main))
                    {
                        Debug.Log("Touching Right Control");
                        moveRight = true;
                    }
                    // Are we touching the jump button?
                    if (guiJump.HitTest(t.position, Camera.main))
                    {
                        Debug.Log("Touching Jump Control");
                        doJump = true;
                    }
                }

                // Did the touch end?
                if (t.phase == TouchPhase.Ended)
                {
                    // Stop all movement
                    doJump = moveLeft = moveRight = false;
                }
            }

            // Is the left mouse button down?
            if (Input.GetMouseButtonDown(0))
            {
                // Are we clicking the left arrow?
                if (guiLeft.HitTest(Input.mousePosition, Camera.main))
                {
                    Debug.Log("Touching Left Control");
                    moveLeft = true;
                }
                // Are we clicking the right arrow?
                if (guiRight.HitTest(Input.mousePosition, Camera.main))
                {
                    Debug.Log("Touching Right Control");
                    moveRight = true;
                }
                // Are we clicking the jump button?
                if (guiJump.HitTest(Input.mousePosition, Camera.main))
                {
                    Debug.Log("Touching Jump Control");
                    doJump = true;
                }
            }

            if (Input.GetMouseButtonUp(0))
            {
                // Stop all movement on left mouse button up
                doJump = moveLeft = moveRight = false;
            }
        }

        void FixedUpdate()
        {
            //anim.SetFloat("Speed", Mathf.Abs);

            // Set velocity based on our movement flags.
            if (moveLeft)
            {
                rigidbody2D.velocity = -Vector2.right * moveSpeed;
            }
            if (moveRight)
            {
                rigidbody2D.velocity = Vector2.right * moveSpeed;
            }
            if (doJump)
            {
                // If we have not reached the maximum jump velocity, keep applying force.
                if (rigidbody2D.velocity.y < maxJumpVelocity)
                {
                    rigidbody2D.AddForce(Vector2.up * jumpForce);
                }
                else
                {
                    // Otherwise stop jumping
                    doJump = false;
                }
            }
        }
    }

But I don't know how to take the script from the tutorial (TouchControls) and hook it into my player control script (PlayerCharacter). How can I combine the two scripts so that the player can be controlled with the touch controls?

1 answer

The best approach is not to drag the touch controls from the TouchControls tutorial into PlayerCharacter, but the other way around: use the TouchControls script from the tutorial as your template.

Your PlayerCharacter reads its input as floats (Input.GetAxis returns a value between -1.0 and 1.0), while TouchControls works with boolean flags such as moveLeft = true;.
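One way to bridge that gap, as a minimal sketch, is to convert the boolean flags into a float "axis" value before handing it to the animator and movement code. The helper name GetTouchAxis is my own, not something from either script:

    // Inside TouchControls: turn the boolean flags into the -1 / 0 / 1
    // float that the PlayerCharacter code expects from Input.GetAxis.
    // GetTouchAxis is a made-up helper name, not part of the tutorial.
    float GetTouchAxis()
    {
        if (moveLeft)  return -1f;
        if (moveRight) return 1f;
        return 0f;
    }

Every place PlayerCharacter calls Input.GetAxis(axisName) you could then call GetTouchAxis() instead, for example anim.SetFloat("Speed", Mathf.Abs(GetTouchAxis()));.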

The two scripts are too different from each other to simply merge and expect to work.

So leave the Update function of TouchControls as it is and only extend its FixedUpdate function with your own control logic, because Update acts as a state controller for left, right and jump, so to speak, and it also handles the actual touch input. A rough example of such a FixedUpdate is sketched below.
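This is only a sketch under a few assumptions: it uses the same old rigidbody2D property and animator parameters the question's code uses, it relies on the GetTouchAxis helper sketched above, and it assumes you keep PlayerCharacter's ground-check code so the "Grounded" animator bool is still being set:

    void FixedUpdate()
    {
        // Horizontal movement driven by the touch flags instead of Input.GetAxis.
        float axis = GetTouchAxis();
        anim.SetFloat("Speed", Mathf.Abs(axis));

        if (axis != 0f)
        {
            // Flip the sprite the same way PlayerCharacter does.
            Vector3 newScale = transform.localScale;
            newScale.x = axis < 0f ? -1f : 1f;
            transform.localScale = newScale;
        }
        transform.position += transform.right * axis * moveSpeed * Time.deltaTime;

        // Jump only when the ground checks (kept from PlayerCharacter) say we are grounded.
        if (doJump && anim.GetBool("Grounded"))
        {
            anim.SetBool("Jump", true);
            rigidbody2D.AddForce(transform.up * jumpForce);
            doJump = false;
        }
    }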

FixedUpdate can then take over the things PlayerCharacter does, for example applying force when you touch a tagged object, or something similar. Since Update would then only deal with input state, it is good advice to wrap the touch code in its own function, so Update is not only about input but can also hold other code related to your logic.
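To follow that last piece of advice, the touch handling can be pulled out of Update into its own method. Again just a sketch; the method name ReadTouchInput is my own, and its body is simply the touch part of the tutorial's Update moved as-is:

    void Update()
    {
        ReadTouchInput();
        // Other per-frame logic (animation state, platform tracking, ...)
        // can live here without being buried in the input code.
    }

    // Sets moveLeft / moveRight / doJump exactly like the tutorial's Update
    // did; only the location of the code changes.
    void ReadTouchInput()
    {
        if (Input.touchCount > 0)
        {
            Touch t = Input.GetTouch(0);
            if (t.phase == TouchPhase.Began)
            {
                if (guiLeft.HitTest(t.position, Camera.main))  moveLeft = true;
                if (guiRight.HitTest(t.position, Camera.main)) moveRight = true;
                if (guiJump.HitTest(t.position, Camera.main))  doJump = true;
            }
            if (t.phase == TouchPhase.Ended)
            {
                doJump = moveLeft = moveRight = false;
            }
        }
    }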

