Reputation: 23
I'm using this touch script to show an effect where the screen is touched:
var ParticleA : GameObject;
//var bulletHole : GameObject;

function Update () {
    var hit : RaycastHit;
    // Use Screen.height because many functions (like this one) start in the bottom left of the screen, while MousePosition starts in the top left
    var ray : Ray = Camera.main.ScreenPointToRay (Input.mousePosition);
    if (Input.GetMouseButtonDown(0))
    {
        if (Physics.Raycast (ray, hit, 200))
        {
            var newparA = Instantiate(ParticleA, hit.point, Quaternion.identity);
            //var hitRotation = Quaternion.FromToRotation(Vector3.up, hit.normal);
            //Instantiate(bulletHole, hit.point, hitRotation);
            Destroy(newparA, 12.0);
        }
    }
}
It doesn't work on Android. When I touch the screen, the effect appears in the center of the screen rather than where my finger actually is.
What could the issue be?
Upvotes: 2
Views: 2557
Reputation: 409
This code is in C#. Declare the fields at class level and put the rest inside your Update() function. It works in the Unity Editor as well as on Android.
// Declare these at class level, not inside Update():
public Camera hudCamera;
private RaycastHit hit;
private Ray ray;
private int fingerId = -1;

void Update()
{
    // In the editor, fall back to the mouse
    if (Application.platform == RuntimePlatform.WindowsEditor)
    {
        ray = hudCamera.ScreenPointToRay(Input.mousePosition);
        if (Input.GetMouseButtonDown(0))
        {
            if (Physics.Raycast(ray, out hit))
            {
                if (hit.collider == GetComponent<Collider>())
                {
                    // GetComponent<Collider>() is the collider of the gameobject this script is attached to
                    // Your Rest of the Logic Here
                }
            }
        }
    }
    // This will work on an Android device
    else if (Application.platform == RuntimePlatform.Android)
    {
        if (Input.touchCount > 0)
        {
            for (int i = 0; i < Input.touchCount; i++)
            {
                ray = hudCamera.ScreenPointToRay(Input.GetTouch(i).position);
                if (Input.GetTouch(i).phase == TouchPhase.Began)
                {
                    if (Physics.Raycast(ray, out hit))
                    {
                        if (hit.collider == GetComponent<Collider>())
                        {
                            // Remember which finger started the touch so it can be matched when it ends
                            fingerId = Input.GetTouch(i).fingerId;
                            // Your Rest of the Logic Here
                        }
                    }
                }
                if (Input.GetTouch(i).phase == TouchPhase.Moved)
                {
                    // Logic for a finger moving on the screen
                }
                if (Input.GetTouch(i).phase == TouchPhase.Ended)
                {
                    if (Input.GetTouch(i).fingerId == fingerId)
                    {
                        fingerId = -1;
                        // Logic when the touch ends
                    }
                }
            }
        }
    }
}
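If all you need is the effect from the question (a particle spawned at the hit point), the "Your Rest of the Logic Here" placeholders above could call a small helper. This is a minimal C# sketch; SpawnEffect is a hypothetical name, and it assumes the same public ParticleA prefab field as in the question.

// Hypothetical helper for the placeholder branches above:
// spawns the particle prefab at the raycast hit point and destroys it after 12 seconds.
public GameObject ParticleA;

void SpawnEffect(RaycastHit hit)
{
    GameObject newparA = (GameObject)Instantiate(ParticleA, hit.point, Quaternion.identity);
    Destroy(newparA, 12.0f);
}

Calling SpawnEffect(hit) from both the editor branch and the Android branch keeps the behaviour identical on both platforms.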
Upvotes: 1
Reputation: 11
You can try this:
var ParticleA : GameObject;

function Update () {
    var hit : RaycastHit;
    for (var touch : Touch in Input.touches) {
        if (touch.phase == TouchPhase.Began) {
            var ray = Camera.main.ScreenPointToRay (touch.position);
            if (Physics.Raycast (ray, hit, 200))
            {
                var newparA = Instantiate(ParticleA, hit.point, Quaternion.identity);
                Destroy(newparA, 12.0);
            }
        }
    }
}
Upvotes: 0
Reputation: 1
Try this:
var ParticleA : GameObject;

function Update ()
{
    var hit : RaycastHit;
    if (Input.GetMouseButtonDown(0))
    {
        if (Physics.Raycast (transform.position, Vector3.forward, hit))
        {
            var newparA = Instantiate(ParticleA, hit.point, Quaternion.identity);
            Destroy(newparA, 12.0);
        }
    }
}
Upvotes: 0
Reputation: 3277
You don't want to use Input.GetMouseButtonDown(0) (that's for the mouse, as the name says). Use Input.touches instead: http://docs.unity3d.com/Manual/MobileInput.html
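As a rough illustration of that suggestion, here is a C# sketch assuming the same ParticleA prefab and main camera as in the question; the only real change is that the ray comes from each touch position instead of Input.mousePosition:

public GameObject ParticleA;

void Update()
{
    foreach (Touch touch in Input.touches)
    {
        if (touch.phase == TouchPhase.Began)
        {
            // Build the ray from the touch position rather than the mouse position
            Ray ray = Camera.main.ScreenPointToRay(touch.position);
            RaycastHit hit;
            if (Physics.Raycast(ray, out hit, 200))
            {
                GameObject newparA = (GameObject)Instantiate(ParticleA, hit.point, Quaternion.identity);
                Destroy(newparA, 12.0f);
            }
        }
    }
}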
Upvotes: 3