Unity實現圓形Image組件
本文實例為大家分享了Unity實現圓形Image組件的具體代碼,供大家參考,具體內容如下
一、前言
遊戲裡很多圖片都是以圓形展示的,例如頭像、技能圖標等,一般做法是使用Image組件+Mask組件實現,但是Mask組件會影響效率(增加額外的drawcall)所以不建議大量使用
UGUI的Mask實現原理:利用GPU的模版緩沖
Mask組件會賦給父級和子級UI一個特殊的材質,這個材質會給Image的每個像素點進行標記並放在一個稱為Stencil Buffer的緩存內,父級每個像素點的標記設置為1,子級UI進行渲染的時候會去檢查這個Stencil Buffer內的標記是否為1,如果為1則進行渲染,否則不渲染
二、實現自己的圓形組件
像Image、RawImage這些組件都是繼承自MaskableGraphic類,MaskableGraphic類繼承自Graphic類,Graphic類中有個OnPopulateMesh方法用於繪制圖形,UGUI的Image組件實現原理是重寫了OnPopulateMesh方法並繪制了一個矩形,所以按照這個思路我們可以重寫OnPopulateMesh方法直接繪制一個圓形
——獲取圖片的長寬、uv等信息
——OnPopulateMesh:當UI元素生成頂點數據時會調用OnPopulateMesh(VertexHelper vh)函數,我們隻需要將原先的矩形頂點數據清除,改寫入圓形頂點數據,這樣渲染出來的自然是圓形圖片
——不規則UI元素的響應區域判定
UI組件的響應區域判定是通過實現ICanvasRaycastFilter接口中的IsRaycastLocationValid函數,它的返回值是一個bool值,返回true則視為可以響應,例如Image組件,它判定瞭兩個條件:當前屏幕坐標是否在當前圖片矩形區域內和當前屏幕坐標的圖片區域透明度是否大於alphaHitTestMinimumThreshold參數
我們想實現精確的點擊判斷,可以代碼動態將alphaHitTestMinimumThreshold參數設置為0.1,這樣就實現瞭隻有在透明度大於0.1的像素點才視為響應,但它要求圖片的Read/Write Enabled必須開啟,這就導致瞭圖片占用瞭兩份內存,所以不建議使用
對於像素級的點擊判定,有一種算法可以實現:Ray-Crossing算法
此算法適用於所有圖形,實現思路是從指定點向任意方向發出一條水平射線,與圖形相交,如果交點是奇數個,則點在圖形內,如果交點是偶數個,則點在圖形外
using UnityEngine;
using UnityEngine.Sprites;
using UnityEngine.UI;
using System.Collections.Generic;

/// <summary>
/// Circular Image component.
/// Instead of pairing Image with a stencil-based Mask (which costs extra draw
/// calls), this Graphic overrides OnPopulateMesh and emits a circular triangle
/// fan directly. Hit testing uses the ray-crossing (even-odd) rule against the
/// generated perimeter so clicks outside the circle are rejected.
/// </summary>
[AddComponentMenu("LFramework/UI/CircleImage", 11)]
public class CircleImage : MaskableGraphic, ICanvasRaycastFilter
{
    /// <summary>
    /// Render type.
    /// </summary>
    public enum RenderType
    {
        Simple,
        Filled,
    }

    /// <summary>
    /// Fill type (only a full 360-degree radial fill is implemented).
    /// </summary>
    public enum FilledType
    {
        Radial360,
    }

    /// <summary>
    /// Starting edge of the fill when FilledType is Radial360.
    /// </summary>
    public enum Origin360
    {
        Right,
        Top,
        Left,
        Bottom,
    }

    // Sprite to render; when null, mainTexture falls back to the material.
    [SerializeField]
    Sprite m_Sprite;
    public Sprite Sprite { get { return m_Sprite; } }

    // Texture handed to the CanvasRenderer.
    public override Texture mainTexture
    {
        get
        {
            if (m_Sprite == null)
            {
                if (material != null && material.mainTexture != null)
                {
                    return material.mainTexture;
                }
                return s_WhiteTexture;
            }
            return m_Sprite.texture;
        }
    }

    // How the circle is rendered: full disc or partially filled pie.
    [SerializeField]
    RenderType m_RenderType;

    // Fill style used when m_RenderType is Filled.
    [SerializeField]
    FilledType m_FilledType;

    // Angular origin of the fill (Filled / Radial360 only).
    [SerializeField]
    Origin360 m_Origin360;

    // Fill direction: true = clockwise, false = counter-clockwise.
    [SerializeField]
    bool m_Clockwise;

    // Fraction of the circle that is drawn in Filled mode, 0..1.
    [SerializeField]
    [Range(0, 1)]
    float m_FillAmount;

    // Number of triangle segments a full circle is tessellated into.
    // NOTE: the misspelled identifier is kept on purpose — renaming a
    // [SerializeField] field would silently reset the value in existing
    // scenes and prefabs.
    [SerializeField]
    int segements = 100;

    // Perimeter vertices of the last generated mesh, in local space;
    // consumed by the ray-crossing hit test.
    List<Vector3> vertexCache = new List<Vector3>();

    /// <summary>
    /// Rebuilds the mesh: clears the previous geometry and emits a triangle
    /// fan according to the current render settings.
    /// </summary>
    protected override void OnPopulateMesh(VertexHelper vh)
    {
        vh.Clear();
        vertexCache.Clear();
        // Guard: segements <= 0 would make deltaRad a division by zero below;
        // leave the mesh empty instead of emitting invalid geometry.
        if (segements <= 0)
        {
            return;
        }
        switch (m_RenderType)
        {
            case RenderType.Simple:
                GenerateSimpleSprite(vh);
                break;
            case RenderType.Filled:
                GenerateFilledSprite(vh);
                break;
        }
    }

    // Full disc: all 'segements' perimeter vertices plus the closing triangle.
    void GenerateSimpleSprite(VertexHelper vh)
    {
        float deltaRad = 2 * Mathf.PI / segements;
        GenerateFan(vh, segements, 0f, deltaRad, true);
    }

    // Partial (or full) disc starting at the configured origin, winding in the
    // configured direction.
    void GenerateFilledSprite(VertexHelper vh)
    {
        switch (m_FilledType)
        {
            case FilledType.Radial360:
                float deltaRad = 2 * Mathf.PI / segements;
                // Each Origin360 value is one quarter turn from Right.
                float startRad = 2 * Mathf.PI * 0.25f * (int)m_Origin360;
                // >= instead of == guards against values pushed past 1 from code.
                bool full = m_FillAmount >= 1f;
                int perimeterCount = full
                    ? segements
                    : Mathf.RoundToInt(segements * m_FillAmount) + 1;
                GenerateFan(vh, perimeterCount, startRad, m_Clockwise ? -deltaRad : deltaRad, full);
                break;
        }
    }

    // Shared mesh builder for both render modes (was duplicated verbatim in
    // GenerateSimpleSprite and GenerateFilledSprite).
    // Emits the centre vertex (index 0) followed by 'perimeterCount' vertices
    // on the circle starting at angle 'startRad' and stepping by 'deltaRad',
    // then stitches them into a triangle fan. When 'closeLoop' is true the
    // last perimeter vertex is connected back to the first. Perimeter
    // positions are recorded in vertexCache for hit testing.
    void GenerateFan(VertexHelper vh, int perimeterCount, float startRad, float deltaRad, bool closeLoop)
    {
        float width = rectTransform.rect.width;
        float height = rectTransform.rect.height;
        // Guard: a zero-sized rect would make the uv scales NaN.
        if (width <= 0f || height <= 0f)
        {
            return;
        }

        Vector4 uv = m_Sprite == null ? Vector4.zero : DataUtility.GetOuterUV(m_Sprite);
        // Circumscribed circle: radius taken from the larger side of the rect.
        float r = (width > height ? width : height) * 0.5f;
        Vector2 uvCenter = new Vector2((uv.x + uv.z) * 0.5f, (uv.y + uv.w) * 0.5f);
        Vector3 posCenter = new Vector2((0.5f - rectTransform.pivot.x) * width, (0.5f - rectTransform.pivot.y) * height);
        float uvScaleX = (uv.z - uv.x) / width;
        float uvScaleY = (uv.w - uv.y) / height;

        vh.AddVert(posCenter, color, uvCenter);
        float curRad = startRad;
        for (int i = 0; i < perimeterCount; i++)
        {
            UIVertex vertex = new UIVertex();
            Vector3 posOffset = new Vector3(r * Mathf.Cos(curRad), r * Mathf.Sin(curRad));
            vertex.position = posCenter + posOffset;
            vertex.color = color;
            // Map the local-space position back into the sprite's uv rect.
            vertex.uv0 = new Vector2(uvCenter.x + posOffset.x * uvScaleX, uvCenter.y + posOffset.y * uvScaleY);
            vh.AddVert(vertex);
            vertexCache.Add(vertex.position);
            curRad += deltaRad;
        }

        // Fan triangles: centre (index 0) plus consecutive perimeter vertices.
        for (int i = 1; i < perimeterCount; i++)
        {
            vh.AddTriangle(0, i, i + 1);
        }
        if (closeLoop && perimeterCount > 1)
        {
            vh.AddTriangle(0, perimeterCount, 1);
        }
    }

    /// <summary>
    /// Pixel-accurate hit test: converts the screen point to local space and
    /// applies the ray-crossing (even-odd) rule against the cached perimeter.
    /// Returns false while no mesh has been generated (empty cache → 0 crossings).
    /// </summary>
    public bool IsRaycastLocationValid(Vector2 sp, Camera eventCamera)
    {
        Vector2 localPos;
        int crossPointCount;
        RectTransformUtility.ScreenPointToLocalPointInRectangle(rectTransform, sp, eventCamera, out localPos);
        RayCrossing(localPos, out crossPointCount);
        return crossPointCount % 2 != 0;
    }

    /// <summary>
    /// Casts a horizontal ray to the right from localPos and counts crossings
    /// with the polygon formed by vertexCache; an odd count means inside.
    /// NOTE(review): with a partial fill the polygon is implicitly closed by
    /// the chord from last perimeter vertex back to the first, not through the
    /// centre, so the clickable area can exceed the drawn pie — confirm whether
    /// that matters for partial fills.
    /// </summary>
    public void RayCrossing(Vector2 localPos, out int crossPointCount)
    {
        crossPointCount = 0;
        for (int i = 0; i < vertexCache.Count; i++)
        {
            Vector3 p1 = vertexCache[i];
            Vector3 p2 = vertexCache[(i + 1) % vertexCache.Count];
            // Skip horizontal edges; the strict inequality tests below exclude
            // edge endpoints lying exactly on the ray, avoiding double counts.
            if (p1.y == p2.y)
            {
                continue;
            }
            if (localPos.y <= Mathf.Min(p1.y, p2.y))
            {
                continue;
            }
            if (localPos.y >= Mathf.Max(p1.y, p2.y))
            {
                continue;
            }
            // X coordinate where this edge crosses the ray's height.
            float crossX = (localPos.y - p1.y) * (p2.x - p1.x) / (p2.y - p1.y) + p1.x;
            if (crossX >= localPos.x)
            {
                crossPointCount++;
            }
        }
    }
}
以上就是本文的全部內容,希望對大家的學習有所幫助,也希望大家多多支持WalkonNet。