diff --git a/.github/workflows/linux-test.yml b/.github/workflows/linux-test.yml
index a08a54a21..cbe4bdb24 100644
--- a/.github/workflows/linux-test.yml
+++ b/.github/workflows/linux-test.yml
@@ -19,8 +19,6 @@ jobs:
         with:
           ref: ${{ inputs.ref }}
-
-      # Cache built libraries
       - name: Concat native library source files
         run: |
@@ -34,6 +32,8 @@ jobs:
             Packages/com.github.homuler.mediapipe/Runtime/Plugins/libmediapipe_c.so
             Packages/com.github.homuler.mediapipe/Runtime/Plugins/Protobuf/*.dll
             Packages/com.github.homuler.mediapipe/Runtime/Scripts/Protobuf/**/*.cs
+            Packages/com.github.homuler.mediapipe/PackageResources/MediaPipe/*.bytes
+            Packages/com.github.homuler.mediapipe/PackageResources/MediaPipe/*.txt
           key: libs-ubuntu-20.04-v1-${{ hashFiles('cache_key.txt') }}
 
       # Setup build tools
diff --git a/.github/workflows/macos-test.yml b/.github/workflows/macos-test.yml
index 48430fa05..4a5d7078a 100644
--- a/.github/workflows/macos-test.yml
+++ b/.github/workflows/macos-test.yml
@@ -32,6 +32,8 @@ jobs:
            Packages/com.github.homuler.mediapipe/Runtime/Plugins/libmediapipe_c.dylib
            Packages/com.github.homuler.mediapipe/Runtime/Plugins/Protobuf/*.dll
            Packages/com.github.homuler.mediapipe/Runtime/Scripts/Protobuf/**/*.cs
+            Packages/com.github.homuler.mediapipe/PackageResources/MediaPipe/*.bytes
+            Packages/com.github.homuler.mediapipe/PackageResources/MediaPipe/*.txt
           key: libs-macos-11-v1-${{ hashFiles('cache_key.txt') }}
 
       # Setup build tools
diff --git a/.github/workflows/windows-test.yml b/.github/workflows/windows-test.yml
index 6e4e0e84e..ac2377beb 100644
--- a/.github/workflows/windows-test.yml
+++ b/.github/workflows/windows-test.yml
@@ -33,6 +33,8 @@ jobs:
            Packages/com.github.homuler.mediapipe/Runtime/Plugins/mediapipe_c.dll
            Packages/com.github.homuler.mediapipe/Runtime/Plugins/Protobuf/*.dll
            Packages/com.github.homuler.mediapipe/Runtime/Scripts/Protobuf/**/*.cs
+            Packages/com.github.homuler.mediapipe/PackageResources/MediaPipe/*.bytes
+            Packages/com.github.homuler.mediapipe/PackageResources/MediaPipe/*.txt
           key: libs-windows-2019-v1-${{ hashFiles('cache_key.txt') }}
 
       - name: Remove cache_key.txt
diff --git a/Assets/MediaPipeUnity/Samples/Scenes/Tasks/Face Detection/FaceDetectorRunner.cs b/Assets/MediaPipeUnity/Samples/Scenes/Tasks/Face Detection/FaceDetectorRunner.cs
index fcc3810dc..960ca6c12 100644
--- a/Assets/MediaPipeUnity/Samples/Scenes/Tasks/Face Detection/FaceDetectorRunner.cs
+++ b/Assets/MediaPipeUnity/Samples/Scenes/Tasks/Face Detection/FaceDetectorRunner.cs
@@ -101,7 +101,7 @@ protected override IEnumerator Run()
           else
           {
             // clear the annotation
-            _detectionResultAnnotationController.DrawNow(FaceDetectionResult.Empty);
+            _detectionResultAnnotationController.DrawNow(default);
           }
           break;
         case Tasks.Vision.Core.RunningMode.VIDEO:
@@ -112,7 +112,7 @@ protected override IEnumerator Run()
           else
           {
             // clear the annotation
-            _detectionResultAnnotationController.DrawNow(FaceDetectionResult.Empty);
+            _detectionResultAnnotationController.DrawNow(default);
           }
           break;
         case Tasks.Vision.Core.RunningMode.LIVE_STREAM:
diff --git a/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Components/Containers/DetectionResult.cs b/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Components/Containers/DetectionResult.cs
index c261b0e3e..e07686cb1 100644
--- a/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Components/Containers/DetectionResult.cs
+++ b/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Components/Containers/DetectionResult.cs
@@ -117,18 +117,18 @@ internal DetectionResult(List<Detection> detections)
 
     public void Clear() => detections.Clear();
 
-    public static readonly DetectionResult Empty = new DetectionResult(new List<Detection>());
+    public static DetectionResult Empty => Alloc(0);
 
     public static DetectionResult Alloc(int capacity) => new DetectionResult(new List<Detection>(capacity));
 
-    public static DetectionResult CreateFrom(List<Detection> detectionsProto)
+    internal static DetectionResult CreateFrom(List<Detection> detectionsProto)
     {
       var result = Alloc(detectionsProto.Count);
       Copy(detectionsProto, ref result);
       return result;
     }
 
-    public static void Copy(List<Detection> source, ref DetectionResult destination)
+    internal static void Copy(List<Detection> source, ref DetectionResult destination)
     {
       var detections = destination.detections;
       if (source.Count < detections.Count)
diff --git a/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Core/BaseOptions.cs b/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Core/BaseOptions.cs
index 5fa880640..341b3b742 100644
--- a/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Core/BaseOptions.cs
+++ b/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Core/BaseOptions.cs
@@ -18,7 +18,7 @@ public enum Delegate
     public string modelAssetPath { get; } = string.Empty;
     public byte[] modelAssetBuffer { get; } = null;
 
-    public BaseOptions(Delegate delegateCase = Delegate.CPU, string modelAssetPath = "", byte[] modelAssetBuffer = null)
+    public BaseOptions(Delegate delegateCase = Delegate.CPU, string modelAssetPath = null, byte[] modelAssetBuffer = null)
     {
       this.delegateCase = delegateCase;
       this.modelAssetPath = modelAssetPath;
diff --git a/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Vision/FaceDetector/FaceDetector.cs b/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Vision/FaceDetector/FaceDetector.cs
index 91bbe899a..8dcc2bd68 100644
--- a/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Vision/FaceDetector/FaceDetector.cs
+++ b/Packages/com.github.homuler.mediapipe/Runtime/Scripts/Tasks/Vision/FaceDetector/FaceDetector.cs
@@ -32,6 +32,8 @@ public sealed class FaceDetector : Core.BaseVisionTaskApi
     private readonly NormalizedRect _normalizedRect = new NormalizedRect();
     private readonly List<Detection> _detectionsForRead;
 
+    private readonly FaceDetectionResult _emptyResult = FaceDetectionResult.Alloc(0);
+
     private FaceDetector(
       CalculatorGraphConfig graphConfig,
       Core.RunningMode runningMode,
@@ -99,12 +101,13 @@ public static FaceDetector CreateFromOptions(FaceDetectorOptions options)
     /// frame of reference coordinates system, i.e. in `[0,image_width) x [0,
     /// image_height)`, which are the dimensions of the underlying image data.
     /// </returns>
-    public FaceDetectionResult Detect(Image image, Core.ImageProcessingOptions? imageProcessingOptions)
+    public FaceDetectionResult Detect(Image image, Core.ImageProcessingOptions? imageProcessingOptions = null)
     {
       using var outDetectionsPacket = DetectInternal(image, imageProcessingOptions);
       if (outDetectionsPacket.IsEmpty())
       {
-        return FaceDetectionResult.Empty;
+        _emptyResult.Clear();
+        return _emptyResult;
       }
       outDetectionsPacket.GetDetectionList(_detectionsForRead);
       return FaceDetectionResult.CreateFrom(_detectionsForRead);
@@ -167,12 +170,13 @@ private Packet DetectInternal(Image image, Core.ImageProcessingOptions? imagePro
     /// frame of reference coordinates system, i.e. in `[0,image_width) x [0,
     /// image_height)`, which are the dimensions of the underlying image data.
     /// </returns>
-    public FaceDetectionResult DetectForVideo(Image image, int timestampMs, Core.ImageProcessingOptions? imageProcessingOptions)
+    public FaceDetectionResult DetectForVideo(Image image, int timestampMs, Core.ImageProcessingOptions? imageProcessingOptions = null)
     {
       using var outDetectionsPacket = DetectVideoInternal(image, timestampMs, imageProcessingOptions);
       if (outDetectionsPacket.IsEmpty())
       {
-        return FaceDetectionResult.Empty;
+        _emptyResult.Clear();
+        return _emptyResult;
       }
       outDetectionsPacket.GetDetectionList(_detectionsForRead);
       return FaceDetectionResult.CreateFrom(_detectionsForRead);
@@ -258,6 +262,7 @@ private static Tasks.Core.TaskRunner.PacketsCallback BuildPacketsCallback(FaceDe
       }
 
       var result = FaceDetectionResult.Alloc(options.numFaces);
+      var emptyResult = FaceDetectionResult.Alloc(0);
 
       return (PacketMap outputPackets) =>
       {
@@ -277,8 +282,9 @@ private static Tasks.Core.TaskRunner.PacketsCallback BuildPacketsCallback(FaceDe
 
         if (outDetectionsPacket.IsEmpty())
         {
+          emptyResult.Clear();
          resultCallback(
-            FaceDetectionResult.Empty,
+            emptyResult,
            image,
            (int)timestamp);
          return;
diff --git a/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision.meta b/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision.meta
new file mode 100644
index 000000000..4be2b7cfe
--- /dev/null
+++ b/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: f4d3355dee426a010afa95f0ab58ea16
+folderAsset: yes
+DefaultImporter:
+  externalObjects: {}
+  userData:
+  assetBundleName:
+  assetBundleVariant:
diff --git a/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision/FaceDetector.meta b/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision/FaceDetector.meta
new file mode 100644
index 000000000..b016260d4
--- /dev/null
+++ b/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision/FaceDetector.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 5857b5d2495abfc048d2f1ec862e98bd
+folderAsset: yes
+DefaultImporter:
+  externalObjects: {}
+  userData:
+  assetBundleName:
+  assetBundleVariant:
diff --git a/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision/FaceDetector/FaceDetectorTest.cs b/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision/FaceDetector/FaceDetectorTest.cs
new file mode 100644
index 000000000..657b8f3fa
--- /dev/null
+++ b/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision/FaceDetector/FaceDetectorTest.cs
@@ -0,0 +1,354 @@
+using System;
+using System.Collections;
+using NUnit.Framework;
+using Mediapipe.Unity;
+using Mediapipe.Tasks.Core;
+using Mediapipe.Tasks.Vision.Core;
+using Mediapipe.Tasks.Vision.FaceDetector;
+using Unity.Collections;
+using UnityEditor;
+using UnityEngine;
+using UnityEngine.TestTools;
+using System.Text.RegularExpressions;
+using Mediapipe.Tasks.Components.Containers;
+using UnityEngine.Experimental.Rendering;
+
+using Stopwatch = System.Diagnostics.Stopwatch;
+
+namespace Mediapipe.Tests.Tasks.Vision
+{
+  public class FaceDetectorTest
+  {
+    private const string _ResourcePath = "Packages/com.github.homuler.mediapipe/PackageResources/MediaPipe";
+    private const string _TestResourcePath = "Packages/com.github.homuler.mediapipe/Tests/Resources";
+
+    private const int _CallbackTimeoutMillisec = 1000;
+
+    private static readonly ResourceManager _ResourceManager = new LocalResourceManager();
+    private readonly Lazy<TextAsset> _faceDetectorModel =
+      new Lazy<TextAsset>(() => AssetDatabase.LoadAssetAtPath<TextAsset>($"{_ResourcePath}/blaze_face_short_range.bytes"));
+
+    private readonly Lazy<Texture2D> _facePicture =
+      new Lazy<Texture2D>(() => AssetDatabase.LoadAssetAtPath<Texture2D>($"{_TestResourcePath}/lenna.png"));
+
+    #region Create
+    [Test]
+    public void Create_ShouldThrowBadStatusException_When_AssetModelIsNotSpecified()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU));
+
+      _ = Assert.Throws<BadStatusException>(() =>
+      {
+        using var _ = FaceDetector.CreateFromOptions(options);
+      });
+    }
+
+    [Test]
+    public void Create_ShouldReturnFaceDetector_When_AssetModelBufferIsValid()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: _faceDetectorModel.Value.bytes));
+
+      Assert.DoesNotThrow(() =>
+      {
+        using (var faceDetector = FaceDetector.CreateFromOptions(options))
+        {
+          faceDetector.Close();
+        }
+      });
+    }
+
+    [Test]
+    public void Create_ShouldThrowBadStatusException_When_AssetModelPathDoesNotExist()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetPath: "unknown_path.bytes"));
+
+      LogAssert.Expect(LogType.Exception, new Regex("FileNotFoundException"));
+
+      _ = Assert.Throws<BadStatusException>(() =>
+      {
+        using var _ = FaceDetector.CreateFromOptions(options);
+      });
+    }
+
+    [UnityTest]
+    public IEnumerator Create_returns_FaceDetector_when_assetModelPath_is_valid()
+    {
+      yield return _ResourceManager.PrepareAssetAsync("blaze_face_short_range.bytes");
+
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetPath: "blaze_face_short_range.bytes"));
+
+      Assert.DoesNotThrow(() =>
+      {
+        using (var faceDetector = FaceDetector.CreateFromOptions(options))
+        {
+          faceDetector.Close();
+        }
+      });
+    }
+    #endregion
+
+    #region Detect
+    [Test]
+    public void Detect_ShouldReturnAnEmptyResult_When_ImageIsEmpty()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: _faceDetectorModel.Value.bytes), runningMode: RunningMode.IMAGE);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        var width = 32;
+        var height = 32;
+        var pixelData = BuildSolidColorData(width, height, UnityEngine.Color.gray);
+        using (var image = new Image(ImageFormat.Types.Format.Srgba, width, height, width * 4, pixelData))
+        {
+          var result = faceDetector.Detect(image, null);
+          Assert.AreEqual(0, result.detections.Count);
+        }
+      }
+    }
+
+    [Test]
+    public void Detect_ShouldReturnFaceDetectionResult_When_FacesAreDetected()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: _faceDetectorModel.Value.bytes), runningMode: RunningMode.IMAGE);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        using (var image = CopyAsImage(_facePicture.Value))
+        {
+          var result = faceDetector.Detect(image, null);
+          Assert.AreEqual(1, result.detections.Count);
+        }
+      }
+    }
+    #endregion
+
+    #region TryDetect
+    [Test]
+    public void TryDetect_ShouldReturnFalse_When_ImageIsEmpty()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: _faceDetectorModel.Value.bytes), runningMode: RunningMode.IMAGE);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        var width = 32;
+        var height = 32;
+        var pixelData = BuildSolidColorData(width, height, UnityEngine.Color.gray);
+        using (var image = new Image(ImageFormat.Types.Format.Srgba, width, height, width * 4, pixelData))
+        {
+          var result = DetectionResult.Alloc(0);
+          var found = faceDetector.TryDetect(image, null, ref result);
+          Assert.IsFalse(found);
+        }
+      }
+    }
+
+    [Test]
+    public void TryDetect_ShouldReturnTrue_When_FacesAreDetected()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: _faceDetectorModel.Value.bytes), runningMode: RunningMode.IMAGE);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        using (var image = CopyAsImage(_facePicture.Value))
+        {
+          var result = DetectionResult.Alloc(0);
+          var found = faceDetector.TryDetect(image, null, ref result);
+          Assert.IsTrue(found);
+          Assert.AreEqual(1, result.detections.Count);
+        }
+      }
+    }
+    #endregion
+
+    #region DetectForVideo
+    [Test]
+    public void DetectForVideo_ShouldReturnAnEmptyResult_When_ImageIsEmpty()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: _faceDetectorModel.Value.bytes), runningMode: RunningMode.VIDEO);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        var width = 32;
+        var height = 32;
+        var pixelData = BuildSolidColorData(width, height, UnityEngine.Color.gray);
+        using (var image = new Image(ImageFormat.Types.Format.Srgba, width, height, width * 4, pixelData))
+        {
+          var result = faceDetector.DetectForVideo(image, 1, null);
+          Assert.AreEqual(0, result.detections.Count);
+        }
+      }
+    }
+
+    [Test]
+    public void DetectForVideo_ShouldReturnFaceDetectionResult_When_FacesAreDetected()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: _faceDetectorModel.Value.bytes), runningMode: RunningMode.VIDEO);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        using (var image = CopyAsImage(_facePicture.Value))
+        {
+          var result = faceDetector.DetectForVideo(image, 1, null);
+          Assert.AreEqual(1, result.detections.Count);
+        }
+      }
+    }
+    #endregion
+
+    #region TryDetectForVideo
+    [Test]
+    public void TryDetectForVideo_ShouldReturnFalse_When_ImageIsEmpty()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: _faceDetectorModel.Value.bytes), runningMode: RunningMode.VIDEO);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        var width = 32;
+        var height = 32;
+        var pixelData = BuildSolidColorData(width, height, UnityEngine.Color.gray);
+        using (var image = new Image(ImageFormat.Types.Format.Srgba, width, height, width * 4, pixelData))
+        {
+          var result = DetectionResult.Alloc(0);
+          var found = faceDetector.TryDetectForVideo(image, 1, null, ref result);
+          Assert.IsFalse(found);
+        }
+      }
+    }
+
+    [Test]
+    public void TryDetectForVideo_ShouldReturnTrue_When_FacesAreDetected()
+    {
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: _faceDetectorModel.Value.bytes), runningMode: RunningMode.VIDEO);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        using (var image = CopyAsImage(_facePicture.Value))
+        {
+          var result = DetectionResult.Alloc(0);
+          var found = faceDetector.TryDetectForVideo(image, 1, null, ref result);
+          Assert.IsTrue(found);
+          Assert.AreEqual(1, result.detections.Count);
+        }
+      }
+    }
+    #endregion
+
+    #region DetectAsync
+    [UnityTest]
+    public IEnumerator DetectAsync_ShouldInvokeTheCallbackWithAnEmptyResult_When_ImageIsEmpty()
+    {
+      var isCallbackInvoked = false;
+      var result = DetectionResult.Alloc(0);
+      void callback(DetectionResult detectionResult, Image image, int timestamp)
+      {
+        isCallbackInvoked = true;
+        result = detectionResult;
+      };
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU,
+        modelAssetBuffer: _faceDetectorModel.Value.bytes),
+        runningMode: RunningMode.LIVE_STREAM,
+        resultCallback: callback);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        var width = 32;
+        var height = 32;
+        var pixelData = BuildSolidColorData(width, height, UnityEngine.Color.gray);
+        using (var image = new Image(ImageFormat.Types.Format.Srgba, width, height, width * 4, pixelData))
+        {
+          faceDetector.DetectAsync(image, 1, null);
+        }
+
+        var stopwatch = new Stopwatch();
+        stopwatch.Start();
+        yield return new WaitUntil(() =>
+        {
+          return isCallbackInvoked || stopwatch.ElapsedMilliseconds > _CallbackTimeoutMillisec;
+        });
+
+        Assert.IsTrue(isCallbackInvoked);
+        Assert.AreEqual(0, result.detections.Count);
+      }
+    }
+
+    [UnityTest]
+    public IEnumerator DetectAsync_invokes_the_callback_If_faces_are_detected()
+    {
+      var isCallbackInvoked = false;
+      var result = DetectionResult.Alloc(0);
+      void callback(DetectionResult detectionResult, Image image, int timestamp)
+      {
+        isCallbackInvoked = true;
+        result = detectionResult;
+      };
+      var options = new FaceDetectorOptions(new BaseOptions(BaseOptions.Delegate.CPU,
+        modelAssetBuffer: _faceDetectorModel.Value.bytes),
+        runningMode: RunningMode.LIVE_STREAM,
+        resultCallback: callback);
+
+      using (var faceDetector = FaceDetector.CreateFromOptions(options))
+      {
+        using (var image = CopyAsImage(_facePicture.Value))
+        {
+          faceDetector.DetectAsync(image, 1, null);
+        }
+
+        var stopwatch = new Stopwatch();
+        stopwatch.Start();
+        yield return new WaitUntil(() =>
+        {
+          return isCallbackInvoked || stopwatch.ElapsedMilliseconds > _CallbackTimeoutMillisec;
+        });
+
+        Assert.IsTrue(isCallbackInvoked);
+        Assert.AreEqual(1, result.detections.Count);
+      }
+    }
+    #endregion
+
+    private NativeArray<byte> BuildSolidColorData(int width, int height, Color32 color)
+    {
+      var srcBytes = new byte[width * height * 4];
+      for (var i = 0; i < srcBytes.Length; i += 4)
+      {
+        srcBytes[i] = color.r;
+        srcBytes[i + 1] = color.g;
+        srcBytes[i + 2] = color.b;
+        srcBytes[i + 3] = color.a;
+      }
+      return BuildPixelData(srcBytes);
+    }
+
+    private NativeArray<byte> BuildPixelData(byte[] bytes)
+    {
+      var pixelData = new NativeArray<byte>(bytes.Length, Allocator.Temp, NativeArrayOptions.UninitializedMemory);
+      pixelData.CopyFrom(bytes);
+
+      return pixelData;
+    }
+
+    private Image CopyAsImage(Texture2D src)
+    {
+      var srcData = src.GetPixels32();
+      var dst = new Texture2D(src.width, src.height, TextureFormat.RGBA32, false);
+
+      var dstData = dst.GetPixels32();
+      var w = src.width;
+      var h = src.height;
+
+      for (var x = 0; x < w; x++)
+      {
+        for (var y = 0; y < h; y++)
+        {
+          dstData[x + (y * w)] = srcData[x + ((h - y - 1) * w)];
+        }
+      }
+
+      dst.SetPixels32(dstData);
+      dst.Apply();
+
+      return new Image(ImageFormat.Types.Format.Srgba, dst);
+    }
+  }
+}
diff --git a/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision/FaceDetector/FaceDetectorTest.cs.meta b/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision/FaceDetector/FaceDetectorTest.cs.meta
new file mode 100644
index 000000000..ee40d4db5
--- /dev/null
+++ b/Packages/com.github.homuler.mediapipe/Tests/EditMode/Tasks/Vision/FaceDetector/FaceDetectorTest.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 1c3588d21e48e8cf9b3eccbca82c2cf5
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData:
+  assetBundleName:
+  assetBundleVariant:
diff --git a/Packages/com.github.homuler.mediapipe/Tests/Resources.meta b/Packages/com.github.homuler.mediapipe/Tests/Resources.meta
new file mode 100644
index 000000000..f7ebd0135
--- /dev/null
+++ b/Packages/com.github.homuler.mediapipe/Tests/Resources.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 1f9195e71581d4688b1cba0be2cf595f
+folderAsset: yes
+DefaultImporter:
+  externalObjects: {}
+  userData:
+  assetBundleName:
+  assetBundleVariant:
diff --git a/Packages/com.github.homuler.mediapipe/Tests/Resources/lenna.png b/Packages/com.github.homuler.mediapipe/Tests/Resources/lenna.png
new file mode 100644
index 000000000..59ef68aab
Binary files /dev/null and b/Packages/com.github.homuler.mediapipe/Tests/Resources/lenna.png differ
diff --git a/Packages/com.github.homuler.mediapipe/Tests/Resources/lenna.png.meta b/Packages/com.github.homuler.mediapipe/Tests/Resources/lenna.png.meta
new file mode 100644
index 000000000..4ad8a9a62
--- /dev/null
+++ b/Packages/com.github.homuler.mediapipe/Tests/Resources/lenna.png.meta
@@ -0,0 +1,153 @@
+fileFormatVersion: 2
+guid: a13c2be5cd64d3ef9b66384493ed2a2d
+TextureImporter:
+  internalIDToNameTable: []
+  externalObjects: {}
+  serializedVersion: 12
+  mipmaps:
+    mipMapMode: 0
+    enableMipMap: 1
+    sRGBTexture: 1
+    linearTexture: 0
+    fadeOut: 0
+    borderMipMap: 0
+    mipMapsPreserveCoverage: 0
+    alphaTestReferenceValue: 0.5
+    mipMapFadeDistanceStart: 1
+    mipMapFadeDistanceEnd: 3
+  bumpmap:
+    convertToNormalMap: 0
+    externalNormalMap: 0
+    heightScale: 0.25
+    normalMapFilter: 0
+    flipGreenChannel: 0
+  isReadable: 1
+  streamingMipmaps: 0
+  streamingMipmapsPriority: 0
+  vTOnly: 0
+  ignoreMipmapLimit: 0
+  grayScaleToAlpha: 0
+  generateCubemap: 6
+  cubemapConvolution: 0
+  seamlessCubemap: 0
+  textureFormat: 1
+  maxTextureSize: 2048
+  textureSettings:
+    serializedVersion: 2
+    filterMode: 1
+    aniso: 1
+    mipBias: 0
+    wrapU: 0
+    wrapV: 0
+    wrapW: 0
+  nPOTScale: 1
+  lightmap: 0
+  compressionQuality: 50
+  spriteMode: 0
+  spriteExtrude: 1
+  spriteMeshType: 1
+  alignment: 0
+  spritePivot: {x: 0.5, y: 0.5}
+  spritePixelsToUnits: 100
+  spriteBorder: {x: 0, y: 0, z: 0, w: 0}
+  spriteGenerateFallbackPhysicsShape: 1
+  alphaUsage: 1
+  alphaIsTransparency: 0
+  spriteTessellationDetail: -1
+  textureType: 0
+  textureShape: 1
+  singleChannelComponent: 0
+  flipbookRows: 1
+  flipbookColumns: 1
+  maxTextureSizeSet: 0
+  compressionQualitySet: 0
+  textureFormatSet: 0
+  ignorePngGamma: 0
+  applyGammaDecoding: 0
+  swizzle: 50462976
+  cookieLightType: 0
+  platformSettings:
+  - serializedVersion: 3
+    buildTarget: DefaultTexturePlatform
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    ignorePlatformSupport: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: WebGL
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    ignorePlatformSupport: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: Standalone
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    ignorePlatformSupport: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: Android
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    ignorePlatformSupport: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  - serializedVersion: 3
+    buildTarget: Server
+    maxTextureSize: 2048
+    resizeAlgorithm: 0
+    textureFormat: -1
+    textureCompression: 1
+    compressionQuality: 50
+    crunchedCompression: 0
+    allowsAlphaSplitting: 0
+    overridden: 0
+    ignorePlatformSupport: 0
+    androidETC2FallbackOverride: 0
+    forceMaximumCompressionQuality_BC6H_BC7: 0
+  spriteSheet:
+    serializedVersion: 2
+    sprites: []
+    outline: []
+    physicsShape: []
+    bones: []
+    spriteID:
+    internalID: 0
+    vertices: []
+    indices:
+    edges: []
+    weights: []
+    secondaryTextures: []
+    nameFileIdTable: {}
+  mipmapLimitGroupName:
+  pSDRemoveMatte: 0
+  userData:
+  assetBundleName:
+  assetBundleVariant:
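For context, a minimal usage sketch of the API this diff touches; it is not part of the change itself and simply mirrors the calls exercised in FaceDetectorTest. It also shows that `imageProcessingOptions` can now be omitted thanks to the new default parameter. The class `FaceDetectorUsageSketch`, the method `CountFaces`, and the `modelBytes`/`image` arguments are illustrative names only; the model bytes are assumed to be the contents of `blaze_face_short_range.bytes`.

```csharp
using Mediapipe;
using Mediapipe.Tasks.Core;
using Mediapipe.Tasks.Vision.Core;
using Mediapipe.Tasks.Vision.FaceDetector;

public static class FaceDetectorUsageSketch
{
  // Sketch only: builds a FaceDetector from a model buffer and runs a single
  // synchronous detection, exactly as the IMAGE-mode tests above do.
  public static int CountFaces(byte[] modelBytes, Image image)
  {
    var options = new FaceDetectorOptions(
      new BaseOptions(BaseOptions.Delegate.CPU, modelAssetBuffer: modelBytes),
      runningMode: RunningMode.IMAGE);

    using (var faceDetector = FaceDetector.CreateFromOptions(options))
    {
      // imageProcessingOptions is optional after this change.
      var result = faceDetector.Detect(image);
      return result.detections.Count;
    }
  }
}
```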