diff --git a/Benchmark/Benchmark.csproj b/Benchmark/Benchmark.csproj
new file mode 100644
index 0000000..1881749
--- /dev/null
+++ b/Benchmark/Benchmark.csproj
@@ -0,0 +1,18 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net8.0</TargetFramework>
+    <OutputType>Exe</OutputType>
+    <IsPackable>false</IsPackable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="BenchmarkDotNet" Version="0.14.0" />
+  </ItemGroup>
+
+  <ItemGroup>
+    <ProjectReference Include="..\KokoroSharp\KokoroSharp.csproj" />
+  </ItemGroup>
+
+</Project>
+
diff --git a/Benchmark/Inference.cs b/Benchmark/Inference.cs
new file mode 100644
index 0000000..aa27c30
--- /dev/null
+++ b/Benchmark/Inference.cs
@@ -0,0 +1,38 @@
+using BenchmarkDotNet.Attributes;
+
+using KokoroSharp;
+using KokoroSharp.Core;
+
+using System;
+using System.Collections.Generic;
+
+namespace Benchmark;
+
+[InProcess]
+public class Inference {
+ const string text = "This is a performance benchmark of Kokoro.";
+ static readonly Dictionary<(KModel, bool UseCuda), KokoroModel> models = [];
+ static int[] tokens;
+ static KokoroVoice voice;
+
+ [GlobalSetup]
+ public void Setup() {
+ tokens = KokoroSharp.Processing.Tokenizer.Tokenize(text);
+ voice = KokoroVoiceManager.GetVoice("af_heart");
+ foreach (var model in Enum.GetValues<KModel>()) {
+ if (!KokoroTTS.IsDownloaded(model))
+ KokoroTTS.LoadModel(model).Dispose(); // downloads the model if not already present.
+ var options = new Microsoft.ML.OnnxRuntime.SessionOptions();
+ models[(model, false)] = new KokoroModel(KokoroTTS.ModelNamesMap[model], options);
+ var options2 = new Microsoft.ML.OnnxRuntime.SessionOptions();
+ options2.AppendExecutionProvider_CUDA(); // Use CUDA for GPU inference.
+ models[(model, true)] = new KokoroModel(KokoroTTS.ModelNamesMap[model], options2);
+ }
+ }
+
+ [ParamsAllValues]
+ public KModel Model { get; set; }
+
+ [Benchmark] public float[] CPU() => models[(Model, false)].Infer(tokens, voice!.Features);
+ [Benchmark] public float[] CUDA() => models[(Model, true)].Infer(tokens, voice!.Features);
+}
diff --git a/Benchmark/Program.cs b/Benchmark/Program.cs
new file mode 100644
index 0000000..af5ba4f
--- /dev/null
+++ b/Benchmark/Program.cs
@@ -0,0 +1,3 @@
+using BenchmarkDotNet.Running;
+
+BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly).Run(args);
\ No newline at end of file
diff --git a/Benchmark/Tokenizer.cs b/Benchmark/Tokenizer.cs
new file mode 100644
index 0000000..157c75a
--- /dev/null
+++ b/Benchmark/Tokenizer.cs
@@ -0,0 +1,16 @@
+using BenchmarkDotNet.Attributes;
+
+using System.IO;
+using System.Runtime.CompilerServices;
+
+namespace Benchmark;
+
+[SimpleJob]
+public class Tokenizer {
+ static readonly string text = File.ReadAllText(Path.Join(GetMyPath(), "../../README.md"));
+ static string GetMyPath([CallerFilePath] string filePath = "") => filePath;
+
+ [Benchmark] public string PreprocessText() => KokoroSharp.Processing.Tokenizer.PreprocessText(text);
+ [Benchmark] public string Phonemize() => KokoroSharp.Processing.Tokenizer.Phonemize(text, "en-us");
+ [Benchmark] public int[] Tokenize() => KokoroSharp.Processing.Tokenizer.Tokenize(text);
+}
diff --git a/KokoroSharp.sln b/KokoroSharp.sln
index 40b7b2e..c344f34 100644
--- a/KokoroSharp.sln
+++ b/KokoroSharp.sln
@@ -7,6 +7,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "KokoroSharp", "KokoroSharp\
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "KokoroSharp.Tests", "KokoroSharp.Tests\KokoroSharp.Tests.csproj", "{B0AB7C0B-D1C1-447C-9EB5-5CC9CB9E8943}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Benchmark", "Benchmark\Benchmark.csproj", "{FE74270D-B73E-4F9B-9467-2C225FD95FA1}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -21,6 +23,10 @@ Global
{B0AB7C0B-D1C1-447C-9EB5-5CC9CB9E8943}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B0AB7C0B-D1C1-447C-9EB5-5CC9CB9E8943}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B0AB7C0B-D1C1-447C-9EB5-5CC9CB9E8943}.Release|Any CPU.Build.0 = Release|Any CPU
+ {FE74270D-B73E-4F9B-9467-2C225FD95FA1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {FE74270D-B73E-4F9B-9467-2C225FD95FA1}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {FE74270D-B73E-4F9B-9467-2C225FD95FA1}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {FE74270D-B73E-4F9B-9467-2C225FD95FA1}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
diff --git a/KokoroSharp/HighLevel/KokoroLoader.cs b/KokoroSharp/HighLevel/KokoroLoader.cs
index f96f118..9c06fdc 100644
--- a/KokoroSharp/HighLevel/KokoroLoader.cs
+++ b/KokoroSharp/HighLevel/KokoroLoader.cs
@@ -8,7 +8,7 @@
public enum KModel { float32, float16, int8 }
public partial class KokoroTTS {
- static IReadOnlyDictionary<KModel, string> ModelNamesMap { get; } = new Dictionary<KModel, string>() {
+ internal static IReadOnlyDictionary<KModel, string> ModelNamesMap { get; } = new Dictionary<KModel, string>() {
{ float32, "kokoro.onnx" },
{ float16, "kokoro-quant.onnx" },
{ int8, "kokoro-quant-convinteger.onnx" },
diff --git a/KokoroSharp/KokoroSharp.csproj b/KokoroSharp/KokoroSharp.csproj
index f984a26..3cfff31 100644
--- a/KokoroSharp/KokoroSharp.csproj
+++ b/KokoroSharp/KokoroSharp.csproj
@@ -34,7 +34,7 @@
-
+