{"name": "onenm_local_llm", "latest": {"version": "0.1.5", "pubspec": {"name": "onenm_local_llm", "description": "Flutter plugin for on-device LLM inference on Android using llama.cpp. Simplifies model management, loading, and multi-turn chat \u2014 no cloud, no API keys, fully offline.", "version": "0.1.5", "repository": "https://github.com/SxryxnshS5/onenm_local_llm/tree/main/flutter-llama.cpp", "issue_tracker": "https://github.com/SxryxnshS5/onenm_local_llm/issues", "topics": ["ai", "llm", "llama", "on-device", "inference"], "environment": {"sdk": "^3.5.3", "flutter": ">=3.3.0"}, "dependencies": {"flutter": {"sdk": "flutter"}, "plugin_platform_interface": "^2.0.2", "path_provider": "^2.1.2", "http": "^1.2.0"}, "dev_dependencies": {"flutter_test": {"sdk": "flutter"}, "flutter_lints": "^4.0.0"}, "flutter": {"plugin": {"platforms": {"android": {"package": "com.theorangeshade.onenm_local_llm", "pluginClass": "OnenmLocalLlmPlugin"}}}}}, "archive_url": "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/packages/onenm_local_llm/versions/0.1.5.tar.gz", "archive_sha256": "42c330eb83312b171b2d76a49b739accf713d3af2079959465d1ddb3adfe47fc", "published": "2026-03-19T19:02:48.448365Z"}, "versions": [
{"version": "0.1.0", "pubspec": {"name": "onenm_local_llm", "description": "Flutter plugin for on-device LLM inference on Android using llama.cpp. Simplifies model management, loading, and multi-turn chat \u2014 no cloud, no API keys, fully offline.", "version": "0.1.0", "repository": "https://github.com/SxryxnshS5/onenm_local_llm", "issue_tracker": "https://github.com/SxryxnshS5/onenm_local_llm/issues", "topics": ["ai", "llm", "llama", "on-device", "inference"], "environment": {"sdk": "^3.5.3", "flutter": ">=3.3.0"}, "dependencies": {"flutter": {"sdk": "flutter"}, "plugin_platform_interface": "^2.0.2", "path_provider": "^2.1.2", "http": "^1.2.0"}, "dev_dependencies": {"flutter_test": {"sdk": "flutter"}, "flutter_lints": "^4.0.0"}, "flutter": {"plugin": {"platforms": {"android": {"package": "com.theorangeshade.onenm_local_llm", "pluginClass": "OnenmLocalLlmPlugin"}}}}}, "archive_url": "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/packages/onenm_local_llm/versions/0.1.0.tar.gz", "archive_sha256": "5282bec7b41d3742323dd39acfb6adea842616c5e46e759fdd2357d69ba2e082", "published": "2026-03-14T00:42:12.956785Z"},
{"version": "0.1.1", "pubspec": {"name": "onenm_local_llm", "description": "Flutter plugin for on-device LLM inference on Android using llama.cpp. Simplifies model management, loading, and multi-turn chat \u2014 no cloud, no API keys, fully offline.", "version": "0.1.1", "repository": "https://github.com/SxryxnshS5/onenm_local_llm", "issue_tracker": "https://github.com/SxryxnshS5/onenm_local_llm/issues", "topics": ["ai", "llm", "llama", "on-device", "inference"], "environment": {"sdk": "^3.5.3", "flutter": ">=3.3.0"}, "dependencies": {"flutter": {"sdk": "flutter"}, "plugin_platform_interface": "^2.0.2", "path_provider": "^2.1.2", "http": "^1.2.0"}, "dev_dependencies": {"flutter_test": {"sdk": "flutter"}, "flutter_lints": "^4.0.0"}, "flutter": {"plugin": {"platforms": {"android": {"package": "com.theorangeshade.onenm_local_llm", "pluginClass": "OnenmLocalLlmPlugin"}}}}}, "archive_url": "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/packages/onenm_local_llm/versions/0.1.1.tar.gz", "archive_sha256": "a9789f5858fb6bb4f55975442d00e5648580c676852da4fc7ba5ab798f4bba41", "published": "2026-03-15T00:50:57.581160Z"},
{"version": "0.1.2", "pubspec": {"name": "onenm_local_llm", "description": "Flutter plugin for on-device LLM inference on Android using llama.cpp. Simplifies model management, loading, and multi-turn chat \u2014 no cloud, no API keys, fully offline.", "version": "0.1.2", "repository": "https://github.com/SxryxnshS5/onenm_local_llm", "issue_tracker": "https://github.com/SxryxnshS5/onenm_local_llm/issues", "topics": ["ai", "llm", "llama", "on-device", "inference"], "environment": {"sdk": "^3.5.3", "flutter": ">=3.3.0"}, "dependencies": {"flutter": {"sdk": "flutter"}, "plugin_platform_interface": "^2.0.2", "path_provider": "^2.1.2", "http": "^1.2.0"}, "dev_dependencies": {"flutter_test": {"sdk": "flutter"}, "flutter_lints": "^4.0.0"}, "flutter": {"plugin": {"platforms": {"android": {"package": "com.theorangeshade.onenm_local_llm", "pluginClass": "OnenmLocalLlmPlugin"}}}}}, "archive_url": "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/packages/onenm_local_llm/versions/0.1.2.tar.gz", "archive_sha256": "1f7af4338dfe440fa6af9c792105a669468602bf247392372e01579d4d603830", "published": "2026-03-15T01:21:34.888606Z"},
{"version": "0.1.3", "pubspec": {"name": "onenm_local_llm", "description": "Flutter plugin for on-device LLM inference on Android using llama.cpp. Simplifies model management, loading, and multi-turn chat \u2014 no cloud, no API keys, fully offline.", "version": "0.1.3", "repository": "https://github.com/SxryxnshS5/onenm_local_llm", "issue_tracker": "https://github.com/SxryxnshS5/onenm_local_llm/issues", "topics": ["ai", "llm", "llama", "on-device", "inference"], "environment": {"sdk": "^3.5.3", "flutter": ">=3.3.0"}, "dependencies": {"flutter": {"sdk": "flutter"}, "plugin_platform_interface": "^2.0.2", "path_provider": "^2.1.2", "http": "^1.2.0"}, "dev_dependencies": {"flutter_test": {"sdk": "flutter"}, "flutter_lints": "^4.0.0"}, "flutter": {"plugin": {"platforms": {"android": {"package": "com.theorangeshade.onenm_local_llm", "pluginClass": "OnenmLocalLlmPlugin"}}}}}, "archive_url": "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/packages/onenm_local_llm/versions/0.1.3.tar.gz", "archive_sha256": "de2629c38d0d9e0a440a41956eca3d88b8d6d919ba1253e9f6fee8892a9a182a", "published": "2026-03-16T03:08:35.767069Z"},
{"version": "0.1.4", "pubspec": {"name": "onenm_local_llm", "description": "Flutter plugin for on-device LLM inference on Android using llama.cpp. Simplifies model management, loading, and multi-turn chat \u2014 no cloud, no API keys, fully offline.", "version": "0.1.4", "repository": "https://github.com/SxryxnshS5/onenm_local_llm/tree/main/flutter-llama.cpp", "issue_tracker": "https://github.com/SxryxnshS5/onenm_local_llm/issues", "topics": ["ai", "llm", "llama", "on-device", "inference"], "environment": {"sdk": "^3.5.3", "flutter": ">=3.3.0"}, "dependencies": {"flutter": {"sdk": "flutter"}, "plugin_platform_interface": "^2.0.2", "path_provider": "^2.1.2", "http": "^1.2.0"}, "dev_dependencies": {"flutter_test": {"sdk": "flutter"}, "flutter_lints": "^4.0.0"}, "flutter": {"plugin": {"platforms": {"android": {"package": "com.theorangeshade.onenm_local_llm", "pluginClass": "OnenmLocalLlmPlugin"}}}}}, "archive_url": "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/packages/onenm_local_llm/versions/0.1.4.tar.gz", "archive_sha256": "9940405bfcb06287bd84e51ec4381485de8da927889944460fa06bd1fbd32f5c", "published": "2026-03-17T02:27:38.847408Z"},
{"version": "0.1.5", "pubspec": {"name": "onenm_local_llm", "description": "Flutter plugin for on-device LLM inference on Android using llama.cpp. Simplifies model management, loading, and multi-turn chat \u2014 no cloud, no API keys, fully offline.", "version": "0.1.5", "repository": "https://github.com/SxryxnshS5/onenm_local_llm/tree/main/flutter-llama.cpp", "issue_tracker": "https://github.com/SxryxnshS5/onenm_local_llm/issues", "topics": ["ai", "llm", "llama", "on-device", "inference"], "environment": {"sdk": "^3.5.3", "flutter": ">=3.3.0"}, "dependencies": {"flutter": {"sdk": "flutter"}, "plugin_platform_interface": "^2.0.2", "path_provider": "^2.1.2", "http": "^1.2.0"}, "dev_dependencies": {"flutter_test": {"sdk": "flutter"}, "flutter_lints": "^4.0.0"}, "flutter": {"plugin": {"platforms": {"android": {"package": "com.theorangeshade.onenm_local_llm", "pluginClass": "OnenmLocalLlmPlugin"}}}}}, "archive_url": "https://mirrors.tuna.tsinghua.edu.cn/dart-pub/packages/onenm_local_llm/versions/0.1.5.tar.gz", "archive_sha256": "42c330eb83312b171b2d76a49b739accf713d3af2079959465d1ddb3adfe47fc", "published": "2026-03-19T19:02:48.448365Z"}]}