diff --git a/.github/workflows/linux-build.yaml b/.github/workflows/linux-build.yaml index 7597f5c8..99d08449 100644 --- a/.github/workflows/linux-build.yaml +++ b/.github/workflows/linux-build.yaml @@ -49,7 +49,7 @@ jobs: - uses: subosito/flutter-action@v2 with: channel: 'stable' - flutter-version: '3.24.0' + flutter-version: '3.24.5' - name: Install dependencies run: sudo apt-get install -y clang cmake ninja-build pkg-config libgtk-3-dev liblzma-dev libmpv-dev - name: Install project dependencies diff --git a/.github/workflows/macos-build.yaml b/.github/workflows/macos-build.yaml index 1331f3b4..a2e966bd 100644 --- a/.github/workflows/macos-build.yaml +++ b/.github/workflows/macos-build.yaml @@ -21,7 +21,7 @@ jobs: - uses: subosito/flutter-action@v2 with: channel: 'stable' - flutter-version: '3.24.0' + flutter-version: '3.24.5' - name: Install project dependencies run: flutter pub get - name: Generate intermediates diff --git a/.github/workflows/windows-build.yaml b/.github/workflows/windows-build.yaml index e0beccd2..440b880c 100644 --- a/.github/workflows/windows-build.yaml +++ b/.github/workflows/windows-build.yaml @@ -73,11 +73,12 @@ jobs: # Step 9: Install vcpkg and ffmpeg - name: Install vcpkg and ffmpeg + shell: powershell run: | - git clone https://github.com/microsoft/vcpkg.git C:\vcpkg + if (!(Test-Path "C:\vcpkg")) { git clone https://github.com/microsoft/vcpkg.git C:\vcpkg } C:\vcpkg\bootstrap-vcpkg.bat - C:\vcpkg\vcpkg install ffmpeg - shell: cmd + cd openvino_bindings/third_party + C:\vcpkg\vcpkg install # Step 10: Download and Install OpenVINO Runtime - name: Download and Install OpenVINO Runtime 24.5.0 @@ -126,7 +127,7 @@ jobs: - uses: subosito/flutter-action@v2 with: channel: 'stable' - flutter-version: '3.24.0' + flutter-version: '3.24.5' - name: Install project dependencies run: flutter pub get - name: Generate intermediates diff --git a/README.md b/README.md index 56d085b5..84a26b94 100644 --- a/README.md +++ b/README.md @@ -1,31 +1,100 @@ +
+ # OpenVINO™ Test Drive [![codecov](https://codecov.io/gh/openvinotoolkit/openvino_testdrive/graph/badge.svg?token=DH98FAPH65)](https://codecov.io/gh/openvinotoolkit/openvino_testdrive) +[![openvino](https://img.shields.io/badge/openvino-2024.4-blue)]() + +
+
+Get started with OpenVINO™ Test Drive, an application that lets you run generative AI models, as well as vision models trained by [Intel® Geti™](https://docs.geti.intel.com/), directly on your computer or edge device using [OpenVINO™ Runtime](https://github.com/openvinotoolkit/openvino).
+
+

+ sample +

+
+With OpenVINO™ Test Drive you can:
++ **Chat with LLMs** and evaluate model performance on your computer or edge device
++ **Experiment with different text prompts** to generate images using Stable Diffusion and Stable Diffusion XL models (coming soon)
++ **Transcribe speech from video** using Whisper models, including generation of timestamps (coming soon)
++ **Run and visualize results of models** trained by Intel® Geti™ using single image inference or batch inference mode
+
+## Installation
+
+Download the latest release from the [Releases repository](https://storage.openvinotoolkit.org/repositories/openvino_testdrive/packages/).
+
+> [!NOTE]
+> To verify the integrity of a downloaded file, generate its SHA-256 checksum and compare it to the SHA-256 in the corresponding `.sha256` file published in the Releases repository.
+
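A minimal sketch of the integrity check described in the note above (shell commands; `openvino_testdrive_windows.zip` is a stand-in name for whichever archive you actually downloaded):

```sh
# Compute the SHA-256 checksum of the downloaded archive.
# Linux: sha256sum; macOS: shasum -a 256; Windows PowerShell: Get-FileHash <file> -Algorithm SHA256
sha256sum openvino_testdrive_windows.zip

# Print the published checksum from the Releases repository and compare the two values.
cat openvino_testdrive_windows.zip.sha256
```

If both values match, the downloaded archive is intact.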
+Installation on Windows
+
+1. Download the zip archive from the `Windows` folder of the [Releases repository](https://storage.openvinotoolkit.org/repositories/openvino_testdrive/packages/).
+
+

+ +

+
+2. Extract the zip archive, double-click the MSIX installation package, and click the `Install` button to start the installation.
-Get started with OpenVINO Test Drive, an application that allows you to run LLMs and models trained by [Intel Geti](https://geti.intel.com/) directly on your computer or edge device using OpenVINO.
+

+ +

-# Features
-### LLM models
-+ **Text Generation**: Generate text and engage in chat experiences.
-+ **Performance metrics**: Evaluate model performance on your computer or edge device.
-### Computer vision models
-+ **Single Image Inference**: Perform inference on individual images.
-+ **Batch Inference**: Conduct inference on batches of images.
+3. Click on the application name in the Windows app list to launch OpenVINO™ Test Drive.
-# High level architecture
-![Design Graph](./design_graph.png)
-# Using the Test Drive
+## Quick start
-Upon starting the application, you can import a model using either Huggingface for LLMs or “from local disk” for Geti models.
+Upon starting the application, you can either import an LLM from Hugging Face or upload an Intel® Geti™ model from local disk.
-![Preview](./preview.png)
+### Text generation and LLM performance evaluation
-# Getting Started
+1. Find a model on Hugging Face and import it.
+

+ +

-## Release
+2. Chat with LLMs via the `Playground` tab.
+

+ +

-Download the latest release from the [Releases page](https://github.com/openvinotoolkit/openvino_testdrive/releases).
+3. Use the `Performance metrics` tab to get model performance metrics on your computer or edge device.
+

+ +

+### Image inference with models trained by Intel® Geti™
+
+1. Download the deployment code, in OpenVINO format, for a model trained by Intel® Geti™.
+
+

+ +

+> [!NOTE]
+> Please check the [Intel® Geti™ documentation](https://docs.geti.intel.com) for more details.
+
+2. Import the deployment code into OpenVINO™ Test Drive using the `Import model` -> `Local disk` button.
+
+

+ +

+3. Run inference on individual images and visualize the results using the `Live inference` tab.
+
+

+ +

+4. For batch inference, use the `Batch inference` tab: provide the path to a folder with input images in `Source folder`, specify a `Destination folder` for the output results, and click `Start` to begin batch inference.
+
+

+ +

 ## Build
 
@@ -33,10 +102,21 @@ The application requires the flutter SDK and the dependencies for your specific
 Secondly, the bindings and its dependencies for your platform to be added to `./bindings`.
 
 1. [Install flutter sdk](https://docs.flutter.dev/get-started/install). Make sure to follow the guide for flutter dependencies.
-2. [Download the bindings](https://github.com/intel-sandbox/applications.ai.geti.flutter.inference/releases) and extract them to ./bindings folder
+2. Build the bindings and put them into the `./bindings` folder. OpenVINO™ Test Drive uses bindings to OpenVINO™ GenAI and OpenVINO™ Vision ModelAPI, located in the `./openvino_bindings` folder. See the [readme](./openvino_bindings/README.md) for more details.
 3. Once done you can start the application: `flutter run`
 
-## Build bindings
+## Ecosystem
+
+- [OpenVINO™](https://github.com/openvinotoolkit/openvino) - software toolkit for optimizing and deploying deep learning models.
+- [GenAI Repository](https://github.com/openvinotoolkit/openvino.genai) and [OpenVINO Tokenizers](https://github.com/openvinotoolkit/openvino_tokenizers) - resources and tools for developing and optimizing Generative AI applications.
+- [Intel® Geti™](https://docs.geti.intel.com/) - software for building computer vision models.
+- [OpenVINO™ Vision ModelAPI](https://github.com/openvinotoolkit/model_api) - a set of wrapper classes for particular tasks and model architectures, simplifying data preprocessing and postprocessing as well as routine procedures.
+
+## Contributing
+
+If you would like to contribute to OpenVINO™ Test Drive, please check out the [Contribution Guidelines](CONTRIBUTING.md) for more details.
 
-The Test Drive uses c bindings to OpenVINO. These are located in `./openvino_bindings` folder. See [readme.md](./openvino_bindings/README.md).
+## License
+The OpenVINO™ Test Drive repository is licensed under the [Apache License Version 2.0](LICENSE).
+By contributing to the project, you agree to the license and copyright terms therein and release your contribution under these terms.
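A rough sketch of the Build steps above, assuming the Flutter SDK is installed and the platform bindings have already been built and copied into `./bindings`; the clone URL is inferred from the repository badges, and only `flutter pub get` and `flutter run` are named explicitly in this diff:

```sh
# Clone the project and enter it (URL inferred from the repository badges).
git clone https://github.com/openvinotoolkit/openvino_testdrive.git
cd openvino_testdrive

# Fetch Flutter dependencies (the same "Install project dependencies" step the CI workflows run).
flutter pub get

# With the platform bindings already placed in ./bindings, start the application.
flutter run
```

The bindings build itself is platform-specific; see `./openvino_bindings/README.md` for the full instructions.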
diff --git a/assets/manifest.json b/assets/manifest.json index 10592396..50377b65 100644 --- a/assets/manifest.json +++ b/assets/manifest.json @@ -1,996 +1,1799 @@ { "popular_models": [ { - "name": "Mistral 7b Instruct", + "name": "Mistral 7b Instruct V0.1", "id": "mistral-7b-instruct-v0.1-int8-ov", - "fileSize": 7824223166, + "fileSize": 7824223238, "optimizationPrecision": "int8", "contextWindow": 32768, - "description": "Chat with Mistral 7b Instruct V0.1 model", - "task": "text-generation" + "description": "Chat with Mistral 7b Instruct V0.1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { - "name": "Phi 3 Mini 4k", + "name": "Phi 3 Mini 4k Instruct", "id": "Phi-3-mini-4k-instruct-int4-ov", - "fileSize": 2637358233, + "fileSize": 2317366276, "optimizationPrecision": "int4", "contextWindow": 4096, - "description": "Chat with Phi 3 Mini 4k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Mini 4k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Whisper Base", + "id": "whisper-base-fp16-ov", + "fileSize": 263786768, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Whisper Base", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" }, { "name": "Open_llama_3b_v2", "id": "open_llama_3b_v2-int8-ov", - "fileSize": 3689232132, + "fileSize": 3689232204, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Open_llama_3b_v2 model", - "task": "text-generation" + "description": "Chat with Open_llama_3b_v2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + } + ], + "all_models": [ + { + "name": "Distil Large V2", + "id": "distil-large-v2-fp16-ov", + "fileSize": 1623693910, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Distil Large V2", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" }, { - "name": "Open_llama_3b_v2", - "id": "open_llama_3b_v2-int8-ov", - "fileSize": 3689232132, + "name": "Distil Large V2", + "id": "distil-large-v2-int8-ov", + "fileSize": 811967934, "optimizationPrecision": "int8", - "contextWindow": 2048, - "description": "Chat with Open_llama_3b_v2 model", - "task": "text-generation" + "contextWindow": 0, + "description": "Transcribe video with Distil Large V2", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" }, { - "name": "Open_llama_3b_v2", - "id": "open_llama_3b_v2-int8-ov", - "fileSize": 3689232132, + "name": "Distil Large V3", + "id": "distil-large-v3-int4-ov", + "fileSize": 470886140, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Transcribe video with Distil Large V3", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Large V3", + "id": "distil-large-v3-fp16-ov", + "fileSize": 1623692375, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Distil Large V3", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Large V3", + "id": "distil-large-v3-int8-ov", + "fileSize": 811966358, "optimizationPrecision": "int8", - "contextWindow": 2048, - 
"description": "Chat with Open_llama_3b_v2 model", - "task": "text-generation" - } - ], - "all_models": [ + "contextWindow": 0, + "description": "Transcribe video with Distil Large V3", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Large V2", + "id": "distil-large-v2-int4-ov", + "fileSize": 470887685, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Transcribe video with Distil Large V2", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Base", + "id": "distil-whisper-base-fp16-ov", + "fileSize": 160942998, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Base", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Base", + "id": "distil-whisper-base-int4-ov", + "fileSize": 66325080, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Base", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Base", + "id": "distil-whisper-base-int8-ov", + "fileSize": 88033050, + "optimizationPrecision": "int8", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Base", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Large V3", + "id": "distil-whisper-large-v3-int4-ov", + "fileSize": 905249204, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Large V3", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Medium", + "id": "distil-whisper-medium-fp16-ov", + "fileSize": 1611734288, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Medium", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Large V3", + "id": "distil-whisper-large-v3-int8-ov", + "fileSize": 1642880457, + "optimizationPrecision": "int8", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Large V3", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Large V3", + "id": "distil-whisper-large-v3-fp16-ov", + "fileSize": 3318535654, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Large V3", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Medium", + "id": "distil-whisper-medium-int4-ov", + "fileSize": 467835674, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Medium", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Medium", + "id": "distil-whisper-medium-int8-ov", + "fileSize": 820786325, + "optimizationPrecision": "int8", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Medium", + "task": "speech", + "author": "OpenVINO", + 
"collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Tiny", + "id": "distil-whisper-tiny-int4-ov", + "fileSize": 42419245, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Tiny", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Tiny", + "id": "distil-whisper-tiny-int8-ov", + "fileSize": 50341791, + "optimizationPrecision": "int8", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Tiny", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Distil Whisper Tiny", + "id": "distil-whisper-tiny-fp16-ov", + "fileSize": 87657508, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Distil Whisper Tiny", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Tiny", + "id": "whisper-tiny-int4-ov", + "fileSize": 68582454, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Transcribe video with Whisper Tiny", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Base", + "id": "whisper-base-int8-ov", + "fileSize": 140207692, + "optimizationPrecision": "int8", + "contextWindow": 0, + "description": "Transcribe video with Whisper Base", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Medium", + "id": "whisper-medium-int8-ov", + "fileSize": 1250473365, + "optimizationPrecision": "int8", + "contextWindow": 0, + "description": "Transcribe video with Whisper Medium", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Medium", + "id": "whisper-medium-int4-ov", + "fileSize": 719474580, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Transcribe video with Whisper Medium", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Tiny", + "id": "whisper-tiny-int8-ov", + "fileSize": 80769346, + "optimizationPrecision": "int8", + "contextWindow": 0, + "description": "Transcribe video with Whisper Tiny", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Base", + "id": "whisper-base-int4-ov", + "fileSize": 107331169, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Transcribe video with Whisper Base", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Large V3", + "id": "whisper-large-v3-fp16-ov", + "fileSize": 5038558437, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Whisper Large V3", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Medium", + "id": "whisper-medium-fp16-ov", + "fileSize": 2465759114, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Whisper Medium", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Tiny", + 
"id": "whisper-tiny-fp16-ov", + "fileSize": 147388830, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Whisper Tiny", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, + { + "name": "Whisper Base", + "id": "whisper-base-fp16-ov", + "fileSize": 263786768, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Transcribe video with Whisper Base", + "task": "speech", + "author": "OpenVINO", + "collection": "speech-to-text-672321d5c070537a178a8aeb" + }, { "name": "Phi 2", "id": "phi-2-fp16-ov", - "fileSize": 5978786593, + "fileSize": 5978786613, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Phi 2 model", - "task": "text-generation" + "description": "Chat with Phi 2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 2", "id": "phi-2-int8-ov", - "fileSize": 3004595529, + "fileSize": 3004595590, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Phi 2 model", - "task": "text-generation" + "description": "Chat with Phi 2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mistral 7b Instruct V0.1", "id": "mistral-7b-instruct-v0.1-int8-ov", - "fileSize": 7824223166, + "fileSize": 7824223238, "optimizationPrecision": "int8", "contextWindow": 32768, - "description": "Chat with Mistral 7b Instruct V0.1 model", - "task": "text-generation" + "description": "Chat with Mistral 7b Instruct V0.1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mistral 7b Instruct V0.1", "id": "mistral-7b-instruct-v0.1-fp16-ov", - "fileSize": 15576387089, + "fileSize": 15576387130, "optimizationPrecision": "fp16", "contextWindow": 32768, - "description": "Chat with Mistral 7b Instruct V0.1 model", - "task": "text-generation" + "description": "Chat with Mistral 7b Instruct V0.1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mistral 7b Instruct V0.1", "id": "mistral-7b-instruct-v0.1-int4-ov", - "fileSize": 4967917794, + "fileSize": 4967917876, "optimizationPrecision": "int4", "contextWindow": 32768, - "description": "Chat with Mistral 7b Instruct V0.1 model", - "task": "text-generation" + "description": "Chat with Mistral 7b Instruct V0.1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Codegen25 7b Multi", "id": "codegen25-7b-multi-fp16-ov", - "fileSize": 14822539137, + "fileSize": 14822064608, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Codegen25 7b Multi model", - "task": "text-generation" + "description": "Chat with Codegen25 7b Multi", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Codegen25 7b Multi", "id": "codegen25-7b-multi-int8-ov", - "fileSize": 7414035410, + "fileSize": 7413624227, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Codegen25 7b Multi model", - "task": "text-generation" + "description": "Chat with Codegen25 7b Multi", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mixtral 8x7b Instruct V0.1", "id": "mixtral-8x7b-instruct-v0.1-int4-ov", - "fileSize": 30833964831, + 
"fileSize": 30833964913, "optimizationPrecision": "int4", "contextWindow": 32768, - "description": "Chat with Mixtral 8x7b Instruct V0.1 model", - "task": "text-generation" + "description": "Chat with Mixtral 8x7b Instruct V0.1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mixtral 8x7B Instruct V0.1", "id": "Mixtral-8x7B-Instruct-v0.1-int8-ov", - "fileSize": 50160688476, + "fileSize": 50160688558, "optimizationPrecision": "int8", "contextWindow": 32768, - "description": "Chat with Mixtral 8x7B Instruct V0.1 model", - "task": "text-generation" + "description": "Chat with Mixtral 8x7B Instruct V0.1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Notus 7b V1", "id": "notus-7b-v1-fp16-ov", - "fileSize": 15576386988, + "fileSize": 15576387018, "optimizationPrecision": "fp16", "contextWindow": 32768, - "description": "Chat with Notus 7b V1 model", - "task": "text-generation" + "description": "Chat with Notus 7b V1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Notus 7b V1", "id": "notus-7b-v1-int8-ov", - "fileSize": 7803125798, + "fileSize": 7803125869, "optimizationPrecision": "int8", "contextWindow": 32768, - "description": "Chat with Notus 7b V1 model", - "task": "text-generation" + "description": "Chat with Notus 7b V1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Neural Chat 7b V3 3", "id": "neural-chat-7b-v3-3-fp16-ov", - "fileSize": 15576386599, + "fileSize": 15576386640, "optimizationPrecision": "fp16", "contextWindow": 32768, - "description": "Chat with Neural Chat 7b V3 3 model", - "task": "text-generation" + "description": "Chat with Neural Chat 7b V3 3", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Neural Chat 7b V3 3", "id": "neural-chat-7b-v3-3-int8-ov", - "fileSize": 7803125410, + "fileSize": 7803125481, "optimizationPrecision": "int8", "contextWindow": 32768, - "description": "Chat with Neural Chat 7b V3 3 model", - "task": "text-generation" + "description": "Chat with Neural Chat 7b V3 3", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Zephyr 7b Beta", "id": "zephyr-7b-beta-int8-ov", - "fileSize": 7803126061, + "fileSize": 7803126133, "optimizationPrecision": "int8", "contextWindow": 32768, - "description": "Chat with Zephyr 7b Beta model", - "task": "text-generation" + "description": "Chat with Zephyr 7b Beta", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Zephyr 7b Beta", "id": "zephyr-7b-beta-int4-ov", - "fileSize": 4904138531, + "fileSize": 4904138613, "optimizationPrecision": "int4", "contextWindow": 32768, - "description": "Chat with Zephyr 7b Beta model", - "task": "text-generation" + "description": "Chat with Zephyr 7b Beta", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Dolly V2 3b", "id": "dolly-v2-3b-int4-ov", - "fileSize": 2434908470, + "fileSize": 1694037426, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Dolly V2 3b model", - "task": "text-generation" + "description": "Chat with Dolly V2 3b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" 
}, { "name": "Dolly V2 3b", "id": "dolly-v2-3b-int8-ov", - "fileSize": 2993180745, + "fileSize": 2993264386, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Dolly V2 3b model", - "task": "text-generation" + "description": "Chat with Dolly V2 3b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Dolly V2 3b", "id": "dolly-v2-3b-fp16-ov", - "fileSize": 5967078157, + "fileSize": 5967350336, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Dolly V2 3b model", - "task": "text-generation" + "description": "Chat with Dolly V2 3b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Codegen2 3_7B_P", "id": "codegen2-3_7B_P-int4-ov", - "fileSize": 2252764320, + "fileSize": 2252764402, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Codegen2 3_7B_P model", - "task": "text-generation" + "description": "Chat with Codegen2 3_7B_P", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Codegen2 3_7B_P", "id": "codegen2-3_7B_P-fp16-ov", - "fileSize": 7835969716, + "fileSize": 7835969757, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Codegen2 3_7B_P model", - "task": "text-generation" + "description": "Chat with Codegen2 3_7B_P", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Zephyr 7b Beta", "id": "zephyr-7b-beta-fp16-ov", - "fileSize": 15576387241, + "fileSize": 15576387282, "optimizationPrecision": "fp16", "contextWindow": 32768, - "description": "Chat with Zephyr 7b Beta model", - "task": "text-generation" + "description": "Chat with Zephyr 7b Beta", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Codegen2 3_7B_P", "id": "codegen2-3_7B_P-int8-ov", - "fileSize": 3927738589, + "fileSize": 3927738671, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Codegen2 3_7B_P model", - "task": "text-generation" + "description": "Chat with Codegen2 3_7B_P", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "TinyLlama 1.1B Chat V1.0", "id": "TinyLlama-1.1B-Chat-v1.0-fp16-ov", - "fileSize": 2368272475, + "fileSize": 2368272527, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with TinyLlama 1.1B Chat V1.0 model", - "task": "text-generation" + "description": "Chat with TinyLlama 1.1B Chat V1.0", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "TinyLlama 1.1B Chat V1.0", "id": "TinyLlama-1.1B-Chat-v1.0-int4-ov", - "fileSize": 668269097, + "fileSize": 668269179, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with TinyLlama 1.1B Chat V1.0 model", - "task": "text-generation" + "description": "Chat with TinyLlama 1.1B Chat V1.0", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "TinyLlama 1.1B Chat V1.0", "id": "TinyLlama-1.1B-Chat-v1.0-int8-ov", - "fileSize": 1187586826, + "fileSize": 1187586908, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with TinyLlama 1.1B Chat V1.0 model", - "task": "text-generation" + "description": "Chat with TinyLlama 1.1B 
Chat V1.0", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Gpt Neox 20b", "id": "gpt-neox-20b-int8-ov", - "fileSize": 22128276574, + "fileSize": 22128276646, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Gpt Neox 20b model", - "task": "text-generation" + "description": "Chat with Gpt Neox 20b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Gpt Neox 20b", "id": "gpt-neox-20b-fp16-ov", - "fileSize": 44140098869, + "fileSize": 44140098910, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Gpt Neox 20b model", - "task": "text-generation" - }, - { - "name": "Gpt Neox 20b", - "id": "gpt-neox-20b-int4-ov", - "fileSize": 13968006447, - "optimizationPrecision": "int4", - "contextWindow": 2048, - "description": "Chat with Gpt Neox 20b model", - "task": "text-generation" + "description": "Chat with Gpt Neox 20b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Gpt J 6b", "id": "gpt-j-6b-int4-ov", - "fileSize": 4196810211, + "fileSize": 4196810272, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Gpt J 6b model", - "task": "text-generation" + "description": "Chat with Gpt J 6b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Gpt J 6b", "id": "gpt-j-6b-int8-ov", - "fileSize": 6515945720, + "fileSize": 6515945792, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Gpt J 6b model", - "task": "text-generation" + "description": "Chat with Gpt J 6b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Gpt J 6b", "id": "gpt-j-6b-fp16-ov", - "fileSize": 13001251300, + "fileSize": 13001251330, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Gpt J 6b model", - "task": "text-generation" + "description": "Chat with Gpt J 6b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Falcon 7b Instruct", "id": "falcon-7b-instruct-int4-ov", - "fileSize": 3959308647, + "fileSize": 3959308719, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Falcon 7b Instruct model", - "task": "text-generation" + "description": "Chat with Falcon 7b Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Falcon 7b Instruct", "id": "falcon-7b-instruct-fp16-ov", - "fileSize": 14825512501, + "fileSize": 14825512542, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Falcon 7b Instruct model", - "task": "text-generation" + "description": "Chat with Falcon 7b Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Falcon 7b Instruct", "id": "falcon-7b-instruct-int8-ov", - "fileSize": 7449021953, + "fileSize": 7449022025, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Falcon 7b Instruct model", - "task": "text-generation" + "description": "Chat with Falcon 7b Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Open_llama_7b_v2", "id": "open_llama_7b_v2-int8-ov", - "fileSize": 
7243946706, + "fileSize": 7243946788, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Open_llama_7b_v2 model", - "task": "text-generation" + "description": "Chat with Open_llama_7b_v2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Open_llama_7b_v2", "id": "open_llama_7b_v2-int4-ov", - "fileSize": 4581255942, + "fileSize": 4581256014, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Open_llama_7b_v2 model", - "task": "text-generation" + "description": "Chat with Open_llama_7b_v2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Open_llama_7b_v2", "id": "open_llama_7b_v2-fp16-ov", - "fileSize": 14502136910, + "fileSize": 14502136961, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Open_llama_7b_v2 model", - "task": "text-generation" + "description": "Chat with Open_llama_7b_v2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Open_llama_3b_v2", "id": "open_llama_3b_v2-int8-ov", - "fileSize": 3689232132, + "fileSize": 3689232204, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Open_llama_3b_v2 model", - "task": "text-generation" + "description": "Chat with Open_llama_3b_v2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Open_llama_3b_v2", "id": "open_llama_3b_v2-fp16-ov", - "fileSize": 7361187312, + "fileSize": 7361187363, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Open_llama_3b_v2 model", - "task": "text-generation" + "description": "Chat with Open_llama_3b_v2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 2", "id": "phi-2-int4-ov", - "fileSize": 1963097577, + "fileSize": 1963097638, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Phi 2 model", - "task": "text-generation" + "description": "Chat with Phi 2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Neural Chat 7b V3 3", "id": "neural-chat-7b-v3-3-int4-ov", - "fileSize": 4957174834, + "fileSize": 4957174906, "optimizationPrecision": "int4", "contextWindow": 32768, - "description": "Chat with Neural Chat 7b V3 3 model", - "task": "text-generation" + "description": "Chat with Neural Chat 7b V3 3", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Notus 7b V1", "id": "notus-7b-v1-int4-ov", - "fileSize": 4957175373, + "fileSize": 4957175444, "optimizationPrecision": "int4", "contextWindow": 32768, - "description": "Chat with Notus 7b V1 model", - "task": "text-generation" + "description": "Chat with Notus 7b V1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE Chat 3B V1", "id": "RedPajama-INCITE-Chat-3B-v1-int8-ov", - "fileSize": 3003403190, + "fileSize": 2993181070, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE Chat 3B V1 model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE Chat 3B V1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": 
"RedPajama INCITE 7B Instruct", "id": "RedPajama-INCITE-7B-Instruct-fp16-ov", - "fileSize": 14717999973, + "fileSize": 14718000035, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE 7B Instruct model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE 7B Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE 7B Instruct", "id": "RedPajama-INCITE-7B-Instruct-int4-ov", - "fileSize": 7384270376, + "fileSize": 7384270468, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE 7B Instruct model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE 7B Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE 7B Chat", "id": "RedPajama-INCITE-7B-Chat-int4-ov", - "fileSize": 4753728620, + "fileSize": 4753728702, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE 7B Chat model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE 7B Chat", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE 7B Instruct", "id": "RedPajama-INCITE-7B-Instruct-int8-ov", - "fileSize": 7384270355, + "fileSize": 7384270448, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE 7B Instruct model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE 7B Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE 7B Chat", "id": "RedPajama-INCITE-7B-Chat-fp16-ov", - "fileSize": 14717999600, + "fileSize": 14717999652, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE 7B Chat model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE 7B Chat", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE Chat 3B V1", "id": "RedPajama-INCITE-Chat-3B-v1-int4-ov", - "fileSize": 1972726843, + "fileSize": 1693954060, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE Chat 3B V1 model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE Chat 3B V1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE 7B Chat", "id": "RedPajama-INCITE-7B-Chat-int8-ov", - "fileSize": 7384270239, + "fileSize": 7384270331, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE 7B Chat model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE 7B Chat", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE Chat 3B V1", "id": "RedPajama-INCITE-Chat-3B-v1-fp16-ov", - "fileSize": 5977741177, + "fileSize": 5967266928, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE Chat 3B V1 model", - "task": "text-generation" - }, - { - "name": "Dolly V2 12b", - "id": "dolly-v2-12b-int4-ov", - "fileSize": 8093674841, - "optimizationPrecision": "int4", - "contextWindow": 2048, - "description": 
"Chat with Dolly V2 12b model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE Chat 3B V1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Dolly V2 7b", "id": "dolly-v2-7b-int4-ov", - "fileSize": 4753855475, + "fileSize": 4753855546, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Dolly V2 7b model", - "task": "text-generation" + "description": "Chat with Dolly V2 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Dolly V2 7b", "id": "dolly-v2-7b-int8-ov", - "fileSize": 7384407579, + "fileSize": 7384407651, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Dolly V2 7b model", - "task": "text-generation" + "description": "Chat with Dolly V2 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Dolly V2 7b", "id": "dolly-v2-7b-fp16-ov", - "fileSize": 14718147683, + "fileSize": 14718147724, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Dolly V2 7b model", - "task": "text-generation" + "description": "Chat with Dolly V2 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mistral 7B Instruct V0.2", "id": "Mistral-7B-Instruct-v0.2-int8-ov", - "fileSize": 7823897819, + "fileSize": 7823897901, "optimizationPrecision": "int8", "contextWindow": 32768, - "description": "Chat with Mistral 7B Instruct V0.2 model", - "task": "text-generation" + "description": "Chat with Mistral 7B Instruct V0.2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Dolly V2 12b", "id": "dolly-v2-12b-int8-ov", - "fileSize": 12785790267, + "fileSize": 12785790338, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Dolly V2 12b model", - "task": "text-generation" + "description": "Chat with Dolly V2 12b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mistral 7B Instruct V0.2", "id": "Mistral-7B-Instruct-v0.2-int4-ov", - "fileSize": 4957164416, + "fileSize": 4957164498, "optimizationPrecision": "int4", "contextWindow": 32768, - "description": "Chat with Mistral 7B Instruct V0.2 model", - "task": "text-generation" + "description": "Chat with Mistral 7B Instruct V0.2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mistral 7B Instruct V0.2", "id": "Mistral-7B-Instruct-v0.2-fp16-ov", - "fileSize": 15576062131, + "fileSize": 15576062183, "optimizationPrecision": "fp16", "contextWindow": 32768, - "description": "Chat with Mistral 7B Instruct V0.2 model", - "task": "text-generation" + "description": "Chat with Mistral 7B Instruct V0.2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Codegen25 7b Multi", "id": "codegen25-7b-multi-int4-ov", - "fileSize": 4760257312, + "fileSize": 4074538822, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Codegen25 7b Multi model", - "task": "text-generation" + "description": "Chat with Codegen25 7b Multi", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Persimmon 8b Chat", "id": "persimmon-8b-chat-int4-ov", - 
"fileSize": 6896839595, + "fileSize": 6896839666, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Persimmon 8b Chat model", - "task": "text-generation" + "description": "Chat with Persimmon 8b Chat", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Persimmon 8b Chat", "id": "persimmon-8b-chat-int8-ov", - "fileSize": 12791514405, + "fileSize": 12791514477, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Persimmon 8b Chat model", - "task": "text-generation" + "description": "Chat with Persimmon 8b Chat", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 1.4b", "id": "pythia-1.4b-int4-ov", - "fileSize": 6890411793, + "fileSize": 6890411865, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Pythia 1.4b model", - "task": "text-generation" + "description": "Chat with Pythia 1.4b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 1.4b", "id": "pythia-1.4b-int8-ov", - "fileSize": 12785086603, + "fileSize": 12785086675, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Pythia 1.4b model", - "task": "text-generation" + "description": "Chat with Pythia 1.4b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Persimmon 8b Chat", "id": "persimmon-8b-chat-fp16-ov", - "fileSize": 25461688234, + "fileSize": 25461688275, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Persimmon 8b Chat model", - "task": "text-generation" + "description": "Chat with Persimmon 8b Chat", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 1.4b", "id": "pythia-1.4b-fp16-ov", - "fileSize": 25455260443, + "fileSize": 25455260474, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Pythia 1.4b model", - "task": "text-generation" - }, - { - "name": "Pythia 12b", - "id": "pythia-12b-int4-ov", - "fileSize": 3824586206, - "optimizationPrecision": "int4", - "contextWindow": 2048, - "description": "Chat with Pythia 12b model", - "task": "text-generation" + "description": "Chat with Pythia 1.4b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 12b", "id": "pythia-12b-int8-ov", - "fileSize": 7153039029, + "fileSize": 7153039101, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Pythia 12b model", - "task": "text-generation" + "description": "Chat with Pythia 12b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 2.8b", "id": "pythia-2.8b-int8-ov", - "fileSize": 7153039039, + "fileSize": 7153039111, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Pythia 2.8b model", - "task": "text-generation" + "description": "Chat with Pythia 2.8b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 2.8b", "id": "pythia-2.8b-int4-ov", - "fileSize": 3824586216, + "fileSize": 3824586288, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Pythia 2.8b model", - "task": "text-generation" + "description": 
"Chat with Pythia 2.8b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 12b", "id": "pythia-12b-fp16-ov", - "fileSize": 14293243461, + "fileSize": 14293243502, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Pythia 12b model", - "task": "text-generation" + "description": "Chat with Pythia 12b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 6.9b", "id": "pythia-6.9b-int4-ov", - "fileSize": 3824586216, + "fileSize": 3824586288, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Pythia 6.9b model", - "task": "text-generation" + "description": "Chat with Pythia 6.9b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 6.9b", "id": "pythia-6.9b-int8-ov", - "fileSize": 7153039039, + "fileSize": 7153039111, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Pythia 6.9b model", - "task": "text-generation" + "description": "Chat with Pythia 6.9b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 2.8b", "id": "pythia-2.8b-fp16-ov", - "fileSize": 14293243256, + "fileSize": 14293243287, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Pythia 2.8b model", - "task": "text-generation" + "description": "Chat with Pythia 2.8b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 6.9b", "id": "pythia-6.9b-fp16-ov", - "fileSize": 14293243256, + "fileSize": 14293243287, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Pythia 6.9b model", - "task": "text-generation" + "description": "Chat with Pythia 6.9b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 1b", "id": "pythia-1b-int4-ov", - "fileSize": 669587847, + "fileSize": 669587919, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Pythia 1b model", - "task": "text-generation" + "description": "Chat with Pythia 1b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 1b", "id": "pythia-1b-int8-ov", - "fileSize": 1107284420, + "fileSize": 1107284481, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Pythia 1b model", - "task": "text-generation" + "description": "Chat with Pythia 1b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Pythia 1b", "id": "pythia-1b-fp16-ov", - "fileSize": 2181025578, + "fileSize": 2181025619, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Pythia 1b model", - "task": "text-generation" + "description": "Chat with Pythia 1b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Neural Chat 7b V1 1", "id": "neural-chat-7b-v1-1-int4-ov", - "fileSize": 3824586268, + "fileSize": 3824586350, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Neural Chat 7b V1 1 model", - "task": "text-generation" + "description": "Chat with Neural Chat 7b V1 1", + "task": "text-generation", + "author": "OpenVINO", + "collection": 
"llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Neural Chat 7b V1 1", "id": "neural-chat-7b-v1-1-int8-ov", - "fileSize": 7153039101, + "fileSize": 7153039173, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Neural Chat 7b V1 1 model", - "task": "text-generation" + "description": "Chat with Neural Chat 7b V1 1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Neural Chat 7b V1 1", "id": "neural-chat-7b-v1-1-fp16-ov", - "fileSize": 14293243287, + "fileSize": 14293243328, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Neural Chat 7b V1 1 model", - "task": "text-generation" + "description": "Chat with Neural Chat 7b V1 1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 3 Medium 4k Instruct", "id": "Phi-3-medium-4k-instruct-fp16-ov", - "fileSize": 29965606794, + "fileSize": 29965606845, "optimizationPrecision": "fp16", "contextWindow": 4096, - "description": "Chat with Phi 3 Medium 4k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Medium 4k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 3 Medium 4k Instruct", "id": "Phi-3-medium-4k-instruct-int4-ov", - "fileSize": 7964805299, + "fileSize": 7964805381, "optimizationPrecision": "int4", "contextWindow": 4096, - "description": "Chat with Phi 3 Medium 4k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Medium 4k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 3 Medium 4k Instruct", "id": "Phi-3-medium-4k-instruct-int8-ov", - "fileSize": 15040585641, + "fileSize": 15040585723, "optimizationPrecision": "int8", "contextWindow": 4096, - "description": "Chat with Phi 3 Medium 4k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Medium 4k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mpt 7b", "id": "mpt-7b-int8-ov", - "fileSize": 7146788625, + "fileSize": 7146788697, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with Mpt 7b model", - "task": "text-generation" + "description": "Chat with Mpt 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Mpt 7b", "id": "mpt-7b-fp16-ov", - "fileSize": 14286814799, + "fileSize": 14286814840, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with Mpt 7b model", - "task": "text-generation" + "description": "Chat with Mpt 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Starcoder2 15b", "id": "starcoder2-15b-int8-ov", - "fileSize": 17190289720, + "fileSize": 17190289792, "optimizationPrecision": "int8", "contextWindow": 16384, - "description": "Chat with Starcoder2 15b model", - "task": "text-generation" + "description": "Chat with Starcoder2 15b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Starcoder2 15b", "id": "starcoder2-15b-int4-ov", - "fileSize": 9169658515, + "fileSize": 9169658587, "optimizationPrecision": "int4", "contextWindow": 16384, - "description": "Chat with Starcoder2 15b model", - "task": "text-generation" + 
"description": "Chat with Starcoder2 15b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Starcoder2 15b", "id": "starcoder2-15b-fp16-ov", - "fileSize": 34262102706, + "fileSize": 34262102747, "optimizationPrecision": "fp16", "contextWindow": 16384, - "description": "Chat with Starcoder2 15b model", - "task": "text-generation" + "description": "Chat with Starcoder2 15b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Starcoder2 7b", "id": "starcoder2-7b-fp16-ov", - "fileSize": 15470719144, + "fileSize": 15470719185, "optimizationPrecision": "fp16", "contextWindow": 16384, - "description": "Chat with Starcoder2 7b model", - "task": "text-generation" + "description": "Chat with Starcoder2 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Starcoder2 7b", "id": "starcoder2-7b-int4-ov", - "fileSize": 4111254624, + "fileSize": 4111254696, "optimizationPrecision": "int4", "contextWindow": 16384, - "description": "Chat with Starcoder2 7b model", - "task": "text-generation" + "description": "Chat with Starcoder2 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Starcoder2 7b", "id": "starcoder2-7b-int8-ov", - "fileSize": 7729586294, + "fileSize": 7729586365, "optimizationPrecision": "int8", "contextWindow": 16384, - "description": "Chat with Starcoder2 7b model", - "task": "text-generation" + "description": "Chat with Starcoder2 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Starcoder2 3b", "id": "starcoder2-3b-int4-ov", - "fileSize": 1780962229, + "fileSize": 1780962300, "optimizationPrecision": "int4", "contextWindow": 16384, - "description": "Chat with Starcoder2 3b model", - "task": "text-generation" + "description": "Chat with Starcoder2 3b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Starcoder2 3b", "id": "starcoder2-3b-fp16-ov", - "fileSize": 6526229346, + "fileSize": 6526229387, "optimizationPrecision": "fp16", "contextWindow": 16384, - "description": "Chat with Starcoder2 3b model", - "task": "text-generation" + "description": "Chat with Starcoder2 3b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Starcoder2 3b", "id": "starcoder2-3b-int8-ov", - "fileSize": 3273295377, + "fileSize": 3273295448, "optimizationPrecision": "int8", "contextWindow": 16384, - "description": "Chat with Starcoder2 3b model", - "task": "text-generation" + "description": "Chat with Starcoder2 3b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 3 Mini 4k Instruct", "id": "Phi-3-mini-4k-instruct-fp16-ov", - "fileSize": 8209920736, + "fileSize": 8212081759, "optimizationPrecision": "fp16", "contextWindow": 4096, - "description": "Chat with Phi 3 Mini 4k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Mini 4k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 3 Mini 128k Instruct", "id": "Phi-3-mini-128k-instruct-fp16-ov", - "fileSize": 8210027955, + "fileSize": 8210027996, "optimizationPrecision": "fp16", "contextWindow": 131072, - "description": "Chat with Phi 3 Mini 
128k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Mini 128k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 3 Mini 128k Instruct", "id": "Phi-3-mini-128k-instruct-int4-ov", - "fileSize": 2637434178, + "fileSize": 2637434260, "optimizationPrecision": "int4", "contextWindow": 131072, - "description": "Chat with Phi 3 Mini 128k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Mini 128k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 3 Mini 4k Instruct", "id": "Phi-3-mini-4k-instruct-int4-ov", - "fileSize": 2637358233, + "fileSize": 2317366276, "optimizationPrecision": "int4", "contextWindow": 4096, - "description": "Chat with Phi 3 Mini 4k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Mini 4k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 3 Mini 4k Instruct", "id": "Phi-3-mini-4k-instruct-int8-ov", - "fileSize": 4108269167, + "fileSize": 4110513544, "optimizationPrecision": "int8", "contextWindow": 4096, - "description": "Chat with Phi 3 Mini 4k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Mini 4k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Phi 3 Mini 128k Instruct", "id": "Phi-3-mini-128k-instruct-int8-ov", - "fileSize": 4108345114, + "fileSize": 4108345195, "optimizationPrecision": "int8", "contextWindow": 131072, - "description": "Chat with Phi 3 Mini 128k Instruct model", - "task": "text-generation" + "description": "Chat with Phi 3 Mini 128k Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "Open_llama_3b_v2", "id": "open_llama_3b_v2-int4-ov", - "fileSize": 1960434251, + "fileSize": 1960434322, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with Open_llama_3b_v2 model", - "task": "text-generation" + "description": "Chat with Open_llama_3b_v2", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE Instruct 3B V1", "id": "RedPajama-INCITE-Instruct-3B-v1-fp16-ov", - "fileSize": 5975908581, + "fileSize": 5975908643, "optimizationPrecision": "fp16", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE Instruct 3B V1 model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE Instruct 3B V1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE Instruct 3B V1", "id": "RedPajama-INCITE-Instruct-3B-v1-int4-ov", - "fileSize": 1970894247, + "fileSize": 1970894350, "optimizationPrecision": "int4", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE Instruct 3B V1 model", - "task": "text-generation" + "description": "Chat with RedPajama INCITE Instruct 3B V1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" }, { "name": "RedPajama INCITE Instruct 3B V1", "id": "RedPajama-INCITE-Instruct-3B-v1-int8-ov", - "fileSize": 3001571035, + "fileSize": 3001571127, "optimizationPrecision": "int8", "contextWindow": 2048, - "description": "Chat with RedPajama INCITE Instruct 3B V1 model", - "task": 
"text-generation" + "description": "Chat with RedPajama INCITE Instruct 3B V1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 2b It", + "id": "gemma-2b-it-fp16-ov", + "fileSize": 5437344390, + "optimizationPrecision": "fp16", + "contextWindow": 8192, + "description": "Chat with Gemma 2b It", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 2b It", + "id": "gemma-2b-it-int8-ov", + "fileSize": 2753201206, + "optimizationPrecision": "int8", + "contextWindow": 8192, + "description": "Chat with Gemma 2b It", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 2b", + "id": "gemma-2b-fp16-ov", + "fileSize": 5437342382, + "optimizationPrecision": "fp16", + "contextWindow": 8192, + "description": "Chat with Gemma 2b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 2b", + "id": "gemma-2b-int4-ov", + "fileSize": 1722552895, + "optimizationPrecision": "int4", + "contextWindow": 8192, + "description": "Chat with Gemma 2b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 2b It", + "id": "gemma-2b-it-int4-ov", + "fileSize": 1722555876, + "optimizationPrecision": "int4", + "contextWindow": 8192, + "description": "Chat with Gemma 2b It", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 7b", + "id": "gemma-7b-fp16-ov", + "fileSize": 18419687858, + "optimizationPrecision": "fp16", + "contextWindow": 8192, + "description": "Chat with Gemma 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 7b", + "id": "gemma-7b-int4-ov", + "fileSize": 5223957293, + "optimizationPrecision": "int4", + "contextWindow": 8192, + "description": "Chat with Gemma 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 7b", + "id": "gemma-7b-int8-ov", + "fileSize": 9228794075, + "optimizationPrecision": "int8", + "contextWindow": 8192, + "description": "Chat with Gemma 7b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 7b It", + "id": "gemma-7b-it-int8-ov", + "fileSize": 9228795991, + "optimizationPrecision": "int8", + "contextWindow": 8192, + "description": "Chat with Gemma 7b It", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 7b It", + "id": "gemma-7b-it-fp16-ov", + "fileSize": 18419689774, + "optimizationPrecision": "fp16", + "contextWindow": 8192, + "description": "Chat with Gemma 7b It", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 7b It", + "id": "gemma-7b-it-int4-ov", + "fileSize": 5223959210, + "optimizationPrecision": "int4", + "contextWindow": 8192, + "description": "Chat with Gemma 7b It", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Bloomz 1b1", + "id": "bloomz-1b1-fp16-ov", + "fileSize": 2322177211, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Chat with Bloomz 1b1", + 
"task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Bloomz 1b1", + "id": "bloomz-1b1-int4-ov", + "fileSize": 812344015, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Chat with Bloomz 1b1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Bloomz 3b", + "id": "bloomz-3b-int8-ov", + "fileSize": 3267500581, + "optimizationPrecision": "int8", + "contextWindow": 0, + "description": "Chat with Bloomz 3b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Bloomz 1b1", + "id": "bloomz-1b1-int8-ov", + "fileSize": 1184136430, + "optimizationPrecision": "int8", + "contextWindow": 0, + "description": "Chat with Bloomz 1b1", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Bloomz 3b", + "id": "bloomz-3b-int4-ov", + "fileSize": 2043570274, + "optimizationPrecision": "int4", + "contextWindow": 0, + "description": "Chat with Bloomz 3b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Bloomz 3b", + "id": "bloomz-3b-fp16-ov", + "fileSize": 6488568080, + "optimizationPrecision": "fp16", + "contextWindow": 0, + "description": "Chat with Bloomz 3b", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Codegen 2B Multi", + "id": "codegen-2B-multi-int8-ov", + "fileSize": 2995146381, + "optimizationPrecision": "int8", + "contextWindow": 2048, + "description": "Chat with Codegen 2B Multi", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Codegen 2B Multi", + "id": "codegen-2B-multi-fp16-ov", + "fileSize": 5980001146, + "optimizationPrecision": "fp16", + "contextWindow": 2048, + "description": "Chat with Codegen 2B Multi", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Codegen 2B Multi", + "id": "codegen-2B-multi-int4-ov", + "fileSize": 1696055110, + "optimizationPrecision": "int4", + "contextWindow": 2048, + "description": "Chat with Codegen 2B Multi", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Codegen 6B Multi", + "id": "codegen-6B-multi-int4-ov", + "fileSize": 4165776649, + "optimizationPrecision": "int4", + "contextWindow": 2048, + "description": "Chat with Codegen 6B Multi", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Codegen 6B Multi", + "id": "codegen-6B-multi-fp16-ov", + "fileSize": 15149850695, + "optimizationPrecision": "fp16", + "contextWindow": 2048, + "description": "Chat with Codegen 6B Multi", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Codegen 6B Multi", + "id": "codegen-6B-multi-int8-ov", + "fileSize": 7601603664, + "optimizationPrecision": "int8", + "contextWindow": 2048, + "description": "Chat with Codegen 6B Multi", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Phi 3.5 Mini Instruct", + "id": "Phi-3.5-mini-instruct-fp16-ov", + "fileSize": 8211750487, + "optimizationPrecision": "fp16", + "contextWindow": 131072, + 
"description": "Chat with Phi 3.5 Mini Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Phi 3.5 Mini Instruct", + "id": "Phi-3.5-mini-instruct-int8-ov", + "fileSize": 4110193740, + "optimizationPrecision": "int8", + "contextWindow": 131072, + "description": "Chat with Phi 3.5 Mini Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Phi 3.5 Mini Instruct", + "id": "Phi-3.5-mini-instruct-int4-ov", + "fileSize": 2242029798, + "optimizationPrecision": "int4", + "contextWindow": 131072, + "description": "Chat with Phi 3.5 Mini Instruct", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 2 9b It", + "id": "gemma-2-9b-it-int4-ov", + "fileSize": 5698219462, + "optimizationPrecision": "int4", + "contextWindow": 8192, + "description": "Chat with Gemma 2 9b It", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 2 9b It", + "id": "gemma-2-9b-it-int8-ov", + "fileSize": 9992851184, + "optimizationPrecision": "int8", + "contextWindow": 8192, + "description": "Chat with Gemma 2 9b It", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" + }, + { + "name": "Gemma 2 9b It", + "id": "gemma-2-9b-it-fp16-ov", + "fileSize": 19924637310, + "optimizationPrecision": "fp16", + "contextWindow": 8192, + "description": "Chat with Gemma 2 9b It", + "task": "text-generation", + "author": "OpenVINO", + "collection": "llm-6687aaa2abca3bbcec71a9bd" } ] } diff --git a/design_graph.png b/design_graph.png deleted file mode 100644 index 33bed087..00000000 Binary files a/design_graph.png and /dev/null differ diff --git a/docs/geti_batch.gif b/docs/geti_batch.gif new file mode 100644 index 00000000..25311a0b Binary files /dev/null and b/docs/geti_batch.gif differ diff --git a/docs/geti_cv.gif b/docs/geti_cv.gif new file mode 100644 index 00000000..ca0e0fcd Binary files /dev/null and b/docs/geti_cv.gif differ diff --git a/docs/geti_download.gif b/docs/geti_download.gif new file mode 100644 index 00000000..baa26aa4 Binary files /dev/null and b/docs/geti_download.gif differ diff --git a/docs/geti_import.gif b/docs/geti_import.gif new file mode 100644 index 00000000..1d91dd20 Binary files /dev/null and b/docs/geti_import.gif differ diff --git a/docs/llm_import.gif b/docs/llm_import.gif new file mode 100644 index 00000000..aa47540f Binary files /dev/null and b/docs/llm_import.gif differ diff --git a/docs/llm_model_chat.gif b/docs/llm_model_chat.gif new file mode 100644 index 00000000..98d47226 Binary files /dev/null and b/docs/llm_model_chat.gif differ diff --git a/docs/metrics.gif b/docs/metrics.gif new file mode 100644 index 00000000..c62735bc Binary files /dev/null and b/docs/metrics.gif differ diff --git a/docs/readme.gif b/docs/readme.gif new file mode 100644 index 00000000..923f5133 Binary files /dev/null and b/docs/readme.gif differ diff --git a/docs/win_inst.gif b/docs/win_inst.gif new file mode 100644 index 00000000..1f926392 Binary files /dev/null and b/docs/win_inst.gif differ diff --git a/docs/win_inst2.gif b/docs/win_inst2.gif new file mode 100644 index 00000000..ecb4fe44 Binary files /dev/null and b/docs/win_inst2.gif differ diff --git a/integration_test/app_test.dart b/integration_test/app_test.dart index 2c6bd98f..01eb345b 100644 --- 
a/integration_test/app_test.dart
+++ b/integration_test/app_test.dart
@@ -1,4 +1,3 @@
-import 'package:flutter/foundation.dart';
 import 'package:flutter_test/flutter_test.dart';
 import 'package:inference/main.dart';
 import 'package:integration_test/integration_test.dart';
@@ -7,28 +6,27 @@ void main() {
   IntegrationTestWidgetsFlutterBinding.ensureInitialized();
 
   testWidgets('Download model from HF', (tester) async {
-    final originalOnError = FlutterError.onError!;
-
     const app = App();
     await tester.pumpWidget(app);
-    FlutterError.onError = originalOnError;
-
-    await tester.tap(find.text('Import Model'));
+    await tester.tap(find.text('Import model'));
     await tester.pumpAndSettle();
-    await tester.tap(find.text('Huggingface'));
+    await tester.tap(find.text('Hugging Face'));
     await tester.pumpAndSettle();
-    await tester.tap(find.bySemanticsLabel('Search by name'));
+    final searchBarFinder = find.bySemanticsLabel('Find a model').first;
+    await tester.tap(searchBarFinder, warnIfMissed: false);
     await tester.pumpAndSettle();
-    await tester.enterText(find.bySemanticsLabel('Search by name'), 'tiny');
+    await tester.enterText(searchBarFinder, 'tiny');
     await tester.pumpAndSettle();
-    await tester.tap(find.text('TinyLlama-1.1B-Chat-v1.0-int4-ov'));
+    await tester.tap(find.text('TinyLlama 1.1B Chat V1.0').first);
     await tester.pumpAndSettle();
-    await tester.tap(find.text('Add model'));
+    await tester.tap(find.text('Import selected model'));
     await tester.pumpFrames(app, const Duration(seconds: 1));
-    expect(find.textContaining(RegExp(r'^[1-9]\d* MB$')), findsNWidgets(2));
+    expect(find.textContaining(RegExp(r'^[1-9][\d,]* MB$')), findsNWidgets(2));
+
+    await tester.pumpAndSettle();
   });
 }
\ No newline at end of file
diff --git a/lib/importers/manifest_importer.dart b/lib/importers/manifest_importer.dart
index 116f272c..7391c7d5 100644
--- a/lib/importers/manifest_importer.dart
+++ b/lib/importers/manifest_importer.dart
@@ -1,9 +1,14 @@
 import 'dart:convert';
+import 'dart:io';
 
 import 'package:flutter/services.dart';
 import 'package:flutter/widgets.dart';
+import 'package:inference/project.dart';
+import 'package:inference/public_model_info.dart';
 import 'package:inference/utils/get_public_thumbnail.dart';
 import 'package:inference/utils.dart';
+import 'package:path_provider/path_provider.dart';
+import 'package:uuid/uuid.dart';
 
 class Model {
   final String name;
@@ -45,6 +50,48 @@ class Model {
       task: json['task'],
     );
   }
+
+  Future convertToProject() async {
+    final directory = await getApplicationSupportDirectory();
+    final projectId = const Uuid().v4();
+    final storagePath = platformContext.join(directory.path, projectId.toString());
+    await Directory(storagePath).create(recursive: true);
+    final projectType = parseProjectType(task);
+
+    final project = PublicProject(
+      projectId,
+      "OpenVINO/$id",
+      "1.0.0",
+      name,
+      DateTime.now().toIso8601String(),
+      projectType,
+      storagePath,
+      thumbnail,
+      PublicModelInfo(
+        id,
+        DateTime.now().toIso8601String(),
+        0,
+        0,
+        task,
+        const Collection("https://huggingface.co/api/collections/OpenVINO/llm-6687aaa2abca3bbcec71a9bd", "", "text"),
+      ),
+    );
+
+    project.tasks.add(
+      Task(
+        genUUID(),
+        task,
+        task,
+        [],
+        null,
+        [],
+        "",
+        "",
+      ),
+    );
+
+    return project;
+  }
 }
 
 class ManifestImporter {
diff --git a/lib/inference/download_page.dart b/lib/inference/download_page.dart
index fac683be..c228c99e 100644
--- a/lib/inference/download_page.dart
+++ b/lib/inference/download_page.dart
@@ -38,7 +38,7 @@ class _DownloadPageState extends State<DownloadPage> {
     final downloadProvider = Provider.of<DownloadProvider>(context, listen: false);
     final projectProvider = Provider.of<ProjectProvider>(context, listen: false);
 
-    final files = await downloadFiles(widget.project);
+    final files = await listDownloadFiles(widget.project);
 
     try {
       await downloadProvider.queue(files, widget.project.modelInfo?.collection.token);
diff --git a/lib/inference/inference_page.dart b/lib/inference/inference_page.dart
index 9bd08cbb..6e13ac09 100644
--- a/lib/inference/inference_page.dart
+++ b/lib/inference/inference_page.dart
@@ -2,6 +2,7 @@ import 'package:flutter/material.dart';
 import 'package:inference/inference/download_page.dart';
 import 'package:inference/inference/image_inference_page.dart';
 import 'package:inference/inference/text_inference_page.dart';
+import 'package:inference/inference/text_to_image_inference_page.dart';
 import 'package:inference/project.dart';
 import 'package:inference/providers/download_provider.dart';
 import 'package:provider/provider.dart';
@@ -23,12 +24,14 @@ class _InferencePageState extends State<InferencePage> {
         return ImageInferencePage(widget.project);
       case ProjectType.text:
         return TextInferencePage(widget.project);
+      case ProjectType.textToImage:
+        return TextToImageInferencePage(widget.project);
       case ProjectType.speech:
         return Container();
     }
   } else {
     return ChangeNotifierProvider(
-      create: (_) => DownloadProvider(widget.project),
+      create: (_) => DownloadProvider(),
       child: DownloadPage(widget.project as PublicProject,
         onDone: () => setState(() {}), //trigger rerender.
       )
diff --git a/lib/inference/textToImage/tti_metric_widgets.dart b/lib/inference/textToImage/tti_metric_widgets.dart
new file mode 100644
index 00000000..c7cc4e6f
--- /dev/null
+++ b/lib/inference/textToImage/tti_metric_widgets.dart
@@ -0,0 +1,33 @@
+import 'package:flutter/material.dart';
+import 'package:inference/inference/text/metric_widgets.dart';
+import 'package:inference/interop/openvino_bindings.dart';
+import 'package:intl/intl.dart';
+
+class TTICirclePropRow extends StatelessWidget {
+  final TTIMetrics metrics;
+
+  const TTICirclePropRow({super.key, required this.metrics});
+
+  @override
+  Widget build(BuildContext context) {
+    Locale locale = Localizations.localeOf(context);
+    final nf = NumberFormat.decimalPatternDigits(
+        locale: locale.languageCode, decimalDigits: 0);
+
+    return Row(
+      mainAxisAlignment: MainAxisAlignment.spaceBetween,
+      children: [
+        CircleProp(
+          header: "Time to load model",
+          value: nf.format(metrics.load_time),
+          unit: "ms",
+        ),
+        CircleProp(
+          header: "Time to generate image",
+          value: nf.format(metrics.generate_time),
+          unit: "ms",
+        )
+      ],
+    );
+  }
+}
diff --git a/lib/inference/textToImage/tti_performance_metrics.dart b/lib/inference/textToImage/tti_performance_metrics.dart
new file mode 100644
index 00000000..7279701c
--- /dev/null
+++ b/lib/inference/textToImage/tti_performance_metrics.dart
@@ -0,0 +1,71 @@
+import 'package:flutter/material.dart';
+import 'package:inference/inference/text/metric_widgets.dart';
+import 'package:inference/inference/textToImage/tti_metric_widgets.dart';
+import 'package:inference/providers/text_to_image_inference_provider.dart';
+import 'package:inference/theme.dart';
+import 'package:intl/intl.dart';
+import 'package:provider/provider.dart';
+
+class TTIPerformanceMetricsPage extends StatefulWidget {
+  const TTIPerformanceMetricsPage({super.key});
+
+  @override
+  State<TTIPerformanceMetricsPage> createState() => _TTIPerformanceMetricsPageState();
+}
+
+class _TTIPerformanceMetricsPageState extends State<TTIPerformanceMetricsPage> {
+
+  @override
+  void initState() {
+    super.initState();
+    final provider = 
Provider.of(context, listen: false); + if (provider.metrics == null) { + provider.loaded.future.then((_) { + provider.message("Generate OpenVINO logo"); + }); + } + } + + @override + Widget build(BuildContext context) { + return Consumer(builder: (context, inference, child) { + if (inference.metrics == null) { + return Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Image.asset('images/intel-loading.gif', width: 100), + const Text("Running benchmark prompt...") + ], + ) + ); + } + + Locale locale = Localizations.localeOf(context); + final nf = NumberFormat.decimalPatternDigits( + locale: locale.languageCode, decimalDigits: 0); + + final metrics = inference.metrics!; + + return Container( + decoration: BoxDecoration( + shape: BoxShape.rectangle, + borderRadius: const BorderRadius.all(Radius.circular(8)), + color: intelGray, + ), + child: Padding( + padding: const EdgeInsets.all(30.0), + child: Column( + mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.stretch, + children: [ + TTICirclePropRow(metrics: metrics), + ], + ), + ), + ); + }); + } +} + + diff --git a/lib/inference/textToImage/tti_playground.dart b/lib/inference/textToImage/tti_playground.dart new file mode 100644 index 00000000..3a468f8c --- /dev/null +++ b/lib/inference/textToImage/tti_playground.dart @@ -0,0 +1,463 @@ + +import 'package:flutter/material.dart'; +import 'package:flutter_svg/svg.dart'; +import 'package:inference/config.dart'; +import 'package:inference/hint.dart'; +import 'package:inference/inference/device_selector.dart'; +import 'package:inference/inference/textToImage/tti_metric_widgets.dart'; +import 'package:inference/interop/openvino_bindings.dart'; +import 'package:inference/providers/text_to_image_inference_provider.dart'; +import 'package:inference/theme.dart'; +import 'package:provider/provider.dart'; +import 'package:super_clipboard/super_clipboard.dart'; + +class TTIPlayground extends StatefulWidget { + const TTIPlayground({super.key}); + + @override + State createState() => _PlaygroundState(); +} + +class _PlaygroundState extends State { + final _controller = TextEditingController(); + final _scrollController = ScrollController(); + bool attachedToBottom = true; + + void jumpToBottom({ offset = 0 }) { + if (_scrollController.hasClients) { + _scrollController.jumpTo(_scrollController.position.maxScrollExtent + offset); + } + } + + void message(String message) async { + if (message.isEmpty) { + return; + } + final tti = provider(); + if (!tti.initialized) { + return; + } + + if (tti.response != null) { + return; + } + _controller.text = ""; + jumpToBottom(offset: 110); //move to bottom including both + tti.message(message); + } + + TextToImageInferenceProvider provider() => Provider.of(context, listen: false); + + @override + void initState() { + super.initState(); + _scrollController.addListener(() { + setState(() { + attachedToBottom = _scrollController.position.pixels + 0.001 >= _scrollController.position.maxScrollExtent; + }); + + }); + } + + @override + void dispose() { + super.dispose(); + _controller.dispose(); + _scrollController.dispose(); + } + + + @override + Widget build(BuildContext context) { + return Consumer(builder: (context, inference, child) { + WidgetsBinding.instance.addPostFrameCallback((_) { + if (attachedToBottom) { + jumpToBottom(); + } + }); + + return Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + const Padding( + padding: EdgeInsets.only(left: 8), + child: Row( + 
mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + DeviceSelector(), + Hint(hint: HintsEnum.intelCoreLLMPerformanceSuggestion), + ] + ), + ), + Builder( + builder: (context) { + if (!inference.initialized){ + return Expanded( + child: Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Image.asset('images/intel-loading.gif', width: 100), + const Text("Loading model...") + ], + ) + ), + ); + } + return Expanded( + child: Container( + decoration: const BoxDecoration( + shape: BoxShape.rectangle, + borderRadius: BorderRadius.all(Radius.circular(8)), + color: intelGray, + ), + child: Column( + children: [ + Expanded( + child: Builder(builder: (context) { + if (inference.messages.isEmpty) { + return Center( + child: Text("Type a message to ${inference.project?.name ?? "assistant"}")); + } + return Stack( + alignment: Alignment.bottomCenter, + children: [ + SingleChildScrollView( + controller: _scrollController, + child: Padding( + padding: const EdgeInsets.all(20), + child: Column( + //mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.stretch, + children: inference.messages.map((message) { + switch (message.speaker) { + case Speaker.user: + return UserInputMessage(message); + case Speaker.assistant: + return GeneratedImageMessage(message, inference.project!.name); + } + }).toList()), + ), + ), + Positioned( + bottom: 10, + child: Builder( + builder: (context) { + if (attachedToBottom) { + return Container(); + } + return Center( + child: Padding( + padding: const EdgeInsets.only(top: 2.0), + child: SizedBox( + width: 200, + height: 20, + child: FloatingActionButton( + backgroundColor: intelGray, + child: const Text("Jump to bottom"), + onPressed: () { + jumpToBottom(); + setState(() { + attachedToBottom = true; + }); + } + ), + ), + ), + ); + } + ), + ), + + ], + ); + }), + ), + + // SizedBox( + // height: 30, + // child: Builder( + // builder: (context) { + // if (inference.interimResponse == null){ + // return Container(); + // } + // return Center( + // child: OutlinedButton.icon( + // onPressed: () => inference.forceStop(), + // icon: const Icon(Icons.stop), + // label: const Text("Stop responding") + // ), + // ); + // } + // ), + // ), + Padding( + padding: const EdgeInsets.only(left: 45, right: 45, top: 10, bottom: 25), + child: SizedBox( + height: 40, + child: Row( + crossAxisAlignment: CrossAxisAlignment.end, + children: [ + Padding( + padding: const EdgeInsets.only(right: 8), + child: IconButton( + icon: SvgPicture.asset("images/clear.svg", + colorFilter: const ColorFilter.mode(textColor, BlendMode.srcIn), + width: 20, + ), + tooltip: "Clear chat", + onPressed: () => inference.reset(), + style: IconButton.styleFrom( + backgroundColor: intelGrayReallyDark, + shape: const RoundedRectangleBorder( + borderRadius: BorderRadius.all(Radius.circular(4)), + side: BorderSide( + color: intelGrayLight, + width: 2, + ) + ) + ) + ), + ), + Expanded( + child: TextField( + maxLines: null, + keyboardType: TextInputType.text, + decoration: InputDecoration( + hintText: "Ask me anything...", + suffixIcon: IconButton( + icon: Icon(Icons.send, color: (inference.interimResponse == null ? 
Colors.white : intelGray)), + onPressed: () => message(_controller.text), + ), + enabledBorder: const OutlineInputBorder( + borderRadius: BorderRadius.all(Radius.circular(4)), + borderSide: BorderSide( + color: intelGrayLight, + width: 2, + ) + ), + ), + style: const TextStyle( + fontSize: 14, + ), + controller: _controller, + onSubmitted: message, + ), + ), + ], + ), + ), + ), + ], + ), + ), + ); + } + ), + ], + ); + }); + } +} + +class UserInputMessage extends StatelessWidget { + final Message message; + const UserInputMessage(this.message, {super.key}); + + @override + Widget build(BuildContext context) { + return Padding( + padding: const EdgeInsets.only(bottom: 20), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + NameRowWidget(name: "You", icon: SvgPicture.asset("images/user.svg", + colorFilter: const ColorFilter.mode(textColor, BlendMode.srcIn), + width: 20, + ), + ), + MessageWidget(message: message.message), + ], + ), + ); + } +} + +class GeneratedImageMessage extends StatelessWidget { + final Message message; + final String name; + const GeneratedImageMessage(this.message, this.name, {super.key}); + + @override + Widget build(BuildContext context) { + return Padding( + padding: const EdgeInsets.only(bottom: 20), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + NameRowWidget( + name: name, + icon: SvgPicture.asset("images/network.svg", + colorFilter: const ColorFilter.mode(textColor, BlendMode.srcIn), + width: 20, + ), + ), + ImageWidget(message: message.message, image: Image.memory(message.imageContent!.imageData, width: message.imageContent!.width.toDouble(), height: message.imageContent!.height.toDouble(), fit: message.imageContent!.boxFit)), + Padding( + padding: const EdgeInsets.only(left: 28, top: 5), + child: Builder( + builder: (context) { + if (message.speaker == Speaker.user) { + return Container(); + } + return Row( + children: [ + Opacity( + opacity: message.allowedCopy ? 1.0 : 0.25, + child: + IconButton.filled( + icon: SvgPicture.asset("images/copy.svg", + colorFilter: const ColorFilter.mode(textColor, BlendMode.srcIn), + width: 20, + ), + style: IconButton.styleFrom( + backgroundColor: intelGrayLight, + shape: const RoundedRectangleBorder( + borderRadius: BorderRadius.all(Radius.circular(4)), + ), + ), + padding: const EdgeInsets.all(4), + constraints: const BoxConstraints(), + tooltip: message.allowedCopy ? "Copy to clipboard" : null, + onPressed: message.imageContent?.imageData == null || message.allowedCopy == false ? null : () { + + final clipboard = SystemClipboard.instance; + if (clipboard == null) { + return; // Clipboard API is not supported on this platform. 
+ } + final item = DataWriterItem(); + item.add(Formats.jpeg(message.imageContent!.imageData)); + clipboard.write([item]); + + }, + ) + ), + Padding( + padding: const EdgeInsets.only(left: 8), + child: IconButton( + style: IconButton.styleFrom( + backgroundColor: intelGrayLight, + shape: const RoundedRectangleBorder( + borderRadius: BorderRadius.all(Radius.circular(4)), + ), + ), + padding: const EdgeInsets.all(4), + constraints: const BoxConstraints(), + icon: SvgPicture.asset("images/stats.svg", + colorFilter: const ColorFilter.mode(textColor, BlendMode.srcIn), + width: 20, + ), + tooltip: "Show stats", + onPressed: () { + showMetricsDialog(context, message.metrics!); + }, + ), + ), + ], + ); + } + ), + ), + ], + ), + ); + } +} + +void showMetricsDialog(BuildContext context, TTIMetrics metrics) { + showDialog( + context: context, + builder: (BuildContext context) { + return AlertDialog( + content: TTICirclePropRow( + metrics: metrics + ) + ); + } + ); +} + +class NameRowWidget extends StatelessWidget { + final String name; + final Widget icon; + const NameRowWidget({super.key, required this.name, required this.icon}); + + @override + Widget build(BuildContext context) { + return Row( + children: [ + Container( + padding: const EdgeInsets.all(2), + decoration: BoxDecoration( + borderRadius: BorderRadius.circular(4.0), + color: intelBlueVibrant, + //color: intelGrayLight, + ), + child: icon + ), + Padding( + padding: const EdgeInsets.only(left: 10.0), + child: Text(name), + ) + ] + ); + } +} + +class MessageWidget extends StatelessWidget { + final String message; + const MessageWidget({super.key, required this.message}); + + @override + Widget build(BuildContext context) { + return Padding( + padding: const EdgeInsets.only(left: 34.0, top: 10, right: 26), + child: SelectableText( + message, + style: const TextStyle( + color: textColor, + fontSize: 12, + ), + ), + ); + } + +} + +class ImageWidget extends StatelessWidget { + final String message; + final Image? image; + const ImageWidget({super.key, required this.message, required this.image}); + + @override + Widget build(BuildContext context) { + return Padding( + padding: const EdgeInsets.only(left: 34.0, top: 10, right: 26), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + // Image widget goes here + image ?? 
Container(), + const SizedBox(height: 8), // Add some spacing between image and text + SelectableText( + message, + style: const TextStyle( + color: textColor, + fontSize: 12, + ), + ), + ], + ), + ); + } +} diff --git a/lib/inference/text_to_image_inference_page.dart b/lib/inference/text_to_image_inference_page.dart new file mode 100644 index 00000000..509725db --- /dev/null +++ b/lib/inference/text_to_image_inference_page.dart @@ -0,0 +1,187 @@ +import 'package:flutter/material.dart'; +import 'package:inference/header.dart'; +import 'package:inference/inference/model_info.dart'; +import 'package:inference/inference/textToImage/tti_performance_metrics.dart'; +import 'package:inference/inference/textToImage/tti_playground.dart'; +import 'package:inference/project.dart'; +import 'package:inference/providers/preference_provider.dart'; +import 'package:inference/providers/text_to_image_inference_provider.dart'; +import 'package:intl/intl.dart'; +import 'package:provider/provider.dart'; + +class TextToImageInferencePage extends StatefulWidget { + final Project project; + const TextToImageInferencePage(this.project, {super.key}); + + @override + State createState() => _TextToImageInferencePageState(); +} + +class _TextToImageInferencePageState extends State with TickerProviderStateMixin { + + late TabController _tabController; + + @override + void initState() { + super.initState(); + _tabController = TabController(length: 2, animationDuration: Duration.zero, vsync: this); + } + + @override + void dispose() { + _tabController.dispose(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + Locale locale = Localizations.localeOf(context); + + return ChangeNotifierProxyProvider( + create: (_) { + return TextToImageInferenceProvider(widget.project, null); + }, + update: (_, preferences, textToImageInferenceProvider) { + if (textToImageInferenceProvider == null) { + return TextToImageInferenceProvider(widget.project, preferences.device); + } + if (!textToImageInferenceProvider.sameProps(widget.project, preferences.device)) { + return TextToImageInferenceProvider(widget.project, preferences.device); + } + return textToImageInferenceProvider; + }, + child: Scaffold( + appBar: const Header(true), + body: Padding( + padding: const EdgeInsets.only(left: 58, right: 58, bottom: 30), + child: Row( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Consumer( + builder: (context, inference, child) { + final nf = NumberFormat.decimalPatternDigits( + locale: locale.languageCode, decimalDigits: 2); + + return SizedBox( + width: 250, + child: ModelInfo( + widget.project, + children: [ + PropertyItem( + name: "Task", + child: PropertyValue(inference.task), + ), + Padding( + padding: const EdgeInsets.only(left: 12, top: 12, right: 20.0), + child: Column( + children: [ + Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + const Text("Width"), + Text(nf.format(inference.width)) + ] + ), + Slider( + value: inference.width.toDouble(), + max: 1024.0, + min: 64, + divisions: (1024-64)~/64, + onChanged: (double value) { + inference.width = value.toInt(); + }, + + ), + ], + ), + ), + Padding( + padding: const EdgeInsets.only(left: 12, top: 12, right: 20.0), + child: Column( + children: [ + Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + const Text("Height"), + Text(nf.format(inference.height)) + ] + ), + Slider( + value: inference.height.toDouble(), + max: 1024.0, + min: 64, + divisions: (1024-64)~/64, + onChanged: (double value) { + 
inference.height = value.toInt(); + }, + + ), + ], + ), + ), + Padding( + padding: const EdgeInsets.only(left: 12, top: 12, right: 20.0), + child: Column( + children: [ + Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + const Text("Rounds"), + Text(nf.format(inference.rounds)) + ] + ), + Slider( + value: inference.rounds.toDouble(), + max: 80, + min: 1, + divisions: (80-1)~/1, + onChanged: (double value) { + inference.rounds = value.toInt(); + }, + + ), + ], + ), + ), + ] + ), + ); + }), + Expanded( + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + TabBar( + isScrollable: true, + tabAlignment: TabAlignment.start, + controller: _tabController, + tabs: const [ + Tab(text: "Playground"), + Tab(text: "Performance metrics"), + //Tab(text: "Deploy"), + ] + ), + Expanded( + child: Padding( + padding: const EdgeInsets.only(top: 15.0), + child: TabBarView( + controller: _tabController, + children: const [ + TTIPlayground(), + TTIPerformanceMetricsPage(), + //Container(), + ] + ), + ) + ), + ], + ), + ) + ], + ), + ), + ), + ); + } +} + diff --git a/lib/interop/generated_bindings.dart b/lib/interop/generated_bindings.dart index f37fd89b..08f9547d 100644 --- a/lib/interop/generated_bindings.dart +++ b/lib/interop/generated_bindings.dart @@ -92,6 +92,37 @@ class OpenVINO { late final _freeStatusOrSpeechToText = _freeStatusOrSpeechToTextPtr .asFunction)>(); + void freeStatusOrModelResponse( + ffi.Pointer status, + ) { + return _freeStatusOrModelResponse( + status, + ); + } + + late final _freeStatusOrModelResponsePtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer)>>( + 'freeStatusOrModelResponse'); + late final _freeStatusOrModelResponse = _freeStatusOrModelResponsePtr + .asFunction)>(); + + void freeStatusOrWhisperModelResponse( + ffi.Pointer status, + ) { + return _freeStatusOrWhisperModelResponse( + status, + ); + } + + late final _freeStatusOrWhisperModelResponsePtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(ffi.Pointer)>>( + 'freeStatusOrWhisperModelResponse'); + late final _freeStatusOrWhisperModelResponse = + _freeStatusOrWhisperModelResponsePtr.asFunction< + void Function(ffi.Pointer)>(); + void freeStatusOrDevices( ffi.Pointer status, ) { @@ -465,6 +496,81 @@ class OpenVINO { late final _llmInferenceClose = _llmInferenceClosePtr .asFunction Function(CLLMInference)>(); + ffi.Pointer ttiInferenceOpen( + ffi.Pointer model_path, + ffi.Pointer device, + ) { + return _ttiInferenceOpen( + model_path, + device, + ); + } + + late final _ttiInferenceOpenPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function(ffi.Pointer, + ffi.Pointer)>>('ttiInferenceOpen'); + late final _ttiInferenceOpen = _ttiInferenceOpenPtr.asFunction< + ffi.Pointer Function( + ffi.Pointer, ffi.Pointer)>(); + + ffi.Pointer ttiInferencePrompt( + CTTIInference instance, + ffi.Pointer message, + int width, + int height, + int rounds, + ) { + return _ttiInferencePrompt( + instance, + message, + width, + height, + rounds, + ); + } + + late final _ttiInferencePromptPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function( + CTTIInference, + ffi.Pointer, + ffi.Int, + ffi.Int, + ffi.Int)>>('ttiInferencePrompt'); + late final _ttiInferencePrompt = _ttiInferencePromptPtr.asFunction< + ffi.Pointer Function( + CTTIInference, ffi.Pointer, int, int, int)>(); + + ffi.Pointer ttiInferenceHasModelIndex( + CTTIInference instance, + ) { + return _ttiInferenceHasModelIndex( + instance, + ); + } + + late final _ttiInferenceHasModelIndexPtr = _lookup< + ffi + 
.NativeFunction Function(CTTIInference)>>( + 'ttiInferenceHasModelIndex'); + late final _ttiInferenceHasModelIndex = _ttiInferenceHasModelIndexPtr + .asFunction Function(CTTIInference)>(); + + ffi.Pointer ttiInferenceClose( + CLLMInference instance, + ) { + return _ttiInferenceClose( + instance, + ); + } + + late final _ttiInferenceClosePtr = + _lookup Function(CLLMInference)>>( + 'ttiInferenceClose'); + late final _ttiInferenceClose = _ttiInferenceClosePtr + .asFunction Function(CLLMInference)>(); + ffi.Pointer graphRunnerOpen( ffi.Pointer graph, ) { @@ -569,6 +675,80 @@ class OpenVINO { late final _graphRunnerStop = _graphRunnerStopPtr .asFunction Function(CGraphRunner)>(); + ffi.Pointer speechToTextOpen( + ffi.Pointer model_path, + ffi.Pointer device, + ) { + return _speechToTextOpen( + model_path, + device, + ); + } + + late final _speechToTextOpenPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function(ffi.Pointer, + ffi.Pointer)>>('speechToTextOpen'); + late final _speechToTextOpen = _speechToTextOpenPtr.asFunction< + ffi.Pointer Function( + ffi.Pointer, ffi.Pointer)>(); + + ffi.Pointer speechToTextLoadVideo( + CSpeechToText instance, + ffi.Pointer video_path, + ) { + return _speechToTextLoadVideo( + instance, + video_path, + ); + } + + late final _speechToTextLoadVideoPtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function(CSpeechToText, + ffi.Pointer)>>('speechToTextLoadVideo'); + late final _speechToTextLoadVideo = _speechToTextLoadVideoPtr.asFunction< + ffi.Pointer Function(CSpeechToText, ffi.Pointer)>(); + + ffi.Pointer speechToTextVideoDuration( + CSpeechToText instance, + ) { + return _speechToTextVideoDuration( + instance, + ); + } + + late final _speechToTextVideoDurationPtr = _lookup< + ffi.NativeFunction Function(CSpeechToText)>>( + 'speechToTextVideoDuration'); + late final _speechToTextVideoDuration = _speechToTextVideoDurationPtr + .asFunction Function(CSpeechToText)>(); + + ffi.Pointer speechToTextTranscribe( + CSpeechToText instance, + int start, + int duration, + ffi.Pointer language, + ) { + return _speechToTextTranscribe( + instance, + start, + duration, + language, + ); + } + + late final _speechToTextTranscribePtr = _lookup< + ffi.NativeFunction< + ffi.Pointer Function( + CSpeechToText, + ffi.Int, + ffi.Int, + ffi.Pointer)>>('speechToTextTranscribe'); + late final _speechToTextTranscribe = _speechToTextTranscribePtr.asFunction< + ffi.Pointer Function( + CSpeechToText, int, int, ffi.Pointer)>(); + ffi.Pointer getAvailableDevices() { return _getAvailableDevices(); } @@ -667,12 +847,36 @@ final class Metrics extends ffi.Struct { external int number_of_input_tokens; } +final class TTIMetrics extends ffi.Struct { + @ffi.Float() + external double load_time; + + @ffi.Float() + external double generate_time; +} + +final class StringWithMetrics extends ffi.Struct { + external ffi.Pointer string; + + external TTIMetrics metrics; +} + final class Device extends ffi.Struct { external ffi.Pointer id; external ffi.Pointer name; } +final class TranscriptionChunk extends ffi.Struct { + @ffi.Float() + external double start_ts; + + @ffi.Float() + external double end_ts; + + external ffi.Pointer text; +} + final class Status extends ffi.Struct { @ffi.Int() external int status; @@ -753,6 +957,15 @@ final class StatusOrLLMInference extends ffi.Struct { typedef CLLMInference = ffi.Pointer; +final class StatusOrTTIInference extends ffi.Struct { + @ffi.Int() + external int status; + + external ffi.Pointer message; + + external CLLMInference value; +} + final class 
StatusOrModelResponse extends ffi.Struct { @ffi.Int() external int status; @@ -764,6 +977,33 @@ final class StatusOrModelResponse extends ffi.Struct { external ffi.Pointer value; } +final class StatusOrWhisperModelResponse extends ffi.Struct { + @ffi.Int() + external int status; + + external ffi.Pointer message; + + external Metrics metrics; + + external ffi.Pointer value; + + @ffi.Int() + external int size; + + external ffi.Pointer text; +} + +final class StatusOrTTIModelResponse extends ffi.Struct { + @ffi.Int() + external int status; + + external ffi.Pointer message; + + external TTIMetrics metrics; + + external ffi.Pointer value; +} + final class StatusOrDevices extends ffi.Struct { @ffi.Int() external int status; @@ -788,3 +1028,4 @@ typedef LLMInferenceCallbackFunctionFunction = ffi.Void Function( ffi.Pointer); typedef DartLLMInferenceCallbackFunctionFunction = void Function( ffi.Pointer); +typedef CTTIInference = ffi.Pointer; diff --git a/lib/interop/openvino_bindings.dart b/lib/interop/openvino_bindings.dart index e11cc935..d27cd8b7 100644 --- a/lib/interop/openvino_bindings.dart +++ b/lib/interop/openvino_bindings.dart @@ -18,6 +18,20 @@ class SerializationOutput { } +class Chunk { + final double start; + final double end; + final String text; + const Chunk(this.start, this.end, this.text); +} + +class TranscriptionModelResponse { + final List chunks; + final Metrics metrics; + final String text; + const TranscriptionModelResponse(this.chunks, this.metrics, this.text); +} + class ModelResponse { final String content; final Metrics metrics; @@ -25,6 +39,14 @@ class ModelResponse { const ModelResponse(this.content, this.metrics); } +class TTIModelResponse { + final String content; + final TTIMetrics metrics; + + const TTIModelResponse(this.content, this.metrics); +} + + String getLibraryPath() { if (Platform.isWindows) { return "windows_bindings.dll"; diff --git a/lib/interop/speech_to_text.dart b/lib/interop/speech_to_text.dart index c8635ae1..4f57cb2d 100644 --- a/lib/interop/speech_to_text.dart +++ b/lib/interop/speech_to_text.dart @@ -9,72 +9,79 @@ final ov = getBindings(); class SpeechToText { final Pointer instance; - - SpeechToText(this.instance); static Future init(String modelPath, String device) async { - throw UnimplementedError(); - //final result = await Isolate.run(() { - // final modelPathPtr = modelPath.toNativeUtf8(); - // final devicePtr = device.toNativeUtf8(); - // final status = ov.speechToTextOpen(modelPathPtr, devicePtr); - // calloc.free(modelPathPtr); - // calloc.free(devicePtr); - - // return status; - //}); - - //print("${result.ref.status}, ${result.ref.message}"); - //if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { - // throw "SpeechToText open error: ${result.ref.status} ${result.ref.message.toDartString()}"; - //} - - //return SpeechToText(result); + final result = await Isolate.run(() { + final modelPathPtr = modelPath.toNativeUtf8(); + final devicePtr = device.toNativeUtf8(); + final status = ov.speechToTextOpen(modelPathPtr, devicePtr); + calloc.free(modelPathPtr); + calloc.free(devicePtr); + + return status; + }); + + print("${result.ref.status}, ${result.ref.message}"); + if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { + throw "SpeechToText open error: ${result.ref.status} ${result.ref.message.toDartString()}"; + } + + return SpeechToText(result); } Future loadVideo(String videoPath) async{ - throw UnimplementedError(); - //int instanceAddress = instance.ref.value.address; - //{ - // final result = await 
Isolate.run(() { - // final videoPathPtr = videoPath.toNativeUtf8(); - // final status = ov.speechToTextLoadVideo(Pointer.fromAddress(instanceAddress), videoPathPtr); - // calloc.free(videoPathPtr); - // return status; - // }); - - // if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { - // throw "SpeechToText LoadVideo error: ${result.ref.status} ${result.ref.message.toDartString()}"; - // } - //} - - //{ - // final result = await Isolate.run(() { - // final status = ov.speechToTextVideoDuration(Pointer.fromAddress(instanceAddress)); - // return status; - // }); - // if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { - // throw "SpeechToText VideoDuration error: ${result.ref.status} ${result.ref.message.toDartString()}"; - // } - // return result.ref.value; - //} + int instanceAddress = instance.ref.value.address; + { + final result = await Isolate.run(() { + final videoPathPtr = videoPath.toNativeUtf8(); + final status = ov.speechToTextLoadVideo(Pointer.fromAddress(instanceAddress), videoPathPtr); + calloc.free(videoPathPtr); + return status; + }); + + if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { + throw "SpeechToText LoadVideo error: ${result.ref.status} ${result.ref.message.toDartString()}"; + } + } + + { + final result = await Isolate.run(() { + final status = ov.speechToTextVideoDuration(Pointer.fromAddress(instanceAddress)); + return status; + }); + if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { + throw "SpeechToText VideoDuration error: ${result.ref.status} ${result.ref.message.toDartString()}"; + } + return result.ref.value; + } } - Future transcribe(int start, int duration, String language) async{ - throw UnimplementedError(); - //int instanceAddress = instance.ref.value.address; - //final result = await Isolate.run(() { - // final languagePtr = language.toNativeUtf8(); - // final status = ov.speechToTextTranscribe(Pointer.fromAddress(instanceAddress), start, duration, languagePtr); - // calloc.free(languagePtr); - // return status; - //}); - - //if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { - // throw "SpeechToText LoadVideo error: ${result.ref.status} ${result.ref.message.toDartString()}"; - //} - - //return result.ref.value.toDartString(); + Future transcribe(int start, int duration, String language) async{ + int instanceAddress = instance.ref.value.address; + final result = await Isolate.run(() { + final languagePtr = language.toNativeUtf8(); + final status = ov.speechToTextTranscribe(Pointer.fromAddress(instanceAddress), start, duration, languagePtr); + calloc.free(languagePtr); + return status; + }); + + if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { + throw "SpeechToText LoadVideo error: ${result.ref.status} ${result.ref.message.toDartString()}"; + } + + List chunks = []; + for (int i = 0; i < result.ref.size; i++) { + chunks.add(Chunk( + result.ref.value[i].start_ts, + result.ref.value[i].end_ts, + result.ref.value[i].text.toDartString() + )); + } + final metrics = result.ref.metrics; + final text = result.ref.text.toDartString(); + ov.freeStatusOrWhisperModelResponse(result); + + return TranscriptionModelResponse(chunks, metrics, text); } } diff --git a/lib/interop/tti_inference.dart b/lib/interop/tti_inference.dart new file mode 100644 index 00000000..fbbbc669 --- /dev/null +++ b/lib/interop/tti_inference.dart @@ -0,0 +1,77 @@ +import 'dart:ffi'; +import 'dart:isolate'; + +import 'package:ffi/ffi.dart'; +import 
'package:inference/interop/openvino_bindings.dart'; + +final tti_ov = getBindings(); + +class TTIInference { + final Pointer instance; + late bool chatEnabled; + + TTIInference(this.instance) { + chatEnabled = hasModelIndex(); + } + + static Future init(String modelPath, String device) async { + final result = await Isolate.run(() { + final modelPathPtr = modelPath.toNativeUtf8(); + final devicePtr = device.toNativeUtf8(); + final status = tti_ov.ttiInferenceOpen(modelPathPtr, devicePtr); + calloc.free(modelPathPtr); + calloc.free(devicePtr); + + return status; + }); + + print("${result.ref.status}, ${result.ref.message}"); + if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { + throw "TTIInference open error: ${result.ref.status} ${result.ref.message.toDartString()}"; + } + + return TTIInference(result); + } + + Future prompt( + String message, int width, int height, int rounds) async { + int instanceAddress = instance.ref.value.address; + final result = await Isolate.run(() { + final messagePtr = message.toNativeUtf8(); + final status = tti_ov.ttiInferencePrompt( + Pointer.fromAddress(instanceAddress), + messagePtr, + width, + height, + rounds); + calloc.free(messagePtr); + return status; + }); + + if (StatusEnum.fromValue(result.ref.status) != StatusEnum.OkStatus) { + throw "TTIInference prompt error: ${result.ref.status} ${result.ref.message.toDartString()}"; + } + + return TTIModelResponse( + result.ref.value.toDartString(), result.ref.metrics); + } + + bool hasModelIndex() { + final status = tti_ov.ttiInferenceHasModelIndex(instance.ref.value); + + if (StatusEnum.fromValue(status.ref.status) != StatusEnum.OkStatus) { + throw "TTI Chat template error: ${status.ref.status} ${status.ref.message.toDartString()}"; + } + + return status.ref.value; + } + + void close() { + final status = tti_ov.ttiInferenceClose(instance.ref.value); + + if (StatusEnum.fromValue(status.ref.status) != StatusEnum.OkStatus) { + throw "Close error: ${status.ref.status} ${status.ref.message.toDartString()}"; + } + tti_ov.freeStatus(status); + } +} diff --git a/lib/main.dart b/lib/main.dart index 9f019f04..092cccd4 100644 --- a/lib/main.dart +++ b/lib/main.dart @@ -6,6 +6,7 @@ import 'package:inference/theme_fluent.dart'; import 'package:inference/providers/preference_provider.dart'; import 'package:inference/providers/project_provider.dart'; import 'package:inference/public_models.dart'; +import 'package:media_kit/media_kit.dart'; import 'package:provider/provider.dart'; @@ -13,7 +14,7 @@ const String title = 'OpenVINO TestDrive'; void testConnection() async { final dio = Dio(BaseOptions(connectTimeout: Duration(seconds: 10))); - + try { await dio.get(collections[0].path); } on DioException catch(ex) { @@ -25,6 +26,7 @@ void testConnection() async { } void main() { + MediaKit.ensureInitialized(); testConnection(); runApp(const App()); } diff --git a/lib/pages/computer_vision/batch_inference.dart b/lib/pages/computer_vision/batch_inference.dart index d2f34cfa..53c74ff5 100644 --- a/lib/pages/computer_vision/batch_inference.dart +++ b/lib/pages/computer_vision/batch_inference.dart @@ -99,7 +99,7 @@ class BatchInference extends StatelessWidget { ), ), ), - const ModelProperties(), + ModelProperties(project: batchInference.imageInference.project), ], ); } diff --git a/lib/pages/computer_vision/computer_vision.dart b/lib/pages/computer_vision/computer_vision.dart index 8c7522c3..715a270e 100644 --- a/lib/pages/computer_vision/computer_vision.dart +++ b/lib/pages/computer_vision/computer_vision.dart 
@@ -2,7 +2,6 @@ import 'package:fluent_ui/fluent_ui.dart'; import 'package:go_router/go_router.dart'; import 'package:inference/pages/computer_vision/batch_inference.dart'; import 'package:inference/pages/computer_vision/live_inference.dart'; -import 'package:inference/pages/models/widgets/grid_container.dart'; import 'package:inference/project.dart'; import 'package:inference/providers/image_inference_provider.dart'; import 'package:inference/providers/preference_provider.dart'; diff --git a/lib/pages/computer_vision/live_inference.dart b/lib/pages/computer_vision/live_inference.dart index 0b089bfd..9c78f25c 100644 --- a/lib/pages/computer_vision/live_inference.dart +++ b/lib/pages/computer_vision/live_inference.dart @@ -135,7 +135,7 @@ class _LiveInferenceState extends State { ], ), ), - const ModelProperties(), + ModelProperties(project: widget.project), ], ); } diff --git a/lib/pages/computer_vision/widgets/model_properties.dart b/lib/pages/computer_vision/widgets/model_properties.dart index d333243f..5d7e932d 100644 --- a/lib/pages/computer_vision/widgets/model_properties.dart +++ b/lib/pages/computer_vision/widgets/model_properties.dart @@ -1,70 +1,67 @@ import 'package:fluent_ui/fluent_ui.dart'; +import 'package:inference/project.dart'; import 'package:inference/theme_fluent.dart'; -import 'package:inference/utils.dart'; import 'package:inference/pages/models/widgets/grid_container.dart'; -import 'package:inference/providers/image_inference_provider.dart'; import 'package:intl/intl.dart'; -import 'package:provider/provider.dart'; +import 'package:inference/utils.dart'; class ModelProperties extends StatelessWidget { - const ModelProperties({super.key}); + final Project project; + const ModelProperties({super.key, required this.project}); @override Widget build(BuildContext context) { - return Consumer(builder: (context, inference, child) { - Locale locale = Localizations.localeOf(context); - final formatter = NumberFormat.percentPattern(locale.languageCode); + Locale locale = Localizations.localeOf(context); + final formatter = NumberFormat.percentPattern(locale.languageCode); - return SizedBox( - width: 280, - child: GridContainer( - padding: const EdgeInsets.symmetric(vertical: 18, horizontal: 24), - child: Column( - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - const Text("Model parameters", style: TextStyle( - fontSize: 20, - )), - Container( - padding: const EdgeInsets.only(top: 16), - child: Column( - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - ModelProperty( - title: "Model name", - value: inference.project.name, - ), - ModelProperty( - title: "Task", - value: inference.project.taskName(), - ), - ModelProperty( - title: "Architecture", - value: inference.project.architecture, - ), - ModelProperty( - title: "Size", - value: inference.project.size?.readableFileSize() ?? 
"", - ), - Builder( - builder: (context) { - if (inference.project.tasks.first.performance == null) { - return Container(); - } - return ModelProperty( - title: "Accuracy", - value: formatter.format(inference.project.tasks.first.performance!.score) - ); - } - ), - ], + return SizedBox( + width: 280, + child: GridContainer( + padding: const EdgeInsets.symmetric(vertical: 18, horizontal: 24), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + const Text("Model parameters", style: TextStyle( + fontSize: 20, + )), + Container( + padding: const EdgeInsets.only(top: 16), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + ModelProperty( + title: "Model name", + value: project.name, + ), + ModelProperty( + title: "Task", + value: project.taskName(), + ), + ModelProperty( + title: "Architecture", + value: project.architecture, + ), + ModelProperty( + title: "Size", + value: project.size?.readableFileSize() ?? "", + ), + Builder( + builder: (context) { + if (project.tasks.first.performance == null) { + return Container(); + } + return ModelProperty( + title: "Accuracy", + value: formatter.format(project.tasks.first.performance!.score) + ); + } ), - ) - ], + ], + ), ) - ), - ); - } + ], + ) + ), ); } } diff --git a/lib/pages/download_model/download_model.dart b/lib/pages/download_model/download_model.dart new file mode 100644 index 00000000..a6fb94de --- /dev/null +++ b/lib/pages/download_model/download_model.dart @@ -0,0 +1,229 @@ +import 'dart:math'; + +import 'package:fluent_ui/fluent_ui.dart'; +import 'package:go_router/go_router.dart'; +import 'package:inference/deployment_processor.dart'; +import 'package:inference/project.dart'; +import 'package:inference/providers/download_provider.dart'; +import 'package:inference/providers/project_provider.dart'; +import 'package:inference/public_models.dart'; +import 'package:inference/theme_fluent.dart'; +import 'package:intl/intl.dart'; +import 'package:provider/provider.dart'; + +String formatBytes(int bytes) { + return "${NumberFormat("#,##0").format(bytes / pow(1024, 2))} MB"; +} + + +class DownloadPage extends StatelessWidget { + final PublicProject project; + const DownloadPage({super.key, required this.project}); + + @override + Widget build(BuildContext context) { + return ChangeNotifierProvider( + create: (_) => DownloadProvider(), + child: DownloadModelPage(project: project), + ); + } +} + +class DownloadModelPage extends StatefulWidget { + final PublicProject project; + const DownloadModelPage({super.key, required this.project}); + + @override + State createState() => _DownloadModelPageState(); +} + +class _DownloadModelPageState extends State { + @override + void initState() { + super.initState(); + startDownload(); + } + + void startDownload() async { + final downloadProvider = Provider.of(context, listen: false); + final projectProvider = Provider.of(context, listen: false); + final router = GoRouter.of(context); + late Map files; + + try { + files = await listDownloadFiles(widget.project); + } catch (e) { + await showDialog(context: context, builder: (BuildContext context) => ContentDialog( + title: const Text('Model was not found'), + actions: [ + Button( + onPressed: () { + router.canPop() ? 
router.pop() : router.go('/home'); + }, + child: const Text('Close'), + ), + ], + )); + return; + } + + try { + downloadProvider.onCancel = () => deleteProjectData(widget.project); + await downloadProvider.queue(files, widget.project.modelInfo?.collection.token); + projectProvider.addProject(widget.project); + await getAdditionalModelInfo(widget.project); + projectProvider.completeLoading(widget.project); + router.go("/models/inference", extra: widget.project); + } catch(e) { + if (mounted) { + await showDialog(context: context, builder: (BuildContext context) => ContentDialog( + title: Text('An error occurred trying to download ${widget.project.name}'), + content: Text(e.toString()), + actions: [ + Button( + onPressed: () { + router.canPop() ? router.pop() : router.go('/home'); + }, + child: const Text('Close'), + ), + ], + )); + } + } + } + + @override + Widget build(BuildContext context) { + final theme = FluentTheme.of(context); + return ScaffoldPage( + padding: const EdgeInsets.symmetric(horizontal: 24, vertical: 8.0), + header: Container( + decoration: BoxDecoration( + border: Border( + bottom: BorderSide( + color: theme.resources.controlStrokeColorDefault, + width: 1.0 + ) + ) + ), + height: 56, + padding: const EdgeInsets.symmetric(horizontal: 12.0), + child: Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + crossAxisAlignment: CrossAxisAlignment.center, + children: [ + Row( + children: [ + Padding( + padding: const EdgeInsets.only(left: 12.0, bottom: 8), + child: ClipRRect( + borderRadius: BorderRadius.circular(4.0), + child: Container( + width: 40, + height: 40, + decoration: BoxDecoration( + image: DecorationImage( + image: widget.project.thumbnailImage(), + fit: BoxFit.fitWidth), + ), + ), + ), + ), + Padding( + padding: const EdgeInsets.symmetric(horizontal: 16), + child: Text(widget.project.name, + style: const TextStyle(fontSize: 20, fontWeight: FontWeight.bold), + ), + ), + ], + ), + Button(child: const Text("Close"), onPressed: () { + GoRouter.of(context).canPop() ? 
GoRouter.of(context).pop() : GoRouter.of(context).go('/home'); + }), + ], + ), + ), + content: Row( + children: [ + Expanded( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + crossAxisAlignment: CrossAxisAlignment.center, + children: [ + Padding( + padding: const EdgeInsets.all(16.0), + child: Consumer(builder: (context, downloadProvider, child) { + final stats = downloadProvider.stats; + return Column( + children: [ + ProgressRing( + value: stats.percentage * 100, + strokeWidth: 8, + ), + SizedBox( + width: 140, + child: Padding( + padding: const EdgeInsets.only(top: 8.0), + child: Row( + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + Text(formatBytes(stats.received), textAlign: TextAlign.end,), + const Text("/"), + Text(formatBytes(stats.total)) + ], + ), + ), + ), + const Padding( + padding: EdgeInsets.all(8.0), + child: Text("Downloading model weights"), + ) + ] + ); + } + ), + ) + ], + ), + ), + Container( + width: 280, + decoration: BoxDecoration( + border: Border( + left: BorderSide( + color: theme.resources.controlStrokeColorDefault, + width: 1.0, + ), + ), + ), + child: Padding( + padding: const EdgeInsets.symmetric(horizontal: 24), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + const Padding( + padding: EdgeInsets.symmetric(vertical: 18.0), + child: Text("Model parameters", style: TextStyle(fontSize: 20, fontWeight: FontWeight.w600)), + ), + const Padding( + padding: EdgeInsets.only(top: 16), + child: Text("Model name", style: TextStyle(fontSize: 14),), + ), + Padding(padding: const EdgeInsets.only(top: 4), + child: Text(widget.project.modelId, style: const TextStyle(fontSize: 14, color: foreground3Color),), + ), + const Padding(padding: EdgeInsets.only(top: 16), + child: Text("Task", style: TextStyle(fontSize: 14),), + ), + Padding(padding: const EdgeInsets.only(top: 4), + child: Text(widget.project.taskName(), style: const TextStyle(fontSize: 14, color: foreground3Color),), + ), + ], + ), + ), + ), + ], + ) + ); + } +} diff --git a/lib/pages/home/widgets/featured_card.dart b/lib/pages/home/widgets/featured_card.dart index 7e24a0c6..c756ac18 100644 --- a/lib/pages/home/widgets/featured_card.dart +++ b/lib/pages/home/widgets/featured_card.dart @@ -1,11 +1,20 @@ import 'package:fluent_ui/fluent_ui.dart'; import 'package:inference/importers/manifest_importer.dart'; import 'package:inference/widgets/elevation.dart'; +import 'package:go_router/go_router.dart'; class FeaturedCard extends StatelessWidget { final Model model; const FeaturedCard({required this.model, super.key}); + void downloadModel(BuildContext context) { + model.convertToProject().then((project) { + if (context.mounted) { + GoRouter.of(context).go('/models/download', extra: project); + } + }); + } + @override Widget build(BuildContext context) { final theme = FluentTheme.of(context); @@ -72,7 +81,7 @@ class FeaturedCard extends StatelessWidget { alignment: Alignment.centerRight, child: Padding( padding: const EdgeInsets.only(top: 2), - child: IconButton(icon: const Icon(FluentIcons.pop_expand, size: 14), onPressed: () {}), + child: IconButton(icon: const Icon(FluentIcons.pop_expand, size: 14), onPressed: () => downloadModel(context)), ), ), ], diff --git a/lib/pages/import/import.dart b/lib/pages/import/import.dart index bf3f540f..82af278c 100644 --- a/lib/pages/import/import.dart +++ b/lib/pages/import/import.dart @@ -42,6 +42,7 @@ class _ImportPageState extends State { @override Widget build(BuildContext context) { final theme = 
FluentTheme.of(context); + final router = GoRouter.of(context); return ScaffoldPage.scrollable( padding: const EdgeInsets.symmetric(horizontal: 24, vertical: 8.0), header: Container( @@ -68,11 +69,14 @@ class _ImportPageState extends State { Row( children: [ FilledButton(onPressed: selectedModel == null ? (null) : () { - GoRouter.of(context).go('/models/download', extra: selectedModel); + selectedModel?.convertToProject().then((project) { + router.go('/models/download', extra: project); + }); + }, child: const Text('Import selected model'),), Padding( padding: const EdgeInsets.only(left: 8.0), - child: Button(child: const Text('Close'), onPressed: () { GoRouter.of(context).pop(); }), + child: Button(child: const Text('Close'), onPressed: () { router.pop(); }), ) ], ) @@ -89,9 +93,12 @@ class _ImportPageState extends State { children: [ ConstrainedBox( constraints: const BoxConstraints(maxWidth: 280), - child: SearchBar(onChange: (value) { setState(() { - searchValue = value; - }); }, placeholder: 'Find a model',), + child: Semantics( + label: 'Find a model', + child: SearchBar(onChange: (value) { setState(() { + searchValue = value; + }); }, placeholder: 'Find a model',), + ), ), Padding( padding: const EdgeInsets.only(left: 8), @@ -173,4 +180,4 @@ class _ImportPageState extends State { ] ); } -} \ No newline at end of file +} diff --git a/lib/pages/models/inference.dart b/lib/pages/models/inference.dart index 5e7231e9..6b9a64df 100644 --- a/lib/pages/models/inference.dart +++ b/lib/pages/models/inference.dart @@ -1,6 +1,7 @@ import 'package:fluent_ui/fluent_ui.dart'; import 'package:inference/pages/computer_vision/computer_vision.dart'; import 'package:inference/pages/text_generation/text_generation.dart'; +import 'package:inference/pages/transcription/transcription.dart'; import 'package:inference/project.dart'; class InferencePage extends StatelessWidget { @@ -15,6 +16,8 @@ class InferencePage extends StatelessWidget { case ProjectType.text: return TextGenerationPage(project); case ProjectType.speech: + return TranscriptionPage(project); + case ProjectType.textToImage: return Container(); } } diff --git a/lib/pages/models/widgets/model_card.dart b/lib/pages/models/widgets/model_card.dart index e3f0a4c5..d6817f97 100644 --- a/lib/pages/models/widgets/model_card.dart +++ b/lib/pages/models/widgets/model_card.dart @@ -22,7 +22,13 @@ class _ModelCardState extends State{ final theme = FluentTheme.of(context); return GestureDetector( - onTap: () => GoRouter.of(context).go("/models/inference", extra: widget.project), + onTap: () { + if (widget.project.isDownloaded) { + GoRouter.of(context).go("/models/inference", extra: widget.project); + } else { + GoRouter.of(context).go("/models/download", extra: widget.project); + } + }, child: MouseRegion( cursor: SystemMouseCursors.click, child: Elevation( diff --git a/lib/pages/transcription/performance_metrics.dart b/lib/pages/transcription/performance_metrics.dart new file mode 100644 index 00000000..0abf3e19 --- /dev/null +++ b/lib/pages/transcription/performance_metrics.dart @@ -0,0 +1,99 @@ +import 'package:fluent_ui/fluent_ui.dart'; +import 'package:inference/pages/computer_vision/widgets/horizontal_rule.dart'; +import 'package:inference/pages/computer_vision/widgets/model_properties.dart'; +import 'package:inference/pages/models/widgets/grid_container.dart'; +import 'package:inference/pages/transcription/providers/speech_inference_provider.dart'; +import 'package:inference/project.dart'; +import 'package:inference/widgets/performance_tile.dart'; 
+import 'package:intl/intl.dart'; +import 'package:provider/provider.dart'; + +class PerformanceMetrics extends StatelessWidget { + final Project project; + const PerformanceMetrics({super.key, required this.project}); + + @override + Widget build(BuildContext context) { + return Row( + children: [ + Expanded( + child: GridContainer( + child: Consumer( + builder: (context, inference, child) { + final metrics = inference.metrics; + if (metrics == null) { + return Container(); + } + + Locale locale = Localizations.localeOf(context); + final nf = NumberFormat.decimalPatternDigits( + locale: locale.languageCode, decimalDigits: 0); + + return Padding( + padding: const EdgeInsets.symmetric(vertical: 80), + child: Center( + child: SizedBox( + width: 887, + child: Column( + children: [ + Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + PerformanceTile( + title: "Time to first token (TTFT)", + value: nf.format(metrics.ttft), + unit: "ms", + tall: true, + ), + PerformanceTile( + title: "Time per output token (TPOT)", + value: nf.format(metrics.tpot), + unit: "ms", + tall: true, + ), + PerformanceTile( + title: "Generate total duration", + value: nf.format(metrics.generateTime), + unit: "ms", + tall: true, + ), + ], + ), + const Padding( + padding: EdgeInsets.symmetric(horizontal: 16.0, vertical: 16), + child: HorizontalRule(), + ), + Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: [ + PerformanceTile( + title: "Load time", + value: nf.format(metrics.loadTime), + unit: "ms", + ), + PerformanceTile( + title: "Detokenization duration", + value: nf.format(metrics.detokenizationTime), + unit: "ms", + ), + PerformanceTile( + title: "Throughput", + value: nf.format(metrics.throughput), + unit: "tokens/sec", + ), + ], + ), + ], + ), + ), + ), + ); + } + ), + ), + ), + ModelProperties(project: project), + ], + ); + } +} diff --git a/lib/pages/transcription/playground.dart b/lib/pages/transcription/playground.dart new file mode 100644 index 00000000..c2224817 --- /dev/null +++ b/lib/pages/transcription/playground.dart @@ -0,0 +1,185 @@ +import 'dart:async'; + +import 'package:file_picker/file_picker.dart'; +import 'package:fluent_ui/fluent_ui.dart'; +import 'package:inference/pages/computer_vision/widgets/model_properties.dart'; +import 'package:inference/pages/models/widgets/grid_container.dart'; +import 'package:inference/pages/transcription/widgets/subtitles.dart'; +import 'package:inference/pages/transcription/widgets/transcription.dart'; +import 'package:inference/pages/transcription/utils/message.dart'; +import 'package:inference/project.dart'; +import 'package:inference/pages/transcription/providers/speech_inference_provider.dart'; +import 'package:inference/theme_fluent.dart'; +import 'package:inference/widgets/controls/drop_area.dart'; +import 'package:inference/widgets/controls/no_outline_button.dart'; +import 'package:inference/widgets/device_selector.dart'; +import 'package:media_kit/media_kit.dart'; +import 'package:media_kit_video/media_kit_video.dart'; +import 'package:provider/provider.dart'; + +class Playground extends StatefulWidget { + final Project project; + const Playground({super.key, required this.project}); + + @override + State createState() => _PlaygroundState(); +} + +class _PlaygroundState extends State with TickerProviderStateMixin{ + final player = Player(); + late final controller = VideoController(player); + int subtitleIndex = 0; + StreamSubscription? listener; + + + void showUploadMenu() async { + FilePickerResult? 
result = await FilePicker.platform.pickFiles(type: FileType.video); + + if (result != null) { + uploadFile(result.files.single.path!); + } + } + + void positionListener(Duration position) { + int index = (position.inSeconds / transcriptionPeriod).floor(); + if (index != subtitleIndex) { + final inference = Provider.of(context, listen: false); + inference.skipTo(index); + setState(() { + subtitleIndex = index; + }); + } + } + + void initializeVideoAndListeners(String source) async { + await listener?.cancel(); + player.open(Media(source)); + player.setVolume(0); // TODO: Disable this for release. This is for our sanity + listener = player.stream.position.listen(positionListener); + } + + void uploadFile(String file) async { + final inference = Provider.of(context, listen: false); + await inference.loadVideo(file); + initializeVideoAndListeners(file); + } + + @override + void initState() { + super.initState(); + final inference = Provider.of(context, listen: false); + if (inference.videoPath != null) { + initializeVideoAndListeners(inference.videoPath!); + } + } + + @override + void dispose() { + player.dispose(); + super.dispose(); + } + + @override + Widget build(BuildContext context) { + final theme = FluentTheme.of(context); + return Row( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Expanded( + child: Column( + children: [ + SizedBox( + height: 64, + child: GridContainer( + child: Padding( + padding: const EdgeInsets.symmetric(horizontal: 16), + child: Row( + children: [ + NoOutlineButton( + onPressed: showUploadMenu, + child: Row( + children: [ + const Text("Choose video"), + const Padding( + padding: EdgeInsets.only(left: 8), + child: Icon(FluentIcons.chevron_down, size: 12), + ), + ], + ), + ), + const DeviceSelector(), + ], + ), + ), + ), + ), + Consumer( + builder: (context, inference, child) { + return Expanded( + child: Builder( + builder: (context) { + return DropArea( + type: "video", + showChild: inference.videoPath != null, + onUpload: (String file) { uploadFile(file); }, + extensions: const [], + child: Builder( + builder: (context) { + if (!inference.loaded.isCompleted) { + return Center(child: Image.asset('images/intel-loading.gif', width: 100)); + } + return Row( + crossAxisAlignment: CrossAxisAlignment.stretch, + children: [ + Expanded( + child: GridContainer( + color: backgroundColor.of(theme), + child: Stack( + alignment: Alignment.bottomCenter, + children: [ + Video(controller: controller), + Subtitles( + transcription: inference.transcription?.data, + subtitleIndex: subtitleIndex, + ), + ] + ), + ), + ), + SizedBox( + width: 360, + child: GridContainer( + color: backgroundColor.of(theme), + child: Builder( + builder: (context) { + if (inference.transcription == null) { + return Container(); + } + return Transcription( + onSeek: player.seek, + transcription: inference.transcription!, + messages: Message.parse(inference.transcription!.data, transcriptionPeriod), + ); + } + ), + ), + ) + ], + ); + } + ), + ); + } + ), + ); + } + ) + ], + ), + ), + ModelProperties(project: widget.project), + ] + ); + } +} + diff --git a/lib/pages/transcription/providers/speech_inference_provider.dart b/lib/pages/transcription/providers/speech_inference_provider.dart new file mode 100644 index 00000000..2f9a7895 --- /dev/null +++ b/lib/pages/transcription/providers/speech_inference_provider.dart @@ -0,0 +1,121 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; +import 'package:inference/interop/openvino_bindings.dart'; +import 
'package:inference/interop/speech_to_text.dart'; +import 'package:inference/pages/transcription/utils/metrics.dart'; +import 'package:inference/pages/transcription/utils/section.dart'; +import 'package:inference/project.dart'; + + +const transcriptionPeriod = 10; + +class SpeechInferenceProvider extends ChangeNotifier { + Completer loaded = Completer(); + + + Project? _project; + String? _device; + + String? _videoPath; + String? get videoPath => _videoPath; + + bool forceStop = false; + + bool get videoLoaded => _videoPath != null; + + DynamicRangeLoading>? transcription; + Future? activeTranscriptionProcess; + DMetrics? metrics; + + bool get transcriptionComplete { + return transcription?.complete ?? false; + } + + String _language = ""; + + String get language => _language; + set language(String val) { + _language = val; + notifyListeners(); + } + + SpeechToText? _inference; + + SpeechInferenceProvider(Project? project, String? device) { + _project = project; + _device = device; + + if (project != null && device != null) { + SpeechToText.init(project.storagePath, device).then((instance) { + _inference = instance; + loaded.complete(); + notifyListeners(); + }); + } + } + + void skipTo(int index) { + transcription!.skipTo(index); + } + + Future loadVideo(String path) async { + await loaded.future; + forceStop = true; + await activeTranscriptionProcess; + _videoPath = path; + final duration = await _inference!.loadVideo(path); + final sections = (duration / transcriptionPeriod).ceil(); + transcription = DynamicRangeLoading>(Section(0, sections)); + activeTranscriptionProcess = startTranscribing(); + notifyListeners(); + } + + void addMetrics(TranscriptionModelResponse response) { + if (metrics == null) { + metrics = DMetrics.fromCMetrics(response.metrics); + } else { + metrics!.addCMetrics(response.metrics); + } + notifyListeners(); + } + + Future startTranscribing() async { + if (transcription == null) { + throw Exception("Can't transcribe before loading video"); + } + + forceStop = false; + + while (!forceStop && (!transcription!.complete)) { + if (transcription == null) { + return; + } + await transcription!.process((int i) { + final request = transcribe(i * transcriptionPeriod, transcriptionPeriod); + request.then(addMetrics); + return request; + }); + if (hasListeners) { + notifyListeners(); + } + } + } + + Future transcribe(int start, int duration) async { + await loaded.future; + return await _inference!.transcribe(start, duration, _language); + } + + bool sameProps(Project? project, String? 
device) { + return _project == project && _device == device; + } + + @override + void dispose() async { + forceStop = true; + await activeTranscriptionProcess; + super.dispose(); + } + +} diff --git a/lib/pages/transcription/transcription.dart b/lib/pages/transcription/transcription.dart new file mode 100644 index 00000000..46f54af6 --- /dev/null +++ b/lib/pages/transcription/transcription.dart @@ -0,0 +1,121 @@ +import 'package:fluent_ui/fluent_ui.dart'; +import 'package:go_router/go_router.dart'; +import 'package:inference/project.dart'; +import 'package:inference/providers/preference_provider.dart'; +import 'package:inference/pages/transcription/providers/speech_inference_provider.dart'; +import 'package:inference/pages/transcription/performance_metrics.dart'; +import 'package:inference/pages/transcription/playground.dart'; +import 'package:provider/provider.dart'; + +class TranscriptionPage extends StatefulWidget { + final Project project; + const TranscriptionPage(this.project, {super.key}); + + @override + State createState() => _TranscriptionPageState(); +} + +class _TranscriptionPageState extends State { + + + int selected = 0; + @override + Widget build(BuildContext context) { + final theme = FluentTheme.of(context); + final updatedTheme = theme.copyWith( + navigationPaneTheme: theme.navigationPaneTheme.merge(NavigationPaneThemeData( + backgroundColor: theme.scaffoldBackgroundColor, + )) + ); + return ChangeNotifierProxyProvider( + lazy: false, + create: (_) { + final device = Provider.of(context, listen: false).device; + return SpeechInferenceProvider(widget.project, device); + }, + update: (_, preferences, imageInferenceProvider) { + if (imageInferenceProvider != null && imageInferenceProvider.sameProps(widget.project, preferences.device)) { + return imageInferenceProvider; + } + return SpeechInferenceProvider(widget.project, preferences.device); + }, + child: Stack( + children: [ + FluentTheme( + data: updatedTheme, + child: NavigationView( + pane: NavigationPane( + size: const NavigationPaneSize(topHeight: 64), + header: Row( + children: [ + Padding( + padding: const EdgeInsets.only(left: 12.0), + child: ClipRRect( + borderRadius: BorderRadius.circular(4.0), + child: Container( + width: 40, + height: 40, + decoration: BoxDecoration( + image: DecorationImage( + image: widget.project.thumbnailImage(), + fit: BoxFit.cover), + ), + ), + ), + ), + Padding( + padding: const EdgeInsets.symmetric(horizontal: 16), + child: Text(widget.project.name, + style: const TextStyle(fontSize: 20, fontWeight: FontWeight.bold), + ), + ), + ], + ), + //customPane: CustomNavigationPane(), + selected: selected, + onChanged: (i) => setState(() {selected = i;}), + displayMode: PaneDisplayMode.top, + items: [ + PaneItem( + icon: const Icon(FluentIcons.processing), + title: const Text("Playground"), + body: Playground(project: widget.project), + ), + PaneItem( + icon: const Icon(FluentIcons.line_chart), + title: const Text("Performance metrics"), + body: PerformanceMetrics(project: widget.project), + ), + ], + ) + ), + ), + SizedBox( + height: 64, + child: Padding( + padding: const EdgeInsets.symmetric(horizontal: 25), + child: Row( + mainAxisAlignment: MainAxisAlignment.end, + children: [ + Padding( + padding: const EdgeInsets.all(4), + child: OutlinedButton( + style: ButtonStyle( + shape:WidgetStatePropertyAll(RoundedRectangleBorder( + borderRadius: BorderRadius.circular(4.0), + side: const BorderSide(color: Color(0XFF545454)), + )), + ), + child: const Text("Close"), + onPressed: () => 
GoRouter.of(context).go("/models"), + ), + ), + ] + ), + ), + ) + ], + ) + ); + } +} diff --git a/lib/pages/transcription/utils/message.dart b/lib/pages/transcription/utils/message.dart new file mode 100644 index 00000000..95687320 --- /dev/null +++ b/lib/pages/transcription/utils/message.dart @@ -0,0 +1,38 @@ +import 'dart:async'; + +import 'package:inference/interop/openvino_bindings.dart'; + +class Message { + String message; + final Duration position; + + Message(this.message, this.position); + + static List parse(Map> transcriptions, int indexDuration) { + final indices = transcriptions.keys.toList()..sort(); + if (indices.isEmpty) { + return []; + } + + List output = []; + + bool lastChunkIsOpenEnded = false; + + for (int i in indices) { + if (transcriptions[i] is Future) { + continue; + } + final part = transcriptions[i] as TranscriptionModelResponse; + for (final chunk in part.chunks) { + String text = chunk.text; + if (lastChunkIsOpenEnded) { + output.last.message += text; + } else { + output.add(Message(text.substring(1), Duration(seconds: chunk.start.toInt()))); + } + lastChunkIsOpenEnded = text[text.length - 1] != "."; + } + } + return output; + } +} diff --git a/lib/pages/transcription/utils/metrics.dart b/lib/pages/transcription/utils/metrics.dart new file mode 100644 index 00000000..481c9f30 --- /dev/null +++ b/lib/pages/transcription/utils/metrics.dart @@ -0,0 +1,54 @@ +import 'package:inference/interop/generated_bindings.dart'; + +class DMetrics { + double loadTime; + double generateTime; + double tokenizationTime; + double detokenizationTime; + double ttft; + double tpot; + double throughput; + int numberOfGeneratedTokens; + int numberOfInputTokens; + + int n = 1; // number of added metrics + + DMetrics({ + required this.loadTime, + required this.generateTime, + required this.tokenizationTime, + required this.detokenizationTime, + required this.ttft, + required this.tpot, + required this.throughput, + required this.numberOfGeneratedTokens, + required this.numberOfInputTokens, + }); + + void addCMetrics(Metrics metrics) { + //loadTime = metrics.load_time; + generateTime += metrics.generate_time; + tokenizationTime += metrics.tokenization_time; + detokenizationTime += metrics.detokenization_time; + ttft = (ttft * (n / (n + 1))) + metrics.ttft / n; + tpot = (tpot * (n / (n + 1))) + metrics.tpot / n; + throughput = (throughput * (n / (n + 1))) + metrics.throughput / n; + numberOfGeneratedTokens += metrics.number_of_generated_tokens; + numberOfInputTokens += metrics.number_of_input_tokens; + n += 1; + } + + factory DMetrics.fromCMetrics(Metrics metrics) { + return DMetrics( + loadTime: metrics.load_time, + generateTime: metrics.generate_time, + tokenizationTime: metrics.tokenization_time, + detokenizationTime: metrics.detokenization_time, + ttft: metrics.ttft, + tpot: metrics.tpot, + throughput: metrics.throughput, + numberOfGeneratedTokens: metrics.number_of_generated_tokens, + numberOfInputTokens: metrics.number_of_input_tokens, + ); + } +} diff --git a/lib/pages/transcription/utils/section.dart b/lib/pages/transcription/utils/section.dart new file mode 100644 index 00000000..5c731b13 --- /dev/null +++ b/lib/pages/transcription/utils/section.dart @@ -0,0 +1,99 @@ +void moveToFront(List list, I item) { + list.remove(item); + list.insert(0, item); +} + +void moveToEnd(List list, I item) { + list.remove(item); + list.add(item); +} + +class DynamicRangeLoading { + List
sections = []; + int? size; + Map data = {}; + + DynamicRangeLoading(Section section): sections = [section], size = section.end; + + Section get activeSection => sections.first; + + // The incomplete sections will always be in front + bool get complete => activeSection.complete; + + void skipTo(int i) { + for (var section in sections) { + if (section.contains(i)) { + if (i > section.index) { + // Section has not progressed until the requested index + // Split the section and move the new section to the front + final newSection = section.split(i); + sections.insert(0, newSection); + } else { + // Section is further ahead than requested skipTo + // move section to front since that work has higher prio + if (!section.complete && section != activeSection) { + moveToFront(sections, section); + } + } + return; + } + } + + throw Exception("Out of range"); + } + + int getNextIndex() { + if (complete) { + throw Exception("Cannot get next index. All work is done"); + } + return activeSection.index; + } + + void pumpIndex() { + if (activeSection.pump()) { + //activeSection has ended + if (sections.length > 1) { + moveToEnd(sections,activeSection); + } + } + } + + Future process(Future Function(int) func) async{ + final index = getNextIndex(); + final val = await func(index); + data[index] = val; + pumpIndex(); + return val; + } + + void setData(I value) { + data[activeSection.index] = value; + activeSection.index += 1; + } +} + +class Section { + int begin; + int? end; + int index; + + Section(this.begin, this.end): index = begin; + + bool contains(int i) => begin <= i && (end == null ? true : i < end!); + + Section split(int i) { + final newSection = Section(i, end); + end = i; + return newSection; + } + + bool get complete => index == end; + + //returns false if there is still work to do in the section + bool pump() { + if (end == null || index < end!) { + index += 1; + } + return complete; + } +} diff --git a/lib/pages/transcription/widgets/paragraph.dart b/lib/pages/transcription/widgets/paragraph.dart new file mode 100644 index 00000000..c6ca4f16 --- /dev/null +++ b/lib/pages/transcription/widgets/paragraph.dart @@ -0,0 +1,98 @@ + +import 'package:fluent_ui/fluent_ui.dart'; +import 'package:inference/theme_fluent.dart'; +import '../utils/message.dart'; + +String formatDuration(int totalSeconds) { + final duration = Duration(seconds: totalSeconds); + final minutes = duration.inMinutes; + final seconds = totalSeconds % 60; + + final minutesString = '$minutes'.padLeft(2, '0'); + final secondsString = '$seconds'.padLeft(2, '0'); + return '$minutesString:$secondsString'; +} + +class Paragraph extends StatefulWidget { + final Function(Duration)? onSeek; + final Message message; + final String? 
highlightedText; + + const Paragraph({super.key, required this.message, this.onSeek, this.highlightedText}); + + @override + State createState() => _ParagraphState(); +} + +class _ParagraphState extends State { + bool hover = false; + + @override + Widget build(BuildContext context) { + final theme = FluentTheme.of(context); + List pieces = []; + if (widget.highlightedText != null) { + final pattern = RegExp(widget.highlightedText!, caseSensitive: false); + final sections = widget.message.message.split(pattern); + if (sections.isNotEmpty) { + pieces.add(TextSpan(text: sections.first)); + for (int i = 1; i < sections.length; i++) { + pieces.add( + TextSpan( + text: widget.highlightedText!, + style: TextStyle(backgroundColor: theme.accentColor), + ) + ); + pieces.add(TextSpan(text: sections[i])); + } + } + } else { + pieces.add(TextSpan(text: widget.message.message)); + } + return MouseRegion( + onEnter: (_) { + setState(() => hover = true); + }, + onExit: (_) { + setState(() => hover = false); + }, + child: GestureDetector( + onTap: () { + widget.onSeek?.call(widget.message.position); + }, + child: Padding( + padding: const EdgeInsets.symmetric(vertical: 20, horizontal: 4), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Align( + alignment: Alignment.bottomRight, + child: Text(formatDuration(widget.message.position.inSeconds), + style: TextStyle( + fontSize: 9, + color: subtleTextColor.of(theme), + ) + ) + ), + Container( + decoration: BoxDecoration( + color: hover ? subtleTextColor.of(theme).withOpacity(0.3) : null, + borderRadius: const BorderRadius.all(Radius.circular(4)), + ), + padding: const EdgeInsets.symmetric(horizontal: 4, vertical: 2), + child: RichText( + text: TextSpan( + style: TextStyle( + color: theme.inactiveColor + ), + children: pieces + ) + ) + ), + ], + ), + ), + ), + ); + } +} diff --git a/lib/pages/transcription/widgets/subtitles.dart b/lib/pages/transcription/widgets/subtitles.dart new file mode 100644 index 00000000..da17b0c9 --- /dev/null +++ b/lib/pages/transcription/widgets/subtitles.dart @@ -0,0 +1,60 @@ +import 'dart:async'; + +import 'package:fluent_ui/fluent_ui.dart'; +import 'package:inference/interop/openvino_bindings.dart'; + +class Subtitles extends StatelessWidget { + const Subtitles({ + super.key, + required this.transcription, + required this.subtitleIndex, + }); + + final Map>? 
transcription; + final int subtitleIndex; + + static const double fontSize = 18; + + @override + Widget build(BuildContext context) { + return Padding( + padding: const EdgeInsets.only(left: 8, right: 8, bottom: 60), + child: SizedBox( + height: 100, + child: Builder( + builder: (context) { + if (transcription == null ) { + return Container(); + } + if (transcription![subtitleIndex] is TranscriptionModelResponse) { + final text = (transcription![subtitleIndex] as TranscriptionModelResponse).text; + return Stack( + alignment: Alignment.bottomCenter, + children: [ + Text(text, + textAlign: TextAlign.center, + style: TextStyle( + fontSize: fontSize, + foreground: Paint() + ..style = PaintingStyle.stroke + ..strokeWidth = 2 + ..color = Colors.black, + ) + ), + Text(text, + textAlign: TextAlign.center, + style: const TextStyle( + fontSize: fontSize, + color: Colors.white, + ) + ) + ], + ); + } + return Container(); + } + ), + ), + ); + } +} diff --git a/lib/pages/transcription/widgets/transcription.dart b/lib/pages/transcription/widgets/transcription.dart new file mode 100644 index 00000000..b02fcdc2 --- /dev/null +++ b/lib/pages/transcription/widgets/transcription.dart @@ -0,0 +1,141 @@ +import 'dart:async'; +import 'dart:io'; + +import 'package:file_picker/file_picker.dart'; +import 'package:fluent_ui/fluent_ui.dart'; +import 'package:inference/interop/openvino_bindings.dart'; +import 'package:inference/pages/transcription/utils/message.dart'; +import 'package:inference/pages/transcription/utils/section.dart'; +import 'package:inference/pages/transcription/widgets/paragraph.dart'; +import 'package:inference/widgets/controls/search_bar.dart'; + + +class Transcription extends StatefulWidget { + final DynamicRangeLoading>? transcription; + final Function(Duration)? onSeek; + final List messages; + const Transcription({super.key, this.onSeek, this.transcription, required this.messages}); + + @override + State createState() => _TranscriptionState(); +} + +class _TranscriptionState extends State { + final List _paragraphKeys = []; + final ScrollController _scrollController = ScrollController(); + final GlobalKey scrollKey = GlobalKey(); + String? searchText; + + void saveTranscript() async { + final file = await FilePicker.platform.saveFile( + dialogTitle: "Please select an output file:", + fileName: "transcription.txt", + ); + if (file == null){ + return; + } + + String contents = ""; + final indices = widget.transcription!.data.keys.toList()..sort(); + for (int i in indices) { + final part = widget.transcription!.data[i] as TranscriptionModelResponse; + for (final chunk in part.chunks) { + contents += chunk.text; + } + } + + await File(file).writeAsString(contents); + } + + void search(String text) { + setState(() { + searchText = text; + }); + + final pattern = RegExp(text, caseSensitive: false); + int? 
index; + for (int i = 0; i < widget.messages.length; i++) { + if (widget.messages[i].message.contains(pattern)) { + index = i; + break; + } + + } + if (index != null){ + final context = _paragraphKeys[index].currentContext; + + if (context != null) { + final renderBox = context.findRenderObject() as RenderBox?; + if (renderBox != null) { + final position = renderBox.localToGlobal(Offset.zero, ancestor: scrollKey.currentContext?.findRenderObject()); + final offset = _scrollController.offset + position.dy; + _scrollController.animateTo( + offset, + duration: const Duration(milliseconds: 500), + curve: Curves.easeInOut, + ); + } + } + } + } + + @override + Widget build(BuildContext context) { + return Column( + children: [ + Padding( + padding: const EdgeInsets.symmetric(vertical: 25, horizontal: 14), + child: Row( + children: [ + SearchBar(onChange: search, placeholder: "Search in transcript",), + Padding( + padding: const EdgeInsets.only(left: 8.0), + child: Tooltip( + message: widget.transcription!.complete + ? "Download transcript" + : "Transcribing...", + child: Button( + onPressed: widget.transcription?.complete ?? false + ? () => saveTranscript() + : null, + child: const Padding( + padding: EdgeInsets.symmetric(vertical: 2), + child: Icon(FluentIcons.download), + ), + ), + ), + ) + ], + ), + ), + Expanded( + child: SingleChildScrollView( + key: scrollKey, + controller: _scrollController, + child: Padding( + padding: const EdgeInsets.only(left: 10, right: 18), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: List.generate(widget.messages.length, (index) { + // Adjusting state in render is ugly. But works. + // This is done because we need a global key but the paragraphs are added as you go. + if (_paragraphKeys.length <= index) { + _paragraphKeys.add(GlobalKey()); + } + + return Paragraph( + key: _paragraphKeys[index], + message: widget.messages[index], + highlightedText: searchText, + onSeek: widget.onSeek, + ); + + }), + ), + ), + ), + ), + ], + ); + } +} diff --git a/lib/project.dart b/lib/project.dart index 32502d9b..4159014e 100644 --- a/lib/project.dart +++ b/lib/project.dart @@ -112,25 +112,30 @@ class Task { } } -enum ProjectType { image, text, speech } +enum ProjectType { image, text, textToImage, speech } ProjectType parseProjectType(String name) { if (name == "image") { return ProjectType.image; } - if (name == "text"){ + if (name == "text" || name == "text-generation"){ return ProjectType.text; } + if (name == "textToImage"){ + return ProjectType.textToImage; + } if (name == "speech") { return ProjectType.speech; } - throw UnimplementedError(); + throw UnimplementedError(name); } String projectTypeToString(ProjectType type) { switch(type){ case ProjectType.text: return "text"; + case ProjectType.textToImage: + return "textToImage"; case ProjectType.image: return "image"; case ProjectType.speech: diff --git a/lib/projects/projects_page.dart b/lib/projects/projects_page.dart index 7c80609b..ac6daf5d 100644 --- a/lib/projects/projects_page.dart +++ b/lib/projects/projects_page.dart @@ -9,7 +9,6 @@ import 'package:inference/config.dart'; import 'package:inference/header.dart'; import 'package:inference/importers/importer.dart'; import 'package:inference/project.dart'; -import 'package:inference/projects/task_type_filter.dart'; import 'package:inference/providers/project_filter_provider.dart'; import 'package:inference/providers/project_provider.dart'; import 'package:inference/searchbar.dart'; diff --git a/lib/providers/download_provider.dart 
b/lib/providers/download_provider.dart index 1a109c41..3da5b828 100644 --- a/lib/providers/download_provider.dart +++ b/lib/providers/download_provider.dart @@ -1,7 +1,4 @@ -import 'dart:io'; - import 'package:dio/dio.dart'; -import 'package:dio/io.dart'; import 'package:flutter/foundation.dart'; import 'package:inference/deployment_processor.dart'; import 'package:inference/project.dart'; @@ -22,11 +19,11 @@ class DownloadStats { } class DownloadProvider extends ChangeNotifier { - final Project? project; final Map _downloads = {}; CancelToken? _cancelToken; - DownloadProvider(this.project); + Function? onCancel; + DownloadProvider(); Future queue(Map downloads, String? token) async{ List promises = []; @@ -39,7 +36,7 @@ class DownloadProvider extends ChangeNotifier { _downloads[url] = state; final destination = downloads[url]; Map headers = {}; - if (token != null) { + if (token != null && token.isNotEmpty) { headers["Authorization"] = "Bearer $token"; } final promise = dio.download(url, destination, @@ -51,15 +48,16 @@ class DownloadProvider extends ChangeNotifier { state.total = total; notifyListeners(); } - }); - promise.catchError((e) { + }, + ).catchError((e) { if (e is DioException && e.type == DioExceptionType.cancel) { print("Download cancelled: $url"); + return Response(requestOptions: RequestOptions(path: url)); } else { _cancelToken?.cancel(); + throw e; } - }); - promise.then((_) => state.done); + }).then((_) => state.done); promises.add(promise); } @@ -92,7 +90,7 @@ class DownloadProvider extends ChangeNotifier { void cancel() { _cancelToken?.cancel(); - deleteProjectData(project!); + onCancel?.call(); } @override diff --git a/lib/providers/text_to_image_inference_provider.dart b/lib/providers/text_to_image_inference_provider.dart new file mode 100644 index 00000000..adafab4c --- /dev/null +++ b/lib/providers/text_to_image_inference_provider.dart @@ -0,0 +1,213 @@ +import 'dart:async'; +import 'dart:convert'; +import 'dart:typed_data'; +import 'dart:ui' as ui; + +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart' show rootBundle; +import 'package:inference/interop/generated_bindings.dart'; +import 'package:inference/interop/tti_inference.dart'; +import 'package:inference/project.dart'; + +enum Speaker { assistant, user } + +class ImageContent { + final Uint8List imageData; + final int width; + final int height; + final BoxFit boxFit; + const ImageContent(this.imageData, this.width, this.height, this.boxFit); + +} + +class Message { + final Speaker speaker; + final String message; + final ImageContent? imageContent; + final TTIMetrics? metrics; + final bool allowedCopy; // Don't allow loading images to be copied + + const Message(this.speaker, this.message, this.imageContent, this.metrics, this.allowedCopy); +} + +class TextToImageInferenceProvider extends ChangeNotifier { + Completer loaded = Completer(); + + Project? _project; + String? _device; + + Project? get project => _project; + + String? get device => _device; + + TTIMetrics? get metrics => _messages.lastOrNull?.metrics; + + Uint8List? _imageBytes; + + int _loadWidth = 512; + int _loadHeight = 512; + + int _width = 512; + + int get width => _width; + + set width(int v) { + _width = v; + notifyListeners(); + } + + int _height = 512; + + int get height => _height; + + set height(int v) { + _height = v; + notifyListeners(); + } + + int _rounds = 20; + + int get rounds => _rounds; + + set rounds(int v) { + _rounds = v; + notifyListeners(); + } + + TTIInference? 
_inference; + final stopWatch = Stopwatch(); + int n = 0; + + TextToImageInferenceProvider(Project? project, String? device) { + _project = project; + _device = device; + + if (project != null && device != null) { + preloadImageBytes(); + print("instantiating project: ${project.name}"); + print(project.storagePath); + print(device); + TTIInference.init(project.storagePath, device).then((instance) { + print("done loading"); + _inference = instance; + loaded.complete(); + notifyListeners(); + }); + } + } + + void preloadImageBytes() { + rootBundle.load('images/intel-loading.gif').then((data) { + _imageBytes = data.buffer.asUint8List(); + // Optionally notify listeners if you need to update UI + notifyListeners(); + }); + } + + + bool sameProps(Project? project, String? device) { + return _project == project && _device == device; + } + + bool get initialized => loaded.isCompleted; + final List _messages = []; + + double? _speed; + + double? get speed => _speed; + + set speed(double? speed) { + _speed = speed; + notifyListeners(); + } + + String? _response; + + String? get response => _response; + + set response(String? response) { + _response = response; + notifyListeners(); + } + + String get task { + return "Image Generation"; + } + + Message? get interimResponse { + if (_response == null) { + return null; + } + final imageContent = ImageContent(_imageBytes ?? Uint8List(0), _loadWidth, _loadHeight, BoxFit.contain); + + return Message(Speaker.assistant, response!, imageContent, null, false); + } + + List get messages { + if (interimResponse == null) { + return _messages; + } + return [..._messages, interimResponse!]; + } + + Future createImage(Uint8List bytes) async { + return await decodeImageFromList(bytes); + } + + Future message(String message) async { + _response = "Generating image..."; + + _messages.add(Message(Speaker.user, message, null, null, false)); + notifyListeners(); + + _loadWidth = width; + _loadHeight = height; + final response = await _inference!.prompt(message, width, height, rounds); + + final imageData = base64Decode(response.content); + final imageContent = ImageContent(imageData, _loadWidth, _loadHeight, BoxFit.contain); + + if (_messages.isNotEmpty) { + _messages.add(Message(Speaker.assistant, "Generated image", imageContent, response.metrics, true)); + } + _response = null; + + n = 0; + if (hasListeners) { + notifyListeners(); + } + } + + void close() { + _messages.clear(); + _inference?.close(); + _response = null; + if (_inference != null) { + _inference!.close(); + } + } + + void forceStop() { + // Todo + } + + void reset() { + //_inference?.close(); + // _inference?.forceStop(); + // _inference?.clearHistory(); + _messages.clear(); + _response = null; + notifyListeners(); + } + + @override + void dispose() { + if (_inference != null) { + _inference?.close(); + super.dispose(); + } else { + close(); + super.dispose(); + } + } +} diff --git a/lib/public_models.dart b/lib/public_models.dart index 0305bb62..0efd70fe 100644 --- a/lib/public_models.dart +++ b/lib/public_models.dart @@ -30,7 +30,7 @@ void writeProjectJson(PublicProject project) { } Future getAdditionalModelInfo(PublicProject project) async { - final configJsonURL = huggingFaceModelFileUrl(project.id, "config.json"); + final configJsonURL = huggingFaceModelFileUrl(project.modelId, "config.json"); final config = jsonDecode((await http.get( Uri.parse(configJsonURL), headers: { @@ -47,9 +47,9 @@ Future> getFilesForModel(String modelId) async { return List.from(result.data[0]["siblings"].map((m) => 
m.values.first)); } -Future> downloadFiles(PublicProject project) async { - final files = await getFilesForModel(project.id); - return { for (var v in files) huggingFaceModelFileUrl(project.id, v) : platformContext.join(project.storagePath, v) }; +Future> listDownloadFiles(PublicProject project) async { + final files = await getFilesForModel(project.modelId); + return { for (var v in files) huggingFaceModelFileUrl(project.modelId, v) : platformContext.join(project.storagePath, v) }; } String huggingFaceModelFileUrl(String modelId, String name) { diff --git a/lib/router.dart b/lib/router.dart index 19bae45b..1c32c68f 100644 --- a/lib/router.dart +++ b/lib/router.dart @@ -1,13 +1,15 @@ import 'package:fluent_ui/fluent_ui.dart'; import 'package:go_router/go_router.dart'; -import 'package:inference/importers/manifest_importer.dart'; import 'package:inference/openvino_console_app.dart'; +import 'package:inference/pages/download_model/download_model.dart'; import 'package:inference/pages/home/home.dart'; import 'package:inference/pages/import/import.dart'; import 'package:inference/pages/models/models.dart'; -import 'package:inference/pages/models/inference.dart'; import 'package:inference/project.dart'; +import 'package:inference/providers/download_provider.dart'; +import 'package:provider/provider.dart'; +import 'package:inference/pages/models/inference.dart'; final rootNavigatorKey = GlobalKey(); final _shellNavigatorKey = GlobalKey(); @@ -24,7 +26,7 @@ final router = GoRouter(navigatorKey: rootNavigatorKey, GoRoute(path: '/home', builder: (context, state) => const HomePage()), GoRoute(path: '/models', builder: (context, state) => const ModelsPage()), GoRoute(path: '/models/import', builder: (context, state) => const ImportPage()), - GoRoute(path: '/models/download', builder: (context, state) => Container(color: Colors.blue, child: Text('Downloading model: ${(state.extra as Model).id}'))), + GoRoute(path: '/models/download', builder: (context, state) => DownloadPage(project: state.extra as PublicProject)), GoRoute(path: '/models/inference', builder: (context, state) => InferencePage(state.extra as Project)), ], ) diff --git a/lib/theme_fluent.dart b/lib/theme_fluent.dart index 1c17a629..47111072 100644 --- a/lib/theme_fluent.dart +++ b/lib/theme_fluent.dart @@ -85,6 +85,8 @@ class AppTheme extends ChangeNotifier { } +const foreground3Color = Color(0xFF616161); + class DarkLightColor { final Color light; final Color dark; diff --git a/lib/utils/drop_area.dart b/lib/utils/drop_area.dart index 61bb2f8d..dcdf7619 100644 --- a/lib/utils/drop_area.dart +++ b/lib/utils/drop_area.dart @@ -50,51 +50,49 @@ class _DropAreaState extends State { @override Widget build(BuildContext context) { - return Expanded( - child: DropTarget( - onDragDone: (details) => handleDrop(details), - onDragExited: (val) => hideReleaseMessage(), - onDragEntered: (val) => showReleaseMessage(), - child: Container( - decoration: BoxDecoration( - borderRadius: BorderRadius.circular(4.0), - color: intelGray, - ), - child: Builder( - builder: (context) { - if (!_showReleaseMessage && widget.showChild) { - return widget.child!; - } - return Center( - child: SizedBox( - height: 310, - child: Column( - crossAxisAlignment: CrossAxisAlignment.center, - mainAxisAlignment: MainAxisAlignment.spaceBetween, - children: [ - SvgPicture.asset('images/drop.svg'), - ( _showReleaseMessage - ? 
const Text("Release to drop media") - : Text("Drop ${widget.type} here") - ), - ElevatedButton( - onPressed: () => showUploadMenu(), - child: const Text("Upload") - ), - Builder( - builder: (context) { - if (widget.extensions == null) { - return Container(); - } - return Text(widget.extensions!.join(", ")); + return DropTarget( + onDragDone: (details) => handleDrop(details), + onDragExited: (val) => hideReleaseMessage(), + onDragEntered: (val) => showReleaseMessage(), + child: Container( + decoration: BoxDecoration( + borderRadius: BorderRadius.circular(4.0), + color: intelGray, + ), + child: Builder( + builder: (context) { + if (!_showReleaseMessage && widget.showChild) { + return widget.child!; + } + return Center( + child: SizedBox( + height: 310, + child: Column( + crossAxisAlignment: CrossAxisAlignment.center, + mainAxisAlignment: MainAxisAlignment.spaceBetween, + children: [ + SvgPicture.asset('images/drop.svg'), + ( _showReleaseMessage + ? const Text("Release to drop media") + : Text("Drop ${widget.type} here") + ), + ElevatedButton( + onPressed: () => showUploadMenu(), + child: const Text("Upload") + ), + Builder( + builder: (context) { + if (widget.extensions == null) { + return Container(); } - ) - ], - ), + return Text(widget.extensions!.join(", ")); + } + ) + ], ), - ); - } - ), + ), + ); + } ), ), ); diff --git a/lib/widgets/controls/search_bar.dart b/lib/widgets/controls/search_bar.dart index 71424061..7b3e6a7a 100644 --- a/lib/widgets/controls/search_bar.dart +++ b/lib/widgets/controls/search_bar.dart @@ -50,4 +50,4 @@ class _SearchBarState extends State { ), ); } -} \ No newline at end of file +} diff --git a/lib/widgets/performance_tile.dart b/lib/widgets/performance_tile.dart new file mode 100644 index 00000000..6ab5dd06 --- /dev/null +++ b/lib/widgets/performance_tile.dart @@ -0,0 +1,65 @@ +import 'package:fluent_ui/fluent_ui.dart'; + +class PerformanceTile extends StatelessWidget { + final String title; + final String value; + final String unit; + final bool tall; + + const PerformanceTile({ + super.key, + required this.title, + required this.value, + required this.unit, + this.tall = false, + }); + + @override + Widget build(BuildContext context) { + final theme = FluentTheme.of(context); + return Padding( + padding: const EdgeInsets.all(8.0), + child: Acrylic( + elevation: 5, + shadowColor: Colors.black, + shape: RoundedRectangleBorder ( + borderRadius: BorderRadius.circular(4), + ), + child: SizedBox( + width: 268, + height: tall ? 200 : 124, + child: Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + crossAxisAlignment: CrossAxisAlignment.center, + children: [ + Text( + title, + style: const TextStyle( + fontSize: 14, + ), + ), + RichText( + text: TextSpan( + style: TextStyle( + color: theme.inactiveColor, + ), + children: [ + TextSpan(text: value, + style: const TextStyle( + fontSize: 30, + ) + ), + TextSpan(text: " $unit"), + ] + ) + ), + ], + ) + ) + ), + ), + ); + } + +} diff --git a/licensing/win-third-party-programs.txt b/licensing/win-third-party-programs.txt new file mode 100644 index 00000000..b13773cb --- /dev/null +++ b/licensing/win-third-party-programs.txt @@ -0,0 +1,703 @@ +OpenVINO™ Test Drive Distribution Third Party Programs File + +This file contains the list of third party software ("third party programs") +contained in the Intel software and their required notices and/or license +terms. 
This third party software, even if included with the distribution of +the Intel software, may be governed by separate license terms, including +without limitation, third party license terms, other Intel software license +terms, and open source software license terms. These separate license terms +govern your use of the third party programs as set forth in the +"third-party-programs.txt" or other similarly-named text file. + +Third party programs and their corresponding required notices and/or license +terms are listed below. + +------------------------------------------------------------- +Software Released under the Apache License 2.0: + +OpenVINO toolkit © Intel +threading_building_blocks © Intel +clock +desktop_drop Copyright (c) 2021 Mixin Network +fake_async +material_color_utilities Copyright 2021 Google LLC +mediapipe +core_tokenizers.dll Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved. +desktop_drop_plugin.dll Copyright [2021] [Mixin] +opencv_world490.dll Copyright © 2024 , OpenCV team +openvino.dll © Copyright Intel Corporation +openvino_auto_batch_plugin.dll © Copyright Intel Corporation +openvino_auto_plugin.dll © Copyright Intel Corporation +openvino_c.dll © Copyright Intel Corporation +openvino_genai.dll © Copyright Intel Corporation +openvino_hetero_plugin.dll © Copyright Intel Corporation +openvino_intel_cpu_plugin.dll © Copyright Intel Corporation +openvino_intel_gpu_plugin.dll © Copyright Intel Corporation +openvino_intel_npu_plugin.dll © Copyright Intel Corporation +openvino_ir_frontend.dll © Copyright Intel Corporation +openvino_onnx_frontend.dll © Copyright Intel Corporation +openvino_paddle_frontend.dll © Copyright Intel Corporation +openvino_pytorch_frontend.dll © Copyright Intel Corporation +openvino_tensorflow_frontend.dll © Copyright Intel Corporation +openvino_tensorflow_lite_frontend.dll © Copyright Intel Corporation +openvino_tokenizers.dll © Copyright Intel Corporation + + + +Apache License 2.0 +SPDX-License-Identifier: Apache-2.0 + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
+ +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + + + +------------------------------------------------------------- +Software Released under the MIT license: + +fmt Copyright (c) 2012 - present, Victor Zverovich and {fmt} contributors +glew "Copyright (C) 2002-2007, Milan Ikits +Copyright (C) 2002-2007, Marcelo E. Magallon +Copyright (C) 2002, Lev Povalahev" +pugixml Copyright (c) 2006-2024 Arseny Kapoulkine +archive Copyright (c) 2013-2021 Brendan Duncan. +console Copyright (c) 2014 DirectCode +csv Copyright (c) 2014 Christian Loitsch +cupertino_icons Copyright 2013 The Flutter Authors. +dio "Copyright (c) 2018 Wen Du (wendux) +Copyright (c) 2022 The CFUG Team" +dio_web_adapter "Copyright (c) 2018 Wen Du (wendux) +Copyright (c) 2022 The CFUG Team" +file_picker Copyright (c) 2018 Miguel Ruivo +flutter_svg Copyright (c) 2018 Dan Field +get_it Copyright (c) 2018 Thomas Burkhart +image Copyright (c) 2013-2022 Brendan Duncan. 
+msix Copyright (c) 2022 Yehuda Kremer +nested Copyright (c) 2019 Remi Rousselet +path_parsing Copyright (c) 2018 Dan Field +provider +uuid Copyright (c) 2021 Yulian Kuncheff +xml Copyright (c) 2006-2024 Lukas Renggli. +yaml "Copyright (c) 2014, the Dart project authors. +Copyright (c) 2006, Kirill Simonov." + +MIT license +SPDX-License-Identifier: MIT + +The MIT License +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +------------------------------------------------------------- +Software Released under BSD 1-Clause License + +flutter Copyright 2014 The Flutter Authors. +glog Copyright © 2024, Google Inc. +ilmbase Copyright © OpenEXR a Series of LF Projects, LLC. +libwebp Copyright (c) 2010, Google Inc +opencv +openexr Copyright (c) Contributors to the OpenEXR Project. +openjpeg " Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium + * Copyright (c) 2002-2014, Professor Benoit Macq + * Copyright (c) 2003-2014, Antonin Descampe + * Copyright (c) 2003-2009, Francois-Olivier Devaux + * Copyright (c) 2005, Herve Drolon, FreeImage Team + * Copyright (c) 2002-2003, Yannick Verschueren + * Copyright (c) 2001-2003, David Janssens + * Copyright (c) 2011-2012, Centre National d'Etudes Spatiales (CNES), France + * Copyright (c) 2012, CS Systemes d'Information, France" +protobuf Copyright 2008 Google Inc. +re2 Copyright (c) 2009 The RE2 Authors. +skia Copyright (c) 2011 Google Inc. + +SPDX-License-Identifier: BSD-1-Clause + +BSD 1-Clause License + +Copyright (c) All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +------------------------------------------------------------- +Software Released under BSD 2-Clause License + +sprintf Copyright (c) 2012, Richard Eames + +SPDX-License-Identifier: BSD-2-Clause + +BSD 2-Clause "Simplified" License + +Copyright (c) . + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------------------------------------------------------------- +Software Released under BSD 3-Clause "New" or "Revised" License + +args Copyright 2024, the Dart project authors. +async Copyright 2015, the Dart project authors. +boolean_selector Copyright 2016, the Dart project authors. +characters Copyright 2019, the Dart project authors. +cli_util Copyright 2015, the Dart project authors. +collection Copyright 2015, the Dart project authors. +cross_file Copyright 2013 The Flutter Authors. +crypto Copyright 2015, the Dart project authors. +ffi Copyright 2023, the Dart project authors. +fixnum Copyright 2014, the Dart project authors. +flutter_lints Copyright 2014 The Flutter Authors. +flutter_plugin_android_lifecycle Copyright 2014 The Flutter Authors. +flutter_test Copyright 2014 The Flutter Authors. +flutter_web_plugins Copyright 2014 The Flutter Authors. +go_router Copyright 2014 The Flutter Authors. +http Copyright 2014, the Dart project authors. +http_parser Copyright 2014, the Dart project authors. +intl Copyright 2023, the Dart project authors. +leak_tracker Copyright 2022, the Dart project authors. +leak_tracker_flutter_testing Copyright 2022, the Dart project authors. +leak_tracker_testing Copyright 2022, the Dart project authors. +lints Copyright 2021, the Dart project authors. +logging Copyright 2013, the Dart project authors. +matcher Copyright 2014, the Dart project authors. +meta Copyright 2012, the Dart project authors. +mime Copyright 2023, the Dart project authors. +package_config Copyright 2019, the Dart project authors. +path Copyright 2014, the Dart project authors. +path_provider Copyright 2013 The Flutter Authors. +path_provider_android Copyright 2013 The Flutter Authors. +path_provider_foundation Copyright 2013 The Flutter Authors. +path_provider_linux Copyright 2013 The Flutter Authors. +path_provider_platform_interface Copyright 2013 The Flutter Authors. +path_provider_windows Copyright 2013 The Flutter Authors. +petitparser Copyright (c) 2006-2024 Lukas Renggli. 
+platform Copyright 2017, the Dart project authors. +plugin_platform_interface Copyright 2013 The Flutter Authors. +pub_semver Copyright 2014, the Dart project authors. +shimmer Copyright 2013, the Dart project authors. +sky_engine Copyright 2014 The Chromium Authors. +source_span Copyright 2014, the Dart project authors. +stack_trace Copyright 2014, the Dart project authors. +stream_channel Copyright 2015, the Dart project authors. +string_scanner Copyright 2014, the Dart project authors. +term_glyph Copyright 2017, the Dart project authors. +test_api Copyright 2018, the Dart project authors. +typed_data Copyright 2015, the Dart project authors. +vector_graphics Copyright 2013 The Flutter Authors. +vector_graphics_codec Copyright 2013 The Flutter Authors. +vector_graphics_compiler Copyright 2013 The Flutter Authors. +vector_math Copyright 2015, Google Inc. +vm_service Copyright 2015, the Dart project authors. +web Copyright 2023, the Dart project authors. +win32 Copyright (c) 2024, Halil Durmus +xdg_directories Copyright 2013 The Flutter Authors. +flutter_windows.dll Copyright 2014 The Flutter Authors. All rights reserved. + + +SPDX-License-Identifier: BSD-3-Clause + +BSD 3-Clause "New" or "Revised" License + +Copyright (c) . + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +------------------------------------------------------------- +Software Released under zlib License + +zlib copyright © 1995-2024 Jean-loup Gailly and Mark Adler. +blend2d.dll Copyright (c) 2017-2024 The Blend2D Authors + +SPDX-License-Identifier: Zlib + +zlib License + +Copyright (c) + +This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. + +Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: + +1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. 
If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. +2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. +3. This notice may not be removed or altered from any source distribution. + +------------------------------------------------------------- +Software Released under OpenSSL License + +boringssl Copyright (c) 1998-2011 The OpenSSL Project. + +SPDX-License-Identifier: OpenSSL + +OpenSSL License + +Copyright (c) 1998-2008 The OpenSSL Project. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +3. All advertising materials mentioning features or use of this software must display the following acknowledgment: "This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit. (http://www.openssl.org/)" +4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to endorse or promote products derived from this software without prior written permission. For written permission, please contact openssl-core@openssl.org. +5. Products derived from this software may not be called "OpenSSL" nor may "OpenSSL" appear in their names without prior written permission of the OpenSSL Project. +6. Redistributions of any form whatsoever must retain the following acknowledgment: "This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/)" +THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +This product includes cryptographic software written by Eric Young (eay@cryptsoft.com). This product includes software written by Tim Hudson (tjh@cryptsoft.com). + +Original SSLeay License + +Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) +All rights reserved. + +This package is an SSL implementation written by Eric Young (eay@cryptsoft.com). The implementation was written so as to conform with Netscapes SSL. + +This library is free for commercial and non-commercial use as long as the following conditions are aheared to. The following conditions apply to all code found in this distribution, be it the RC4, RSA, lhash, DES, etc., code; not just the SSL code. The SSL documentation included with this distribution is covered by the same copyright terms except that the holder is Tim Hudson (tjh@cryptsoft.com). 
+ +Copyright remains Eric Young's, and as such any Copyright notices in the code are not to be removed. If this package is used in a product, Eric Young should be given attribution as the author of the parts of the library used. This can be in the form of a textual message at program startup or in documentation (online or textual) provided with the package. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the copyright notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +3. All advertising materials mentioning features or use of this software must display the following acknowledgement: +"This product includes cryptographic software written by Eric Young (eay@cryptsoft.com)" +The word 'cryptographic' can be left out if the rouines from the library being used are not cryptographic related :-). +4. If you include any Windows specific code (or a derivative thereof) from the apps directory (application code) you must include an acknowledgement: "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" +THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The licence and distribution terms for any publically available version or derivative of this code cannot be changed. i.e. this code cannot simply be copied and put under another distribution licence [including the GNU Public Licence.] + +------------------------------------------------------------- +Software Released under Unicode-TOU license + +icu Copyright © 1991-Present Unicode, Inc. + +SPDX-License-Identifier: Unicode-TOU + +Unicode Terms of Use + +For the general privacy policy governing access to this site, see the Unicode Privacy Policy. For trademark usage, see the Unicode® Consortium Name and Trademark Usage Policy. + +A. Unicode Copyright. +1. Copyright © 1991-2014 Unicode, Inc. All rights reserved. +2. Certain documents and files on this website contain a legend indicating that "Modification is permitted." Any person is hereby authorized, without fee, to modify such documents and files to create derivative works conforming to the Unicode® Standard, subject to Terms and Conditions herein. +3. Any person is hereby authorized, without fee, to view, use, reproduce, and distribute all documents and files solely for informational purposes in the creation of products supporting the Unicode Standard, subject to the Terms and Conditions herein. +4. 
Further specifications of rights and restrictions pertaining to the use of the particular set of data files known as the "Unicode Character Database" can be found in Exhibit 1. +5. Each version of the Unicode Standard has further specifications of rights and restrictions of use. For the book editions (Unicode 5.0 and earlier), these are found on the back of the title page. The online code charts carry specific restrictions. All other files, including online documentation of the core specification for Unicode 6.0 and later, are covered under these general Terms of Use. +6. No license is granted to "mirror" the Unicode website where a fee is charged for access to the "mirror" site. +7. Modification is not permitted with respect to this document. All copies of this document must be verbatim. +B. Restricted Rights Legend. Any technical data or software which is licensed to the United States of America, its agencies and/or instrumentalities under this Agreement is commercial technical data or commercial computer software developed exclusively at private expense as defined in FAR 2.101, or DFARS 252.227-7014 (June 1995), as applicable. For technical data, use, duplication, or disclosure by the Government is subject to restrictions as set forth in DFARS 202.227-7015 Technical Data, Commercial and Items (Nov 1995) and this Agreement. For Software, in accordance with FAR 12-212 or DFARS 227-7202, as applicable, use, duplication or disclosure by the Government is subject to the restrictions set forth in this Agreement. +C. Warranties and Disclaimers. +1. This publication and/or website may include technical or typographical errors or other inaccuracies . Changes are periodically added to the information herein; these changes will be incorporated in new editions of the publication and/or website. Unicode may make improvements and/or changes in the product(s) and/or program(s) described in this publication and/or website at any time. +2. If this file has been purchased on magnetic or optical media from Unicode, Inc. the sole and exclusive remedy for any claim will be exchange of the defective media within ninety (90) days of original purchase. +3. EXCEPT AS PROVIDED IN SECTION C.2, THIS PUBLICATION AND/OR SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND EITHER EXPRESS, IMPLIED, OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, OR NON-INFRINGEMENT. UNICODE AND ITS LICENSORS ASSUME NO RESPONSIBILITY FOR ERRORS OR OMISSIONS IN THIS PUBLICATION AND/OR SOFTWARE OR OTHER DOCUMENTS WHICH ARE REFERENCED BY OR LINKED TO THIS PUBLICATION OR THE UNICODE WEBSITE. +D. Waiver of Damages. In no event shall Unicode or its licensors be liable for any special, incidental, indirect or consequential damages of any kind, or any damages whatsoever, whether or not Unicode was advised of the possibility of the damage, including, without limitation, those resulting from the following: loss of use, data or profits, in connection with the use, modification or distribution of this information or its derivatives. +E. Trademarks & Logos. +1. The Unicode Word Mark and the Unicode Logo are trademarks of Unicode, Inc. "The Unicode Consortium" and "Unicode, Inc." are trade names of Unicode, Inc. Use of the information and materials found on this website indicates your acknowledgement of Unicode, Inc.'s exclusive worldwide rights in the Unicode Word Mark, the Unicode Logo, and the Unicode trade names. +2. 
The Unicode Consortium Name and Trademark Usage Policy ("Trademark Policy") are incorporated herein by reference and you agree to abide by the provisions of the Trademark Policy, which may be changed from time to time in the sole discretion of Unicode, Inc. +3. All third party trademarks referenced herein are the property of their respective owners. +F. Miscellaneous. +1. Jurisdiction and Venue. This server is operated from a location in the State of California, United States of America. Unicode makes no representation that the materials are appropriate for use in other locations. If you access this server from other locations, you are responsible for compliance with local laws. This Agreement, all use of this site and any claims and damages resulting from use of this site are governed solely by the laws of the State of California without regard to any principles which would apply the laws of a different jurisdiction. The user agrees that any disputes regarding this site shall be resolved solely in the courts located in Santa Clara County, California. The user agrees said courts have personal jurisdiction and agree to waive any right to transfer the dispute to any other forum. +2. Modification by Unicode Unicode shall have the right to modify this Agreement at any time by posting it to this site. The user may not assign any part of this Agreement without Unicode's prior written consent. +3. Taxes. The user agrees to pay any taxes arising from access to this website or use of the information herein, except for those based on Unicode's net income. +4. Severability. If any provision of this Agreement is declared invalid or unenforceable, the remaining provisions of this Agreement shall remain in effect. +5. Entire Agreement. This Agreement constitutes the entire agreement between the parties. + +------------------------------------------------------------- +Software Released under libpng License + +libpng " * Copyright (c) 1995-2024 The PNG Reference Library Authors. + * Copyright (c) 2018-2024 Cosmin Truta. + * Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson. + * Copyright (c) 1996-1997 Andreas Dilger. + * Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc." + +SPDX-License-Identifier: Libpng + +This copy of the libpng notices is provided for your convenience. In case of any discrepancy between this copy and the notices in the file png.h that is included in the libpng distribution, the latter shall prevail. + +COPYRIGHT NOTICE, DISCLAIMER, and LICENSE: + +If you modify libpng you may insert additional notices immediately following this sentence. + +This code is released under the libpng license. + +libpng versions 1.2.6, August 15, 2004, through 1.4.5, December 9, 2010, are Copyright (c) 2004, 2006-2010 Glenn Randers-Pehrson, and are distributed according to the same disclaimer and license as libpng-1.2.5 with the following individual added to the list of Contributing Authors + +Cosmin Truta + +libpng versions 1.0.7, July 1, 2000, through 1.2.5 - October 3, 2002, are +Copyright (c) 2000-2002 Glenn Randers-Pehrson, and are distributed according to the same disclaimer and license as libpng-1.0.6 with the following individuals added to the list of Contributing Authors + +Simon-Pierre Cadieux +Eric S. Raymond +Gilles Vollant + +and with the following additions to the disclaimer: + +There is no warranty against interference with your enjoyment of the library or against infringement. 
There is no warranty that our efforts or the library will fulfill any of your particular purposes or needs. This library is provided with all faults, and the entire risk of satisfactory quality, performance, accuracy, and effort is with the user. + +libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are +Copyright (c) 1998, 1999 Glenn Randers-Pehrson, and are distributed according to the same disclaimer and license as libpng-0.96, with the following individuals added to the list of Contributing Authors: + +Tom Lane +Glenn Randers-Pehrson +Willem van Schaik + +libpng versions 0.89, June 1996, through 0.96, May 1997, are +Copyright (c) 1996, 1997 Andreas Digger +Distributed according to the same disclaimer and license as libpng-0.88, with the following individuals added to the list of Contributing Authors: + +John Bowler +Kevin Bracey +Sam Bushell +Magnus Holmgren +Greg Roelofs +Tom Tanner + +libpng versions 0.5, May 1995, through 0.88, January 1996, are +Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc. + +For the purposes of this copyright and license, "Contributing Authors" is defined as the following set of individuals: + +Andreas Dilger +Dave Martindale +Guy Eric Schalnat +Paul Schmidt +Tim Wegner + +The PNG Reference Library is supplied "AS IS". The Contributing Authors and Group 42, Inc. disclaim all warranties, expressed or implied, including, without limitation, the warranties of merchantability and of fitness for any purpose. The Contributing Authors and Group 42, Inc. assume no liability for direct, indirect, incidental, special, exemplary, or consequential damages, which may result from the use of the PNG Reference Library, even if advised of the possibility of such damage. + +Permission is hereby granted to use, copy, modify, and distribute this source code, or portions hereof, for any purpose, without fee, subject to the following restrictions: + +1. The origin of this source code must not be misrepresented. +2. Altered versions must be plainly marked as such and must not be misrepresented as being the original source. +3. This Copyright notice may not be removed or altered from any source or altered source distribution. +The Contributing Authors and Group 42, Inc. specifically permit, without fee, and encourage the use of this source code as a component to supporting the PNG file format in commercial products. If you use this source code in a product, acknowledgment is not required but would be appreciated. + +A "png_get_copyright" function is available, for convenient use in "about" boxes and the like: + +printf("%s",png_get_copyright(NULL)); + +Also, the PNG logo (in PNG format, of course) is supplied in the files "pngbar.png" and "pngbar.jpg (88x31) and "pngnow.png" (98x31). + +Libpng is OSI Certified Open Source Software. OSI Certified Open Source is a certification mark of the Open Source Initiative. + +Glenn Randers-Pehrson +glennrp at users.sourceforge.net +December 9, 2010 + +------------------------------------------------------------- +Software Released under libtiff License + +libtiff © Copyright 1988-2022, LibTIFF contributors + +SPDX-License-Identifier: libtiff + +Copyright (c) 1988-1997 Sam Leffler + +Copyright (c) 1991-1997 Silicon Graphics, Inc. 
+ +Permission to use, copy, modify, distribute, and sell this software and its documentation for any purpose is hereby granted without fee, provided that (i) the above copyright notices and this permission notice appear in all copies of the software and related documentation, and (ii) the names of Sam Leffler and Silicon Graphics may not be used in any advertising or publicity relating to the software without the specific, prior written permission of Sam Leffler and Silicon Graphics. + +THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. + +IN NO EVENT SHALL SAM LEFFLER OR SILICON GRAPHICS BE LIABLE FOR ANY SPECIAL, INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +------------------------------------------------------------- +Software Released under libjpeg-turbo Licenses + +libjpeg-turbo (C) 1991-2020, Thomas G. Lane, Guido Vollbeding + +SPDX-License-Identifier: n/a + +libjpeg-turbo is covered by two compatible BSD-style open source licenses: + +The IJG (Independent JPEG Group) License, which is listed in README.ijg + +This license applies to the libjpeg API library and associated programs, including any code inherited from libjpeg and any modifications to that code. Note that the libjpeg-turbo SIMD source code bears the zlib License, but in the context of the overall libjpeg API library, the terms of the zlib License are subsumed by the terms of the IJG License. + +The Modified (3-clause) BSD License, which is listed below + +This license applies to the TurboJPEG API library and associated programs, as well as the build system. Note that the TurboJPEG API library wraps the libjpeg API library, so in the context of the overall TurboJPEG API library, both the terms of the IJG License and the terms of the Modified (3-clause) BSD License apply. + + +------------------------------------------------------------- +Software Released under MICROSOFT SOFTWARE Licenses + +msvcp140.dll (C)MICROSOFT +vcruntime140.dll (C)MICROSOFT +vcruntime140_1.dll (C)MICROSOFT + +SPDX-License-Identifier: n/a + +MICROSOFT SOFTWARE LICENSE TERMS + +MICROSOFT VISUAL STUDIO ENTERPRISE 2019, VISUAL STUDIO PROFESSIONAL 2019, VISUAL STUDIO TEST PROFESSIONAL 2019 AND TRIAL EDITION + +These license terms are an agreement between you and Microsoft Corporation (or based on where you live, one of its affiliates). They apply to the software named above. The terms also apply to any Microsoft services and updates for the software, except to the extent those have different terms. + +BY USING THE SOFTWARE, YOU ACCEPT THESE TERMS. IF YOU DO NOT ACCEPT THEM, DO NOT USE THE SOFTWARE. INSTEAD, RETURN IT TO THE RETAILER FOR A REFUND OR CREDIT. If you cannot obtain a refund there, contact Microsoft about Microsoft’s refund policies. See www.microsoft.com/worldwide. In the United States and Canada, call (800) MICROSOFT or see aka.ms/nareturns. + +________________________________________________________________________________________ + +TRIAL EDITION USE RIGHTS. If you have not acquired a valid full-use license, the software is a trial edition, and this Section applies to your use of the trial edition. + +A. GENERAL. 
You may use any number of copies of the trial edition on your devices. You may only use the trial edition for internal evaluation purposes, and only during the trial period. You may not distribute or deploy any applications you make with the trial edition to a production environment. You may run load tests of up to 250 virtual users during the trial period. + +B. TRIAL PERIOD AND CONVERSION. The trial period lasts for 30 days after you install the trial edition, plus any permitted extension period. After the expiration of the trial period, the trial edition will stop running. You may extend the trial period an additional 90 days if you sign in to the software. You may not be able to access data used with the trial edition after it stops running. You may convert your trial rights at any time to the full-use rights described below by acquiring a valid full-use license. + +C. DISCLAIMER OF WARRANTY. THE TRIAL EDITION IS LICENSED “AS-IS”. YOU BEAR THE RISK OF USING IT. MICROSOFT GIVES NO EXPRESS WARRANTIES, GUARANTEES OR CONDITIONS. TO THE EXTENT PERMITTED UNDER YOUR LOCAL LAWS, MICROSOFT EXCLUDES THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. + +FOR AUSTRALIA – YOU HAVE STATUTORY GUARANTEES UNDER THE AUSTRALIAN CONSUMER LAW AND NOTHING IN THESE TERMS IS INTENDED TO AFFECT THOSE RIGHTS. + +D. SUPPORT. Because the trial edition is “as is”, we may not provide support services for it. + +E. LIMITATIONS ON DAMAGES. YOU CAN RECOVER FROM MICROSOFT AND ITS SUPPLIERS ONLY DIRECT DAMAGES UP TO U.S. $5.00. YOU CANNOT RECOVER ANY OTHER DAMAGES, INCLUDING CONSEQUENTIAL, LOST PROFITS, SPECIAL, INDIRECT OR INCIDENTAL DAMAGES. + +This limitation applies to (a) anything related to the trial version, services, content (including code) on third party Internet sites, or third party programs; and (b) claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law. + +It also applies even if Microsoft knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your country may not allow the exclusion or limitation of incidental, consequential or other damages. + + + +FULL-USE LICENSE TERMS FOR THE SOFTWARE: When you acquire a valid license and either enter a product key or sign in to the software, the terms below apply. You may not share your product key or access credentials. + +1. OVERVIEW. + +a. Software. The software includes development tools, applications, and documentation. + +b. License Model. The software is licensed on a per user basis. + +2. USE RIGHTS. + +a. General. One user may use copies of the software on your devices to develop and test applications. This includes using copies of the software on your own internal servers that remain fully dedicated to your own use. You may not, however, separate the components of the software (except as otherwise stated in this agreement) and run those in a production environment, or on third party devices, or for any purpose other than developing and testing your applications. Running the software on Microsoft Azure may require separate online usage fees. + +b. Workloads. These license terms apply to your use of the workloads made available to you within the software, except to the extent a workload or a workload component comes with different license terms and support policies. + +c. Backup copy. 
You may make one backup copy of the software, for reinstalling the software. + +d. Online Services in the Software. Some features of the software make use of online services to provide you with information about updates to the software or extensions, or to enable you to retrieve content, collaborate with others, or otherwise supplement your development experience. As used throughout this agreement, the term “software” includes these online service features. + +e. Demo Use. The use rights permitted above include using the software to demonstrate your applications. + +3. TERMS FOR SPECIFIC COMPONENTS. a. Utilities. The software contains items on the Utilities List at https://aka.ms/vs/16/utilities. You may copy and install those items onto your devices to debug and deploy your applications and databases you developed with the software. The Utilities are designed for temporary use. Microsoft may not be able to patch or update Utilities separately from the rest of the software. Some Utilities by their nature may make it possible for others to access the devices on which the Utilities are installed. You should delete all Utilities you have installed after you finish debugging or deploying your applications and databases. Microsoft is not responsible for any third party use or access of devices, or of the applications or databases on devices, on which Utilities have been installed. + +b. Build Devices and Visual Studio Build Tools. You may copy and install files from the software or from Visual Studio Build Tools onto your build devices, including physical devices and virtual machines or containers on those machines, whether on-premises or remote machines that are owned by you, hosted on Microsoft Azure for you, or dedicated solely to your use (collectively, “Build Devices”). You and others in your organization may use these files on your Build Devices solely to compile, build, and verify applications developed by using the software, or run quality or performance tests of those applications as part of the build process. + +c. Font Components. While the software is running, you may use its fonts to display and print content. You may only: (i) embed fonts in content as permitted by the embedding restrictions in the fonts; and (ii) temporarily download them to a printer or other output device to help print content. + +d. Licenses for Other Components. + +· Microsoft Platforms. The software may include components from Microsoft Windows, Microsoft Windows Server, Microsoft SQL Server, Microsoft Exchange, Microsoft Office, or Microsoft SharePoint. These components are governed by separate agreements and their own product support policies, as described in the Microsoft “Licenses” folder accompanying the software, except that, if separate license terms for those components are included in the associated installation directly, those license terms control. + +· Third Party Components. The software may include third party components with separate legal notices or governed by other agreements, as may be described in the ThirdPartyNotices file(s) accompanying the software. + +e. Package Managers. The software includes package managers, like NuGet, that give you the option to download other Microsoft and third party software packages to use with your applications. Those packages are under their own licenses, and not these license terms. Microsoft does not distribute, license or provide any warranties for any of the third party packages. + +4. DISTRIBUTABLE CODE. 
The software contains code that you are permitted to distribute in applications you develop as described in this Section. For purposes of this Section 4, the term “distribution” also means deployment of your applications for third parties to access over the Internet. + +a. Right to Use and Distribute. The code and text files listed below are “Distributable Code”. + +· Distributable List. You may copy and distribute the object code form of code listed on the Distributable List located at https://aka.ms/vs/16/redistribution. + +· Sample Code, Templates, and Styles. You may copy, modify, and distribute the source and object code form of code marked as “sample”, “template”, “simple styles”, and “sketch styles”. + +· Third Party Distribution. You may permit distributors of your applications to copy and distribute the Distributable Code as part of those applications. + +b. Distribution Requirements. For any Distributable Code you distribute, you must: + +· add significant primary functionality to it in your applications; + +· require distributors and external end users to agree to terms that protect the Distributable Code at least as much as this agreement; and + +· indemnify, defend, and hold harmless Microsoft from any claims, including attorneys’ fees, related to the distribution or use of your applications, except to the extent that any claim is based solely on the Distributable Code. + +c. Distribution Restrictions. You may not: + +· use Microsoft’s trademarks in your applications’ names or in a way that suggests your applications come from or are endorsed by Microsoft; or + +· modify or distribute the source code of any Distributable Code so that any part of it becomes subject to an Excluded License. An Excluded License is one that requires, as a condition of use, modification or distribution of code, that (i) it be disclosed or distributed in source code form; or (ii) others have the right to modify it. + +5. DEVELOPING EXTENSIONS. + +a. Limits on Extensions. You may not develop or enable others to develop extensions for the software (or any other component of the Visual Studio family of products) which circumvent the technical limitations implemented in the software. If Microsoft technically limits or disables extensibility for the software, you may not extend the software by, among other things, loading or injecting into the software any non-Microsoft add-ins, macros, or packages; modifying the software registry settings; or adding features or functionality equivalent to that found in the Visual Studio family of products. + +b. No Degrading the Software. If you develop an extension for the software (or any other component of the Visual Studio family of products), you must test the installation, uninstallation, and operation of your extension to ensure that such processes do not disable any features or adversely affect the functionality of the software (or such component) or of any previous version or edition of thereof. + +6. DATA. + +a. Data Collection. The software may collect information about you and your use of the software, and send that to Microsoft. Microsoft may use this information to provide services and improve our products and services. You may opt out of many of these scenarios, but not all, as described in the software documentation. There are also some features in the software that may enable you and Microsoft to collect data from users of your applications. 
If you use these features, you must comply with applicable law, including providing appropriate notices to users of your applications together with Microsoft’s privacy statement. Our privacy statement is located at https://go.microsoft.com/fwlink/?LinkID=824704. You can learn more about data collection and its use from the software documentation and our privacy statement. Your use of the software operates as your consent to these practices. + +b. Processing of Personal Data. To the extent Microsoft is a processor or subprocessor of personal data in connection with the software, Microsoft makes the commitments in the European Union General Data Protection Regulation Terms of the Online Services Terms to all customers effective May 25, 2018, at https://docs.microsoft.com/en-us/legal/gdpr. + +7. SCOPE OF LICENSE. The software is licensed, not sold. These license terms only give you some rights to use the software. Microsoft reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in these license terms. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. In addition, you may not: + +· work around any technical limitations in the software; + +· reverse engineer, decompile or disassemble the software, or otherwise attempt to derive the source code for the software, except and to the extent required by third party licensing terms governing use of certain open source components that may be included in the software; + +· remove, minimize, block, or modify any notices of Microsoft or its suppliers in the software; + +· use the software in any way that is against the law; + +· share, publish, rent, or lease the software; or + +· provide the software as a stand-alone offering or combine it with any of your applications for others to use. + +8. NOT FOR RESALE SOFTWARE. You may not sell the software if it is marked as “NFR” or “Not for Resale”. + +9. PREVIOUS VERSIONS OR OTHER EDITIONS. These license terms do not supersede your right to use validly licensed previous versions or other editions of the software. You may use the software and those previous versions or other editions of the software concurrently. + +10. PROOF OF LICENSE. If you acquired the software on a disc or other media, your proof of license is the Microsoft certificate of authenticity label, the accompanying product key, and your receipt. If you purchased an online copy of the software, your proof of license is the Microsoft product key you received with your purchase and your receipt and/or being able to access the software service through your Microsoft account. To identify genuine Microsoft software, see www.howtotell.com. + +11. TRANSFER TO A THIRD PARTY. If you are a valid licensee of the software, you may transfer it and this agreement directly to another party. Before the transfer, that party must agree that these license terms apply to the transfer and use of the software. The transfer must include the software, this agreement, the genuine Microsoft product key, and (if applicable) the Proof of License label. The transferor must uninstall all copies of the software after transferring it from the device. The transferor may not retain any copies of the genuine Microsoft product key to be transferred, and may only retain copies of the software if otherwise licensed to do so. 
If you have acquired a non-perpetual license to use the software or if the software is marked Not for Resale you may not transfer the software or the software license agreement to another party. + +12. EXPORT RESTRICTIONS. You must comply with all domestic and international export laws and regulations that apply to the software, which include restrictions on destinations, end users, and end use. For further information on export restrictions, visit www.microsoft.com/exporting. 13. SUPPORT. Microsoft provides support for the software as described at https://support.microsoft.com. + +14. ENTIRE AGREEMENT. These license terms (including the warranty below), and the terms for supplements, updates, Internet-based services, and support services, are the entire agreement for the software and support services. + +15. APPLICABLE LAW. If you acquired the software in the United States, Washington State law applies to interpretation of and claims for breach of this agreement, and the laws of the state where you live apply to all other claims. If you acquire the software in any other country, its laws apply. + +16. CONSUMER RIGHTS; REGIONAL VARIATIONS. These license terms describe certain legal rights. You may have other rights, including consumer rights, under the laws of your state or country. You may also have rights with respect to the party from which you acquired the software. This agreement does not change those other + +rights if the laws of your state or country do not permit it to do so. For example, if you acquired the software in one of the below regions, or if mandatory country law applies, then the following provisions apply to you: + +a) Australia. References to “Limited Warranty” are references to the express warranty provided by Microsoft. This warranty is given in addition to other rights and remedies you may have under law, including your rights and remedies in accordance with the statutory guarantees in the Australian Consumer Law. + +In this section, “goods” refers to the software for which Microsoft provides the express warranty. Our goods come with guarantees that cannot be excluded under the Australian Consumer Law. You are entitled to a replacement or refund for a major failure and compensation for any other reasonably foreseeable loss or damage. You are also entitled to have the goods repaired or replaced if the goods fail to be of acceptable quality and the failure does not amount to a major failure. + +b) Canada. You may stop receiving updates on your device by turning off Internet access. If and when you re-connect to the Internet, the software will resume checking for and installing updates. + +c) Germany and Austria. + +(i) Warranty. The properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. However, Microsoft gives no contractual guarantee in relation to the software. + +(ii) Limitation of Liability. In case of intentional conduct, gross negligence, claims based on the Product Liability Act, as well as, in the case of death or personal or physical injury, Microsoft is liable according to the statutory law. + +Subject to the preceding sentence (ii), Microsoft will only be liable for slight negligence if Microsoft is in breach of such material contractual obligations, the fulfillment of which facilitate the due performance of this agreement, the breach of which would endanger the purpose of this agreement and the compliance with which a party may constantly trust in (so-called "cardinal obligations"). 
In other cases of slight negligence, Microsoft will not be liable for slight negligence. + +************************************************************************* + +LIMITED WARRANTY + +Microsoft warrants that properly licensed software will perform substantially as described in any Microsoft materials that accompany the software. This limited warranty does not cover problems that you cause, that arise when you fail to follow instructions, or that are caused by events beyond Microsoft’s reasonable control. The limited warranty starts when the first user acquires the software, and lasts for one year. Any supplements, updates, or replacement software that you may receive from Microsoft during that year are also covered, but only for the remainder of that one-year period or for 30 days, whichever is longer. Transferring the software will not extend the limited warranty. + +Microsoft gives no other express warranties, guarantees, or conditions. Microsoft excludes all implied warranties and conditions, including those of merchantability, fitness for a particular purpose, and non-infringement. If your local law does not allow the exclusion of implied warranties, then any implied warranties, guarantees, or conditions last only during the term of the limited warranty and are limited as much as your local law allows. If your local law requires a longer limited warranty term, despite this agreement, then that longer term will apply, but you can recover only the remedies this agreement allows. + +If Microsoft breaches its limited warranty, it will, at its election, either: (i) repair or replace the software at no charge, or (ii) accept return of the software (or at its election the Microsoft branded device on which the software was preinstalled) for a refund of the amount paid, if any. These are your only remedies for breach of warranty. This limited warranty gives you specific legal rights, and you may also have other rights which vary from state to state or country to country. + +Except for any repair, replacement, or refund Microsoft may provide, you may not recover under this limited warranty, under any other part of this agreement, or under any theory, any damages or other remedy, including lost profits or direct, consequential, special, indirect, or incidental damages. The damage exclusions and remedy limitations in this agreement apply even if repair, replacement or a refund does not fully compensate you for any losses, if Microsoft knew or should have known about the possibility of the damages, or if the remedy fails of its essential purpose. Some states and countries do not allow the exclusion or limitation of incidental, consequential, or other damages, so those limitations or exclusions may not apply to you. If your local law allows you to recover damages from Microsoft even though this agreement does not, you cannot recover more than you paid for the software (or up to $50 USD if you acquired the software for no charge). + + +Warranty Procedures + +For service or a refund, you must provide a copy of your proof of purchase and comply with Microsoft’s return policies, which might require you to uninstall the software and return it to Microsoft or return the software with the entire Microsoft branded device on which the software is installed; the certificate of authenticity label including the product key (if provided with your device) must remain affixed. + +1. United States and Canada. 
For limited warranty service or information about how to obtain a refund for software acquired in the United States or Canada, contact Microsoft via telephone at (800) MICROSOFT; via mail at Microsoft Customer Service and Support, One Microsoft Way, Redmond, WA 98052-6399; or visit (aka.ms/nareturns). + +2. Europe, Middle East, and Africa. If you acquired the software in Europe, the Middle East, or Africa, Microsoft Ireland Operations Limited makes the limited warranty. To make a claim under the limited warranty, you must contact either Microsoft Ireland Operations Limited, Customer Care Centre, Atrium Building Block B, Carmanhall Road, Sandyford Industrial Estate, Dublin 18, Ireland, or the Microsoft affiliate serving your country (aka.ms/msoffices). + +3. Australia. If you acquired the software in Australia, contact Microsoft to make a claim at 13 20 58; or Microsoft Pty Ltd, 1 Epping Road, North Ryde NSW 2113 Australia. + +4. Other countries. If you acquired the software in another country, contact the Microsoft affiliate serving your country (aka.ms/msoffices). + + +EULAID: VS_2019_ENU.1033 + +------------------------------------------------------------- +Software Released under UNICODE LICENSE V3 + +icudt70.dll Copyright © 1991-Present Unicode, Inc. +icudc70.dll Copyright © 1991-Present Unicode, Inc. + +SPDX-License-Identifier: Unicode-3.0 + +UNICODE LICENSE V3 +COPYRIGHT AND PERMISSION NOTICE + +Copyright © 1991-2023 Unicode, Inc. +NOTICE TO USER: Carefully read the following legal agreement. BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING DATA FILES, AND/OR SOFTWARE, YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE THE DATA FILES OR SOFTWARE. + +Permission is hereby granted, free of charge, to any person obtaining a copy of data files and any associated documentation (the "Data Files") or software and any associated documentation (the "Software") to deal in the Data Files or Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, and/or sell copies of the Data Files or Software, and to permit persons to whom the Data Files or Software are furnished to do so, provided that either (a) this copyright and permission notice appear with all copies of the Data Files or Software, or (b) this copyright and permission notice appear in associated Documentation. + +THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. + +IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA FILES OR SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in these Data Files or Software without prior written authorization of the copyright holder. + +------------------------------------------------------------- + +* Other names and brands may be claimed as the property of others. 
\ No newline at end of file diff --git a/linux/flutter/generated_plugin_registrant.cc b/linux/flutter/generated_plugin_registrant.cc index 8e89f019..59809532 100644 --- a/linux/flutter/generated_plugin_registrant.cc +++ b/linux/flutter/generated_plugin_registrant.cc @@ -8,6 +8,10 @@ #include #include +#include +#include +#include +#include #include void fl_register_plugins(FlPluginRegistry* registry) { @@ -17,6 +21,18 @@ void fl_register_plugins(FlPluginRegistry* registry) { g_autoptr(FlPluginRegistrar) flutter_acrylic_registrar = fl_plugin_registry_get_registrar_for_plugin(registry, "FlutterAcrylicPlugin"); flutter_acrylic_plugin_register_with_registrar(flutter_acrylic_registrar); + g_autoptr(FlPluginRegistrar) irondash_engine_context_registrar = + fl_plugin_registry_get_registrar_for_plugin(registry, "IrondashEngineContextPlugin"); + irondash_engine_context_plugin_register_with_registrar(irondash_engine_context_registrar); + g_autoptr(FlPluginRegistrar) media_kit_libs_linux_registrar = + fl_plugin_registry_get_registrar_for_plugin(registry, "MediaKitLibsLinuxPlugin"); + media_kit_libs_linux_plugin_register_with_registrar(media_kit_libs_linux_registrar); + g_autoptr(FlPluginRegistrar) media_kit_video_registrar = + fl_plugin_registry_get_registrar_for_plugin(registry, "MediaKitVideoPlugin"); + media_kit_video_plugin_register_with_registrar(media_kit_video_registrar); + g_autoptr(FlPluginRegistrar) super_native_extensions_registrar = + fl_plugin_registry_get_registrar_for_plugin(registry, "SuperNativeExtensionsPlugin"); + super_native_extensions_plugin_register_with_registrar(super_native_extensions_registrar); g_autoptr(FlPluginRegistrar) system_theme_registrar = fl_plugin_registry_get_registrar_for_plugin(registry, "SystemThemePlugin"); system_theme_plugin_register_with_registrar(system_theme_registrar); diff --git a/linux/flutter/generated_plugins.cmake b/linux/flutter/generated_plugins.cmake index cc87f3ae..26f3ca23 100644 --- a/linux/flutter/generated_plugins.cmake +++ b/linux/flutter/generated_plugins.cmake @@ -5,10 +5,15 @@ list(APPEND FLUTTER_PLUGIN_LIST desktop_drop flutter_acrylic + irondash_engine_context + media_kit_libs_linux + media_kit_video + super_native_extensions system_theme ) list(APPEND FLUTTER_FFI_PLUGIN_LIST + media_kit_native_event_loop ) set(PLUGIN_BUNDLED_LIBRARIES) diff --git a/macos/Flutter/GeneratedPluginRegistrant.swift b/macos/Flutter/GeneratedPluginRegistrant.swift index dc088718..be4f83c6 100644 --- a/macos/Flutter/GeneratedPluginRegistrant.swift +++ b/macos/Flutter/GeneratedPluginRegistrant.swift @@ -6,13 +6,29 @@ import FlutterMacOS import Foundation import desktop_drop +import device_info_plus +import irondash_engine_context import macos_window_utils +import media_kit_libs_macos_video +import media_kit_video +import package_info_plus import path_provider_foundation +import screen_brightness_macos +import super_native_extensions import system_theme +import wakelock_plus func RegisterGeneratedPlugins(registry: FlutterPluginRegistry) { DesktopDropPlugin.register(with: registry.registrar(forPlugin: "DesktopDropPlugin")) + DeviceInfoPlusMacosPlugin.register(with: registry.registrar(forPlugin: "DeviceInfoPlusMacosPlugin")) + IrondashEngineContextPlugin.register(with: registry.registrar(forPlugin: "IrondashEngineContextPlugin")) MacOSWindowUtilsPlugin.register(with: registry.registrar(forPlugin: "MacOSWindowUtilsPlugin")) + MediaKitLibsMacosVideoPlugin.register(with: registry.registrar(forPlugin: "MediaKitLibsMacosVideoPlugin")) + 
MediaKitVideoPlugin.register(with: registry.registrar(forPlugin: "MediaKitVideoPlugin")) + FPPPackageInfoPlusPlugin.register(with: registry.registrar(forPlugin: "FPPPackageInfoPlusPlugin")) PathProviderPlugin.register(with: registry.registrar(forPlugin: "PathProviderPlugin")) + ScreenBrightnessMacosPlugin.register(with: registry.registrar(forPlugin: "ScreenBrightnessMacosPlugin")) + SuperNativeExtensionsPlugin.register(with: registry.registrar(forPlugin: "SuperNativeExtensionsPlugin")) SystemThemePlugin.register(with: registry.registrar(forPlugin: "SystemThemePlugin")) + WakelockPlusMacosPlugin.register(with: registry.registrar(forPlugin: "WakelockPlusMacosPlugin")) } diff --git a/macos/Podfile.lock b/macos/Podfile.lock index 5d8310ba..79283cb0 100644 --- a/macos/Podfile.lock +++ b/macos/Podfile.lock @@ -1,41 +1,95 @@ PODS: - desktop_drop (0.0.1): - FlutterMacOS + - device_info_plus (0.0.1): + - FlutterMacOS - FlutterMacOS (1.0.0) + - irondash_engine_context (0.0.1): + - FlutterMacOS - macos_window_utils (1.0.0): - FlutterMacOS + - media_kit_libs_macos_video (1.0.4): + - FlutterMacOS + - media_kit_native_event_loop (1.0.0): + - FlutterMacOS + - media_kit_video (0.0.1): + - FlutterMacOS + - package_info_plus (0.0.1): + - FlutterMacOS - path_provider_foundation (0.0.1): - Flutter - FlutterMacOS + - screen_brightness_macos (0.1.0): + - FlutterMacOS + - super_native_extensions (0.0.1): + - FlutterMacOS - system_theme (0.0.1): - FlutterMacOS + - wakelock_plus (0.0.1): + - FlutterMacOS DEPENDENCIES: - desktop_drop (from `Flutter/ephemeral/.symlinks/plugins/desktop_drop/macos`) + - device_info_plus (from `Flutter/ephemeral/.symlinks/plugins/device_info_plus/macos`) - FlutterMacOS (from `Flutter/ephemeral`) + - irondash_engine_context (from `Flutter/ephemeral/.symlinks/plugins/irondash_engine_context/macos`) - macos_window_utils (from `Flutter/ephemeral/.symlinks/plugins/macos_window_utils/macos`) + - media_kit_libs_macos_video (from `Flutter/ephemeral/.symlinks/plugins/media_kit_libs_macos_video/macos`) + - media_kit_native_event_loop (from `Flutter/ephemeral/.symlinks/plugins/media_kit_native_event_loop/macos`) + - media_kit_video (from `Flutter/ephemeral/.symlinks/plugins/media_kit_video/macos`) + - package_info_plus (from `Flutter/ephemeral/.symlinks/plugins/package_info_plus/macos`) - path_provider_foundation (from `Flutter/ephemeral/.symlinks/plugins/path_provider_foundation/darwin`) + - screen_brightness_macos (from `Flutter/ephemeral/.symlinks/plugins/screen_brightness_macos/macos`) + - super_native_extensions (from `Flutter/ephemeral/.symlinks/plugins/super_native_extensions/macos`) - system_theme (from `Flutter/ephemeral/.symlinks/plugins/system_theme/macos`) + - wakelock_plus (from `Flutter/ephemeral/.symlinks/plugins/wakelock_plus/macos`) EXTERNAL SOURCES: desktop_drop: :path: Flutter/ephemeral/.symlinks/plugins/desktop_drop/macos + device_info_plus: + :path: Flutter/ephemeral/.symlinks/plugins/device_info_plus/macos FlutterMacOS: :path: Flutter/ephemeral + irondash_engine_context: + :path: Flutter/ephemeral/.symlinks/plugins/irondash_engine_context/macos macos_window_utils: :path: Flutter/ephemeral/.symlinks/plugins/macos_window_utils/macos + media_kit_libs_macos_video: + :path: Flutter/ephemeral/.symlinks/plugins/media_kit_libs_macos_video/macos + media_kit_native_event_loop: + :path: Flutter/ephemeral/.symlinks/plugins/media_kit_native_event_loop/macos + media_kit_video: + :path: Flutter/ephemeral/.symlinks/plugins/media_kit_video/macos + package_info_plus: + :path: 
Flutter/ephemeral/.symlinks/plugins/package_info_plus/macos path_provider_foundation: :path: Flutter/ephemeral/.symlinks/plugins/path_provider_foundation/darwin + screen_brightness_macos: + :path: Flutter/ephemeral/.symlinks/plugins/screen_brightness_macos/macos + super_native_extensions: + :path: Flutter/ephemeral/.symlinks/plugins/super_native_extensions/macos system_theme: :path: Flutter/ephemeral/.symlinks/plugins/system_theme/macos + wakelock_plus: + :path: Flutter/ephemeral/.symlinks/plugins/wakelock_plus/macos SPEC CHECKSUMS: desktop_drop: 69eeff437544aa619c8db7f4481b3a65f7696898 + device_info_plus: ce1b7762849d3ec103d0e0517299f2db7ad60720 FlutterMacOS: 8f6f14fa908a6fb3fba0cd85dbd81ec4b251fb24 + irondash_engine_context: da62996ee25616d2f01bbeb85dc115d813359478 macos_window_utils: 933f91f64805e2eb91a5bd057cf97cd097276663 + media_kit_libs_macos_video: b3e2bbec2eef97c285f2b1baa7963c67c753fb82 + media_kit_native_event_loop: 81fd5b45192b72f8b5b69eaf5b540f45777eb8d5 + media_kit_video: c75b07f14d59706c775778e4dd47dd027de8d1e5 + package_info_plus: 12f1c5c2cfe8727ca46cbd0b26677728972d9a5b path_provider_foundation: 2b6b4c569c0fb62ec74538f866245ac84301af46 + screen_brightness_macos: 2d6d3af2165592d9a55ffcd95b7550970e41ebda + super_native_extensions: 85efee3a7495b46b04befcfc86ed12069264ebf3 system_theme: c7b9f6659a5caa26c9bc2284da096781e9a6fcbc + wakelock_plus: 4783562c9a43d209c458cb9b30692134af456269 PODFILE CHECKSUM: 16208599a12443d53889ba2270a4985981cfb204 -COCOAPODS: 1.15.2 +COCOAPODS: 1.16.2 diff --git a/macos/Runner.xcodeproj/project.pbxproj b/macos/Runner.xcodeproj/project.pbxproj index 8e01daa8..9d53cd0c 100644 --- a/macos/Runner.xcodeproj/project.pbxproj +++ b/macos/Runner.xcodeproj/project.pbxproj @@ -39,6 +39,16 @@ 0C42C76A2CE386680079F72B /* libopenvino_tensorflow_lite_frontend.2450.dylib in Bundle Framework */ = {isa = PBXBuildFile; fileRef = 0C42C7592CE386520079F72B /* libopenvino_tensorflow_lite_frontend.2450.dylib */; }; 0C42C76B2CE388D90079F72B /* libopenvino_c.2450.dylib in Bundle Framework */ = {isa = PBXBuildFile; fileRef = 0C42C7522CE386520079F72B /* libopenvino_c.2450.dylib */; }; 0C42C76C2CE388DC0079F72B /* libopenvino.2450.dylib in Bundle Framework */ = {isa = PBXBuildFile; fileRef = 0C42C75A2CE386520079F72B /* libopenvino.2450.dylib */; }; + 0C4E1F6C2CECC22800124339 /* libavformat.60.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 0C4E1F692CECC22800124339 /* libavformat.60.dylib */; }; + 0C4E1F6D2CECC22800124339 /* libavutil.58.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 0C4E1F6A2CECC22800124339 /* libavutil.58.dylib */; }; + 0C4E1F6E2CECC22800124339 /* libswresample.4.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 0C4E1F6B2CECC22800124339 /* libswresample.4.dylib */; }; + 0C4E1F6F2CECC22800124339 /* libavcodec.60.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 0C4E1F672CECC22800124339 /* libavcodec.60.dylib */; }; + 0C4E1F702CECC22800124339 /* libavdevice.60.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 0C4E1F682CECC22800124339 /* libavdevice.60.dylib */; }; + 0C4E1F712CECC24900124339 /* libswresample.4.dylib in Bundle Framework */ = {isa = PBXBuildFile; fileRef = 0C4E1F6B2CECC22800124339 /* libswresample.4.dylib */; }; + 0C4E1F722CECC25400124339 /* libavcodec.60.dylib in Bundle Framework */ = {isa = PBXBuildFile; fileRef = 0C4E1F672CECC22800124339 /* libavcodec.60.dylib */; }; + 0C4E1F732CECC25400124339 /* libavdevice.60.dylib in Bundle Framework */ = {isa = PBXBuildFile; fileRef = 0C4E1F682CECC22800124339 /* 
libavdevice.60.dylib */; }; + 0C4E1F742CECC25400124339 /* libavformat.60.dylib in Bundle Framework */ = {isa = PBXBuildFile; fileRef = 0C4E1F692CECC22800124339 /* libavformat.60.dylib */; }; + 0C4E1F752CECC25400124339 /* libavutil.58.dylib in Bundle Framework */ = {isa = PBXBuildFile; fileRef = 0C4E1F6A2CECC22800124339 /* libavutil.58.dylib */; }; 0C5D47382C6F2F9500307B37 /* libmacos_bindings.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 0C5D47372C6F2F9500307B37 /* libmacos_bindings.dylib */; settings = {ATTRIBUTES = (Weak, ); }; }; 0C5D47392C6F2FB200307B37 /* libmacos_bindings.dylib in Resources */ = {isa = PBXBuildFile; fileRef = 0C5D47372C6F2F9500307B37 /* libmacos_bindings.dylib */; }; 0C5D473A2C6F308000307B37 /* libmacos_bindings.dylib in Bundle Framework */ = {isa = PBXBuildFile; fileRef = 0C5D47372C6F2F9500307B37 /* libmacos_bindings.dylib */; settings = {ATTRIBUTES = (CodeSignOnCopy, ); }; }; @@ -117,15 +127,19 @@ dstSubfolderSpec = 10; files = ( 0C42C7672CE386680079F72B /* libopenvino_paddle_frontend.2450.dylib in Bundle Framework */, + 0C4E1F712CECC24900124339 /* libswresample.4.dylib in Bundle Framework */, 0C5D47B32C6F5C1300307B37 /* libopenvino_hetero_plugin.so in Bundle Framework */, + 0C4E1F752CECC25400124339 /* libavutil.58.dylib in Bundle Framework */, 0C42C76C2CE388DC0079F72B /* libopenvino.2450.dylib in Bundle Framework */, 0C42C7662CE386680079F72B /* libopenvino_onnx_frontend.2450.dylib in Bundle Framework */, 0C5D47B12C6F5C0A00307B37 /* libopenvino_auto_batch_plugin.so in Bundle Framework */, 0C5D47B22C6F5C0E00307B37 /* libopenvino_auto_plugin.so in Bundle Framework */, 0C5D473E2C6F35E500307B37 /* libblend2d.dylib in Bundle Framework */, + 0C4E1F732CECC25400124339 /* libavdevice.60.dylib in Bundle Framework */, 0C5D47782C6F398400307B37 /* libopencv_core.407.dylib in Bundle Framework */, 0C42C7642CE386680079F72B /* libopenvino_genai.2450.dylib in Bundle Framework */, 0C5D47B02C6F5C0200307B37 /* libopenvino_arm_cpu_plugin.so in Bundle Framework */, + 0C4E1F742CECC25400124339 /* libavformat.60.dylib in Bundle Framework */, 0C5D47802C6F398400307B37 /* libopencv_videoio.407.dylib in Bundle Framework */, 0C5D47792C6F398400307B37 /* libopencv_features2d.407.dylib in Bundle Framework */, 0C42C7682CE386680079F72B /* libopenvino_pytorch_frontend.2450.dylib in Bundle Framework */, @@ -139,6 +153,7 @@ 0C5D477F2C6F398400307B37 /* libopencv_video.407.dylib in Bundle Framework */, 0C5D47812C6F398400307B37 /* libopencv_ximgproc.407.dylib in Bundle Framework */, 0C5D473A2C6F308000307B37 /* libmacos_bindings.dylib in Bundle Framework */, + 0C4E1F722CECC25400124339 /* libavcodec.60.dylib in Bundle Framework */, 0C42C7692CE386680079F72B /* libopenvino_tensorflow_frontend.2450.dylib in Bundle Framework */, 0C5D47A52C6F3B7700307B37 /* libtbb.12.dylib in Bundle Framework */, 0C5D477C2C6F398400307B37 /* libopencv_imgcodecs.407.dylib in Bundle Framework */, @@ -161,6 +176,11 @@ 0C42C7582CE386520079F72B /* libopenvino_tensorflow_frontend.2450.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopenvino_tensorflow_frontend.2450.dylib; path = ../bindings/libopenvino_tensorflow_frontend.2450.dylib; sourceTree = SOURCE_ROOT; }; 0C42C7592CE386520079F72B /* libopenvino_tensorflow_lite_frontend.2450.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopenvino_tensorflow_lite_frontend.2450.dylib; path = ../bindings/libopenvino_tensorflow_lite_frontend.2450.dylib; sourceTree = SOURCE_ROOT; }; 0C42C75A2CE386520079F72B 
/* libopenvino.2450.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopenvino.2450.dylib; path = ../bindings/libopenvino.2450.dylib; sourceTree = SOURCE_ROOT; }; + 0C4E1F672CECC22800124339 /* libavcodec.60.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libavcodec.60.dylib; path = ../bindings/libavcodec.60.dylib; sourceTree = SOURCE_ROOT; }; + 0C4E1F682CECC22800124339 /* libavdevice.60.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libavdevice.60.dylib; path = ../bindings/libavdevice.60.dylib; sourceTree = SOURCE_ROOT; }; + 0C4E1F692CECC22800124339 /* libavformat.60.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libavformat.60.dylib; path = ../bindings/libavformat.60.dylib; sourceTree = SOURCE_ROOT; }; + 0C4E1F6A2CECC22800124339 /* libavutil.58.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libavutil.58.dylib; path = ../bindings/libavutil.58.dylib; sourceTree = SOURCE_ROOT; }; + 0C4E1F6B2CECC22800124339 /* libswresample.4.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libswresample.4.dylib; path = ../bindings/libswresample.4.dylib; sourceTree = SOURCE_ROOT; }; 0C5D47372C6F2F9500307B37 /* libmacos_bindings.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libmacos_bindings.dylib; path = ../bindings/libmacos_bindings.dylib; sourceTree = ""; }; 0C5D473B2C6F357C00307B37 /* libblend2d.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libblend2d.dylib; path = ../bindings/libblend2d.dylib; sourceTree = ""; }; 0C5D47602C6F382800307B37 /* libopencv_calib3d.407.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libopencv_calib3d.407.dylib; path = ../bindings/libopencv_calib3d.407.dylib; sourceTree = ""; }; @@ -241,6 +261,11 @@ 0C42C75D2CE386520079F72B /* libopenvino_paddle_frontend.2450.dylib in Frameworks */, 0C42C75E2CE386520079F72B /* libopenvino_onnx_frontend.2450.dylib in Frameworks */, 0C42C75F2CE386520079F72B /* libopenvino_c.2450.dylib in Frameworks */, + 0C4E1F6C2CECC22800124339 /* libavformat.60.dylib in Frameworks */, + 0C4E1F6D2CECC22800124339 /* libavutil.58.dylib in Frameworks */, + 0C4E1F6E2CECC22800124339 /* libswresample.4.dylib in Frameworks */, + 0C4E1F6F2CECC22800124339 /* libavcodec.60.dylib in Frameworks */, + 0C4E1F702CECC22800124339 /* libavdevice.60.dylib in Frameworks */, 0C42C7602CE386520079F72B /* libopenvino_genai.2450.dylib in Frameworks */, 0C42C7612CE386520079F72B /* libopenvino.2450.dylib in Frameworks */, 0C42C7622CE386520079F72B /* libopenvino_ir_frontend.2450.dylib in Frameworks */, @@ -361,6 +386,11 @@ 0C5D47642C6F397900307B37 /* libopencv_ximgproc.407.dylib */, 0C5D473B2C6F357C00307B37 /* libblend2d.dylib */, 0C5D47372C6F2F9500307B37 /* libmacos_bindings.dylib */, + 0C4E1F672CECC22800124339 /* libavcodec.60.dylib */, + 0C4E1F682CECC22800124339 /* libavdevice.60.dylib */, + 0C4E1F692CECC22800124339 /* libavformat.60.dylib */, + 0C4E1F6A2CECC22800124339 /* libavutil.58.dylib */, + 0C4E1F6B2CECC22800124339 /* libswresample.4.dylib */, 11E6C6B7198D7B3B20F4A75C /* Pods_Runner.framework */, CB5E7865DB70376BADAAEAE6 /* Pods_RunnerTests.framework */, ); diff --git a/openvino_bindings/README.md b/openvino_bindings/README.md index 952b68b7..4715bef7 100644 --- a/openvino_bindings/README.md +++ 
b/openvino_bindings/README.md @@ -95,7 +95,11 @@ A step by step guide can be found [here]('./docs/WINDOWS.md'). [Install OpenVINO Runtime 24.5.0]( https://docs.openvino.ai/2024/get-started/install-openvino.html?PACKAGE=OPENVINO_GENAI&VERSION=v_2024_4_0&OP_SYSTEM=WINDOWS&DISTRIBUTION=ARCHIVE) with GenAI flavor in `C:/Intel/openvino_24.5.0`. Build OpenCV in `C:/opencv/build`. -Install ffmpeg: `vcpkg install ffmpeg`. +Install ffmpeg: +```sh +cd openvino_bindings/third_party +vcpkg install +``` Install [mediapipe requirements](https://ai.google.dev/edge/mediapipe/framework/getting_started/install#installing_on_windows) and setup the environment variables. @@ -108,7 +112,7 @@ The DLLs (with dependencies) will be in `bazel-bin/windows_bindings.tar` [Install OpenVINO Runtime 24.5.0](https://docs.openvino.ai/2024/get-started/install-openvino.html?PACKAGE=OPENVINO_GENAI&VERSION=v_2024_4_0&OP_SYSTEM=MACOS&DISTRIBUTION=ARCHIVE) with GenAI flavor in `/opt/intel/openvino_24.5.0` and symlink to `/opt/intel/openvino`. Install OpenCV: `brew install opencv` -Install ffmpeg: `brew install ffmpeg@6` +Install ffmpeg: `brew install ffmpeg@6 && brew link ffmpeg@6` Run: `bazel build :macos_bindings` diff --git a/openvino_bindings/WORKSPACE b/openvino_bindings/WORKSPACE index 17409650..1238a9d8 100644 --- a/openvino_bindings/WORKSPACE +++ b/openvino_bindings/WORKSPACE @@ -106,23 +106,23 @@ git_repository( tag = "v3.11.3", ) -#new_local_repository( -# name = "linux_ffmpeg", -# build_file = "//third_party/ffmpeg:linux.BUILD", -# path = "/usr" -#) -# +new_local_repository( + name = "linux_ffmpeg", + build_file = "//third_party/ffmpeg:linux.BUILD", + path = "/usr" +) + new_local_repository( name = "mac_ffmpeg", build_file = "//third_party/ffmpeg:mac.BUILD", - path = "/opt/homebrew/Cellar/ffmpeg@6/6.1.2_3", -) -# -#new_local_repository( -# name = "windows_ffmpeg", -# build_file = "//third_party/ffmpeg:windows.BUILD", -# path = "C:/vcpkg/packages/ffmpeg_x64-windows", -#) + path = "/opt/homebrew/opt/ffmpeg@6", +) + +new_local_repository( + name = "windows_ffmpeg", + build_file = "//third_party/ffmpeg:windows.BUILD", + path = "./third_party/vcpkg_installed/x64-windows", +) http_archive( name = "rules_pkg", diff --git a/openvino_bindings/security.md b/openvino_bindings/security.md deleted file mode 100644 index d5f1e5ea..00000000 --- a/openvino_bindings/security.md +++ /dev/null @@ -1,6 +0,0 @@ -# Security Policy -Intel is committed to rapidly addressing security vulnerabilities affecting our customers and providing clear guidance on the solution, impact, severity and mitigation. - -## Reporting a Vulnerability -Please report any security vulnerabilities in this project [utilizing the guidelines here](https://www.intel.com/content/www/us/en/security-center/vulnerability-handling-guidelines.html). 
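
Note on the ffmpeg sourcing changes above (README and WORKSPACE): on Windows, ffmpeg is no longer installed globally with `vcpkg install ffmpeg`; instead `vcpkg install` is run in manifest mode from `openvino_bindings/third_party` against the `vcpkg.json` added later in this patch (which pins ffmpeg to 6.1.1 via an override), and the `@windows_ffmpeg` repository in `WORKSPACE` now points at the resulting `./third_party/vcpkg_installed/x64-windows` tree. On macOS, `@mac_ffmpeg` switches from a specific Cellar revision to the version-stable `/opt/homebrew/opt/ffmpeg@6` prefix. A minimal sketch of the Windows flow, assuming vcpkg is already bootstrapped and on `PATH` (the listing step is purely illustrative):

```sh
# Sketch only: paths mirror the README/WORKSPACE changes in this patch.
cd openvino_bindings/third_party
vcpkg install                       # manifest mode; resolves ffmpeg 6.1.1 from vcpkg.json
ls vcpkg_installed/x64-windows      # tree consumed by the @windows_ffmpeg new_local_repository
```
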
- diff --git a/openvino_bindings/src/BUILD b/openvino_bindings/src/BUILD index 13b1000b..756d22dc 100644 --- a/openvino_bindings/src/BUILD +++ b/openvino_bindings/src/BUILD @@ -9,7 +9,9 @@ cc_library( "//src/utils:utils", "//src/image:image_inference", "//src/llm:llm_inference", + "//src/audio:speech_to_text", "//src/mediapipe:graph_runner", + "//src/tti:tti_inference", ], ) @@ -20,6 +22,7 @@ cc_library( deps = [ ":bindings_deps", "//src/utils:metrics", + "//src/utils:tti_metrics", ], copts = ["-fPIC"], alwayslink=1, diff --git a/openvino_bindings/src/audio/speech_to_text.cc b/openvino_bindings/src/audio/speech_to_text.cc index e39cb4d4..4a2e1012 100644 --- a/openvino_bindings/src/audio/speech_to_text.cc +++ b/openvino_bindings/src/audio/speech_to_text.cc @@ -8,7 +8,7 @@ void SpeechToText::load_video(std::string video_path) { audio_grabber = std::make_unique(video_path); } -ov::genai::DecodedResults SpeechToText::transcribe(int start, int duration, std::string language) { +ov::genai::WhisperDecodedResults SpeechToText::transcribe(int start, int duration, std::string language) { auto video_duration = audio_grabber->get_duration(); if (start > video_duration) { throw api_error(SpeechToTextChunkOutOfBounds); @@ -23,6 +23,7 @@ ov::genai::DecodedResults SpeechToText::transcribe(int start, int duration, std: if (data.empty()) { throw api_error(SpeechToTextChunkHasNoData); } + config.return_timestamps = true; config.max_new_tokens = 100; if (!language.empty()){ config.language = language; diff --git a/openvino_bindings/src/audio/speech_to_text.h b/openvino_bindings/src/audio/speech_to_text.h index c0c7c1ed..f119ca72 100644 --- a/openvino_bindings/src/audio/speech_to_text.h +++ b/openvino_bindings/src/audio/speech_to_text.h @@ -1,6 +1,7 @@ #ifndef SPEECH_TO_TEXT_H_ #define SPEECH_TO_TEXT_H_ + #include #include "openvino/genai/whisper_pipeline.hpp" #include "audio_grabber.h" @@ -14,7 +15,7 @@ class SpeechToText { SpeechToText(std::string model_path, std::string device): pipe(model_path, device), config(model_path + "/generation_config.json") {} void load_video(std::string video_path); int64_t video_duration(); - ov::genai::DecodedResults transcribe(int start, int duration, std::string language); + ov::genai::WhisperDecodedResults transcribe(int start, int duration, std::string language); }; diff --git a/openvino_bindings/src/bindings.cc b/openvino_bindings/src/bindings.cc index fcefbec0..93ccfc42 100644 --- a/openvino_bindings/src/bindings.cc +++ b/openvino_bindings/src/bindings.cc @@ -4,11 +4,12 @@ #include #include -//#include "src/audio/speech_to_text.h" +#include "src/audio/speech_to_text.h" #include "src/image/image_inference.h" #include "src/mediapipe/graph_runner.h" #include "src/mediapipe/serialization/serialization_calculators.h" #include "src/llm/llm_inference.h" +#include "src/tti/tti_inference.h" #include "src/utils/errors.h" #include "src/utils/utils.h" #include "src/utils/status.h" @@ -39,6 +40,19 @@ void freeStatusOrImageInference(StatusOrString *status) { delete status; } +void freeStatusOrModelResponse(StatusOrModelResponse *status) { + //std::cout << "Freeing StatusOrImageInference" << std::endl; + delete status; +} + +void freeStatusOrWhisperModelResponse(StatusOrWhisperModelResponse *status) { + if (status->status == StatusEnum::OkStatus) { + delete [] status->value; + status->value = NULL; // Prevent dangling pointers + } + delete status; +} + void freeStatusOrDevices(StatusOrDevices *status) { if (status->status == StatusEnum::OkStatus) { delete [] status->value; @@ -235,6 
+249,48 @@ Status* llmInferenceClose(CLLMInference instance) { return new Status{OkStatus}; } + +StatusOrTTIInference* ttiInferenceOpen(const char* model_path, const char* device) { + try { + auto instance = new TTIInference(model_path, device); + return new StatusOrTTIInference{OkStatus, "", instance}; + } catch (...) { + auto except = handle_exceptions(); + return new StatusOrTTIInference{except->status, except->message}; + } +} + +StatusOrTTIModelResponse* ttiInferencePrompt(CTTIInference instance, const char* message, int width, int height, int rounds) { + try { + auto inference = reinterpret_cast(instance); + auto result = inference->prompt(message, width, height, rounds); + auto text = result.string; + auto metrics = result.metrics; + return new StatusOrTTIModelResponse{OkStatus, {}, metrics, text}; + } catch (...) { + auto except = handle_exceptions(); + return new StatusOrTTIModelResponse{except->status, except->message, {}, {}}; + } +} + +StatusOrBool* ttiInferenceHasModelIndex(CTTIInference instance) { + try { + bool has_chat_template = reinterpret_cast(instance)->has_model_index(); + return new StatusOrBool{OkStatus, "", has_chat_template}; + } catch (...) { + auto except = handle_exceptions(); + return new StatusOrBool{except->status, except->message}; + } +} + +Status* ttiInferenceClose(CTTIInference instance) { + auto inference = reinterpret_cast(instance); + //inference->stop(); + delete inference; + return new Status{OkStatus}; +} + + StatusOrGraphRunner* graphRunnerOpen(const char* graph) { try { auto instance = new GraphRunner(); @@ -290,48 +346,54 @@ Status* graphRunnerStop(CGraphRunner instance) { } } -//StatusOrSpeechToText* speechToTextOpen(const char* model_path, const char* device) { -// try { -// auto instance = new SpeechToText(model_path, device); -// return new StatusOrSpeechToText{OkStatus, "", instance}; -// } catch (...) { -// auto except = handle_exceptions(); -// return new StatusOrSpeechToText{except->status, except->message}; -// } -//} -// -//Status* speechToTextLoadVideo(CSpeechToText instance, const char* video_path) { -// try { -// auto object = reinterpret_cast(instance); -// object->load_video(video_path); -// return new Status{OkStatus, ""}; -// } catch (...) { -// return handle_exceptions(); -// } -//} -// -//StatusOrInt* speechToTextVideoDuration(CSpeechToText instance) { -// try { -// auto object = reinterpret_cast(instance); -// object->video_duration(); -// // Deal with long in the future -// return new StatusOrInt{OkStatus, "", (int)object->video_duration()}; -// } catch (...) { -// return new StatusOrInt{OkStatus, ""}; -// } -//} -// -//StatusOrModelResponse* speechToTextTranscribe(CSpeechToText instance, int start, int duration, const char* language) { -// try { -// auto object = reinterpret_cast(instance); -// auto result = object->transcribe(start, duration, language); -// std::string text = result; -// return new StatusOrModelResponse{OkStatus, "", convertToMetricsStruct(result.perf_metrics), strdup(text.c_str())}; -// } catch (...) { -// auto except = handle_exceptions(); -// return new StatusOrModelResponse{except->status, except->message}; -// } -//} +StatusOrSpeechToText* speechToTextOpen(const char* model_path, const char* device) { + try { + auto instance = new SpeechToText(model_path, device); + return new StatusOrSpeechToText{OkStatus, "", instance}; + } catch (...) 
{ + auto except = handle_exceptions(); + return new StatusOrSpeechToText{except->status, except->message}; + } +} + +Status* speechToTextLoadVideo(CSpeechToText instance, const char* video_path) { + try { + auto object = reinterpret_cast(instance); + object->load_video(video_path); + return new Status{OkStatus, ""}; + } catch (...) { + return handle_exceptions(); + } +} + +StatusOrInt* speechToTextVideoDuration(CSpeechToText instance) { + try { + auto object = reinterpret_cast(instance); + object->video_duration(); + // Deal with long in the future + return new StatusOrInt{OkStatus, "", (int)object->video_duration()}; + } catch (...) { + return new StatusOrInt{OkStatus, ""}; + } +} + +StatusOrWhisperModelResponse* speechToTextTranscribe(CSpeechToText instance, int start, int duration, const char* language) { + try { + auto object = reinterpret_cast(instance); + auto transcription_result = object->transcribe(start, duration, language); + auto chunks = transcription_result.chunks.value(); + std::string text = transcription_result; + TranscriptionChunk* result = new TranscriptionChunk[chunks.size()]; + for (int i = 0; i < chunks.size(); i++) { + auto r = chunks[i]; + result[i] = TranscriptionChunk{r.start_ts + start, r.end_ts + start, strdup(r.text.c_str())}; + } + return new StatusOrWhisperModelResponse{OkStatus, "", convertToMetricsStruct(transcription_result.perf_metrics), result, (int)chunks.size(), strdup(text.c_str())}; + } catch (...) { + auto except = handle_exceptions(); + return new StatusOrWhisperModelResponse{except->status, except->message}; + } +} //void report_rss() { // struct rusage r_usage; diff --git a/openvino_bindings/src/bindings.h b/openvino_bindings/src/bindings.h index e496c5aa..5781dda4 100644 --- a/openvino_bindings/src/bindings.h +++ b/openvino_bindings/src/bindings.h @@ -15,17 +15,24 @@ #include #include "src/utils/status.h" #include "src/utils/metrics.h" +#include "utils/tti_metrics.h" typedef void* CImageInference; typedef void* CGraphRunner; typedef void* CSpeechToText; typedef void* CLLMInference; +typedef void* CTTIInference; typedef struct { const char* id; const char* name; } Device; +typedef struct { + float start_ts; + float end_ts; + const char* text; +} TranscriptionChunk; typedef struct { enum StatusEnum status; @@ -74,6 +81,12 @@ typedef struct { CLLMInference value; } StatusOrLLMInference; +typedef struct { + enum StatusEnum status; + const char* message; + CLLMInference value; +} StatusOrTTIInference; + typedef struct { enum StatusEnum status; const char* message; @@ -81,6 +94,22 @@ typedef struct { const char* value; } StatusOrModelResponse; +typedef struct { + enum StatusEnum status; + const char* message; + Metrics metrics; + TranscriptionChunk* value; + int size; + const char* text; +} StatusOrWhisperModelResponse; + +typedef struct { + enum StatusEnum status; + const char* message; + TTIMetrics metrics; + const char* value; +} StatusOrTTIModelResponse; + typedef struct { enum StatusEnum status; const char* message; @@ -96,6 +125,8 @@ EXPORT void freeStatusOrString(StatusOrString *status); EXPORT void freeStatusOrImageInference(StatusOrImageInference *status); EXPORT void freeStatusOrLLMInference(StatusOrLLMInference *status); EXPORT void freeStatusOrSpeechToText(StatusOrSpeechToText *status); +EXPORT void freeStatusOrModelResponse(StatusOrModelResponse *status); +EXPORT void freeStatusOrWhisperModelResponse(StatusOrWhisperModelResponse *status); EXPORT void freeStatusOrDevices(StatusOrDevices *status); EXPORT StatusOrImageInference* 
imageInferenceOpen(const char* model_path, const char* task, const char* device, const char* label_definitions_json); @@ -117,16 +148,21 @@ EXPORT Status* llmInferenceForceStop(CLLMInference instance); EXPORT StatusOrBool* llmInferenceHasChatTemplate(CLLMInference instance); EXPORT Status* llmInferenceClose(CLLMInference instance); +EXPORT StatusOrTTIInference* ttiInferenceOpen(const char* model_path, const char* device); +EXPORT StatusOrTTIModelResponse* ttiInferencePrompt(CTTIInference instance, const char* message, int width, int height, int rounds); +EXPORT StatusOrBool* ttiInferenceHasModelIndex(CTTIInference instance); +EXPORT Status* ttiInferenceClose(CLLMInference instance); + EXPORT StatusOrGraphRunner* graphRunnerOpen(const char* graph); EXPORT Status* graphRunnerQueueImage(CGraphRunner instance, const char* name, int timestamp, unsigned char* image_data, const size_t data_length); EXPORT Status* graphRunnerQueueSerializationOutput(CGraphRunner instance, const char* name, int timestamp, bool json, bool csv, bool overlay); EXPORT StatusOrString* graphRunnerGet(CGraphRunner instance); EXPORT Status* graphRunnerStop(CGraphRunner instance); -//EXPORT StatusOrSpeechToText* speechToTextOpen(const char* model_path, const char* device); -//EXPORT Status* speechToTextLoadVideo(CSpeechToText instance, const char* video_path); -//EXPORT StatusOrInt* speechToTextVideoDuration(CSpeechToText instance); -//EXPORT StatusOrModelResponse* speechToTextTranscribe(CSpeechToText instance, int start, int duration, const char* language); +EXPORT StatusOrSpeechToText* speechToTextOpen(const char* model_path, const char* device); +EXPORT Status* speechToTextLoadVideo(CSpeechToText instance, const char* video_path); +EXPORT StatusOrInt* speechToTextVideoDuration(CSpeechToText instance); +EXPORT StatusOrWhisperModelResponse* speechToTextTranscribe(CSpeechToText instance, int start, int duration, const char* language); EXPORT StatusOrDevices* getAvailableDevices(); Status* handle_exceptions(); diff --git a/openvino_bindings/src/tti/BUILD b/openvino_bindings/src/tti/BUILD new file mode 100644 index 00000000..27c89e32 --- /dev/null +++ b/openvino_bindings/src/tti/BUILD @@ -0,0 +1,33 @@ +cc_library( + name = "tti_inference", + srcs = [ + "tti_inference.cc", + ], + hdrs = [ + "tti_inference.h", + ], + visibility = ["//visibility:public"], + deps = [ + "//src/image:serialization", + "//src/utils", + "//src/utils:errors", + "//src/utils:tti_metrics", + "//third_party:opencv", + "//third_party:openvino", + "@nlohmann_json//:json", + ], +) + +cc_test( + name = "tti_inference_test", + srcs = [ + "tti_inference_test.cc", + ], + data = [ + "//data:models", + ], + deps = [ + ":tti_inference", + "@gtest//:gtest_main", + ], +) diff --git a/openvino_bindings/src/tti/tti_inference.cc b/openvino_bindings/src/tti/tti_inference.cc new file mode 100644 index 00000000..23d3cabc --- /dev/null +++ b/openvino_bindings/src/tti/tti_inference.cc @@ -0,0 +1,63 @@ +#include +#include + +#include "tti_inference.h" +#include + +#include "src/image/json_serialization.h" + +StringWithMetrics TTIInference::prompt(std::string message, int width, int height, int rounds) +{ + const auto t1 = std::chrono::steady_clock::now(); + + const ov::Tensor tensor = pipe.generate(message, + ov::genai::width(width), + ov::genai::height(height), + ov::genai::num_inference_steps(rounds), + ov::genai::num_images_per_prompt(1)); + + + auto* tensor_data = tensor.data(); + + // Get the shape of the tensor [1, 512, 512, 3] + const auto shape = tensor.get_shape(); 
+ const auto batch_size_ = shape[0]; + const auto height_ = shape[1]; + const auto width_ = shape[2]; + const auto channels_ = shape[3]; + + // Ensure the tensor has the shape [1, 512, 512, 3] + if (batch_size_ != 1 || channels_ != 3) + { + std::cerr << "Unsupported tensor shape" << std::endl; + return StringWithMetrics{"", {}}; + } + + // Reshape the uint8_t data into a 512x512 3-channel OpenCV Mat + const cv::Mat image(static_cast(height_), static_cast(width_), CV_8UC3, tensor_data); + const auto imgDataString = geti::base64_encode_mat(image); + + // Make Metrics + const auto t2 = std::chrono::steady_clock::now(); + + const auto generate_time = std::chrono::duration_cast(t2 - t1).count(); + + const auto load_time_f = static_cast(load_time); + const auto generate_time_f = static_cast(generate_time); + const auto metrics = TTIMetrics{ + !std::isnan(load_time_f) ? load_time_f : 0.0f, + !std::isnan(generate_time_f) ? generate_time_f : 0.0f, + }; + + // Return + auto res = StringWithMetrics{strdup(imgDataString.c_str()), metrics}; + return res; +} + + +bool TTIInference::has_model_index() const +{ + std::ifstream ifs(model_path + "/model_index.json"); + auto r = nlohmann::json::parse(ifs); + return r.find("chat_template") != r.end(); +} diff --git a/openvino_bindings/src/tti/tti_inference.h b/openvino_bindings/src/tti/tti_inference.h new file mode 100644 index 00000000..90ae2bc9 --- /dev/null +++ b/openvino_bindings/src/tti/tti_inference.h @@ -0,0 +1,44 @@ +#ifndef TTI_INFERENCE_H_ +#define TTI_INFERENCE_H_ + +#include +#include +#include +#include // Include for time measurement + +#include "src/utils/tti_metrics.h" +#include "openvino/genai/image_generation/text2image_pipeline.hpp" + +class TTIInference +{ + long load_time = 9999; + ov::genai::Text2ImagePipeline pipe; + ov::genai::ChatHistory history; + std::function streamer; + +public: + TTIInference(std::string model_path, std::string device): + // Use a lambda to initialize the 'pipe' and measure the construction time in one step + pipe([&]() { + auto start_time = std::chrono::steady_clock::now(); + ov::genai::Text2ImagePipeline temp_pipe(model_path, device); // Construct the pipe + auto end_time = std::chrono::steady_clock::now(); + + // Calculate load time + this->load_time = std::chrono::duration_cast(end_time - start_time).count(); + + return temp_pipe; // Return the initialized pipe + }()), + model_path(model_path) + { + // Constructor body can remain empty unless additional initialization is required + } + + StringWithMetrics prompt(std::string message, int width, int height, int rounds); + bool has_model_index() const; + +private: + std::string model_path; +}; + +#endif // TTI_INFERENCE_H_ diff --git a/openvino_bindings/src/tti/tti_inference_test.cc b/openvino_bindings/src/tti/tti_inference_test.cc new file mode 100644 index 00000000..95ec400f --- /dev/null +++ b/openvino_bindings/src/tti/tti_inference_test.cc @@ -0,0 +1,9 @@ +#include "gtest/gtest.h" +#include "tti_inference.h" + +TEST(TTIInference, Sanity) { + std::string model_path = "data/TinyLlama-1.1B-Chat-v1.0-int4-ov"; + LLMInference inference(model_path, "CPU"); + std::string output = inference.prompt("What is the color of the sun?", 1.0f, 1.0f); + EXPECT_STREQ(output.c_str(), "The color of the sun is a beautiful and awe-inspiring yellow-amber color. It is a natural, radiant, and beautiful color that is associated with warmth, light, and lightning. 
The sun is often depicted as a radiant, yellow-amber ball of light that shines down on the earth, illuminating the world and inspiring wonder and awe in all who see it."); +} diff --git a/openvino_bindings/src/utils/BUILD b/openvino_bindings/src/utils/BUILD index 88a5298b..8936bd5a 100644 --- a/openvino_bindings/src/utils/BUILD +++ b/openvino_bindings/src/utils/BUILD @@ -14,26 +14,34 @@ cc_library( name = "status", hdrs = [ "status.h", - ] + ], ) cc_library( name = "metrics", hdrs = [ - "metrics.h" + "metrics.h", ], ) cc_library( - name = "utils", + name = "tti_metrics", hdrs = [ - "utils.h" + "tti_metrics.h", ], +) + +cc_library( + name = "utils", srcs = [ - "utils.cc" + "utils.cc", + ], + hdrs = [ + "utils.h", ], deps = [ - "//third_party:openvino", ":metrics", + ":tti_metrics", + "//third_party:openvino", ], ) diff --git a/openvino_bindings/src/utils/tti_metrics.h b/openvino_bindings/src/utils/tti_metrics.h new file mode 100644 index 00000000..a6bb9756 --- /dev/null +++ b/openvino_bindings/src/utils/tti_metrics.h @@ -0,0 +1,18 @@ +// +// Created by akramer on 24-10-24. +// + +#ifndef TTI_METRICS_H +#define TTI_METRICS_H + +typedef struct { + float load_time; + float generate_time; +} TTIMetrics; + +typedef struct { + const char* string; + TTIMetrics metrics; +} StringWithMetrics; + +#endif //TTI_METRICS_H diff --git a/openvino_bindings/third_party/.gitignore b/openvino_bindings/third_party/.gitignore new file mode 100644 index 00000000..8a1403ea --- /dev/null +++ b/openvino_bindings/third_party/.gitignore @@ -0,0 +1 @@ +vcpkg_installed diff --git a/openvino_bindings/third_party/vcpkg.json b/openvino_bindings/third_party/vcpkg.json new file mode 100644 index 00000000..2497f47c --- /dev/null +++ b/openvino_bindings/third_party/vcpkg.json @@ -0,0 +1,10 @@ +{ + "name": "openvinotestdrivebindings", + "builtin-baseline": "c8582b4d83dbd36e1bebc08bf166b5eb807996b0", + "dependencies": [ + "ffmpeg" + ], + "overrides": [ + { "name": "ffmpeg", "version": "6.1.1" } + ] +} diff --git a/preview.png b/preview.png deleted file mode 100644 index 9d07d1e0..00000000 Binary files a/preview.png and /dev/null differ diff --git a/pubspec.lock b/pubspec.lock index 0bb8d121..e5fd0af8 100644 --- a/pubspec.lock +++ b/pubspec.lock @@ -230,6 +230,14 @@ packages: url: "https://pub.dev" source: hosted version: "2.3.7" + dbus: + dependency: transitive + description: + name: dbus + sha256: "365c771ac3b0e58845f39ec6deebc76e3276aa9922b0cc60840712094d9047ac" + url: "https://pub.dev" + source: hosted + version: "0.7.10" desktop_drop: dependency: "direct main" description: @@ -238,6 +246,22 @@ packages: url: "https://pub.dev" source: hosted version: "0.4.4" + device_info_plus: + dependency: transitive + description: + name: device_info_plus + sha256: a7fd703482b391a87d60b6061d04dfdeab07826b96f9abd8f5ed98068acc0074 + url: "https://pub.dev" + source: hosted + version: "10.1.2" + device_info_plus_platform_interface: + dependency: transitive + description: + name: device_info_plus_platform_interface + sha256: "282d3cf731045a2feb66abfe61bbc40870ae50a3ed10a4d3d217556c35c8c2ba" + url: "https://pub.dev" + source: hosted + version: "7.0.1" dio: dependency: "direct main" description: @@ -481,14 +505,30 @@ packages: url: "https://pub.dev" source: hosted version: "1.0.4" + irondash_engine_context: + dependency: transitive + description: + name: irondash_engine_context + sha256: cd7b769db11a2b5243b037c8a9b1ecaef02e1ae27a2d909ffa78c1dad747bb10 + url: "https://pub.dev" + source: hosted + version: "0.5.4" + irondash_message_channel: + 
dependency: transitive + description: + name: irondash_message_channel + sha256: b4101669776509c76133b8917ab8cfc704d3ad92a8c450b92934dd8884a2f060 + url: "https://pub.dev" + source: hosted + version: "0.7.0" js: dependency: transitive description: name: js - sha256: c1b2e9b5ea78c45e1a0788d29606ba27dc5f71f019f32ca5140f61ef071838cf + sha256: f2c445dce49627136094980615a031419f7f3eb393237e4ecd97ac15dea343f3 url: "https://pub.dev" source: hosted - version: "0.7.1" + version: "0.6.7" json_annotation: dependency: transitive description: @@ -585,6 +625,78 @@ packages: url: "https://pub.dev" source: hosted version: "2.6.0" + media_kit: + dependency: "direct main" + description: + name: media_kit + sha256: "1f1deee148533d75129a6f38251ff8388e33ee05fc2d20a6a80e57d6051b7b62" + url: "https://pub.dev" + source: hosted + version: "1.1.11" + media_kit_libs_android_video: + dependency: transitive + description: + name: media_kit_libs_android_video + sha256: "9dd8012572e4aff47516e55f2597998f0a378e3d588d0fad0ca1f11a53ae090c" + url: "https://pub.dev" + source: hosted + version: "1.3.6" + media_kit_libs_ios_video: + dependency: transitive + description: + name: media_kit_libs_ios_video + sha256: b5382994eb37a4564c368386c154ad70ba0cc78dacdd3fb0cd9f30db6d837991 + url: "https://pub.dev" + source: hosted + version: "1.1.4" + media_kit_libs_linux: + dependency: transitive + description: + name: media_kit_libs_linux + sha256: e186891c31daa6bedab4d74dcdb4e8adfccc7d786bfed6ad81fe24a3b3010310 + url: "https://pub.dev" + source: hosted + version: "1.1.3" + media_kit_libs_macos_video: + dependency: transitive + description: + name: media_kit_libs_macos_video + sha256: f26aa1452b665df288e360393758f84b911f70ffb3878032e1aabba23aa1032d + url: "https://pub.dev" + source: hosted + version: "1.1.4" + media_kit_libs_video: + dependency: "direct main" + description: + name: media_kit_libs_video + sha256: "20bb4aefa8fece282b59580e1cd8528117297083a6640c98c2e98cfc96b93288" + url: "https://pub.dev" + source: hosted + version: "1.0.5" + media_kit_libs_windows_video: + dependency: transitive + description: + name: media_kit_libs_windows_video + sha256: "32654572167825c42c55466f5d08eee23ea11061c84aa91b09d0e0f69bdd0887" + url: "https://pub.dev" + source: hosted + version: "1.0.10" + media_kit_native_event_loop: + dependency: transitive + description: + name: media_kit_native_event_loop + sha256: "7d82e3b3e9ded5c35c3146c5ba1da3118d1dd8ac3435bac7f29f458181471b40" + url: "https://pub.dev" + source: hosted + version: "1.0.9" + media_kit_video: + dependency: "direct main" + description: + name: media_kit_video + sha256: "2cc3b966679963ba25a4ce5b771e532a521ebde7c6aa20e9802bec95d9916c8f" + url: "https://pub.dev" + source: hosted + version: "1.2.5" meta: dependency: transitive description: @@ -625,6 +737,22 @@ packages: url: "https://pub.dev" source: hosted version: "2.1.0" + package_info_plus: + dependency: transitive + description: + name: package_info_plus + sha256: da8d9ac8c4b1df253d1a328b7bf01ae77ef132833479ab40763334db13b91cce + url: "https://pub.dev" + source: hosted + version: "8.1.1" + package_info_plus_platform_interface: + dependency: transitive + description: + name: package_info_plus_platform_interface + sha256: ac1f4a4847f1ade8e6a87d1f39f5d7c67490738642e2542f559ec38c37489a66 + url: "https://pub.dev" + source: hosted + version: "3.0.1" path: dependency: "direct main" description: @@ -697,6 +825,14 @@ packages: url: "https://pub.dev" source: hosted version: "6.0.2" + pixel_snap: + dependency: transitive + description: + name: pixel_snap 
+ sha256: "677410ea37b07cd37ecb6d5e6c0d8d7615a7cf3bd92ba406fd1ac57e937d1fb0" + url: "https://pub.dev" + source: hosted + version: "0.1.5" platform: dependency: transitive description: @@ -769,6 +905,62 @@ packages: url: "https://pub.dev" source: hosted version: "4.1.0" + safe_local_storage: + dependency: transitive + description: + name: safe_local_storage + sha256: ede4eb6cb7d88a116b3d3bf1df70790b9e2038bc37cb19112e381217c74d9440 + url: "https://pub.dev" + source: hosted + version: "1.0.2" + screen_brightness: + dependency: transitive + description: + name: screen_brightness + sha256: ed8da4a4511e79422fc1aa88138e920e4008cd312b72cdaa15ccb426c0faaedd + url: "https://pub.dev" + source: hosted + version: "0.2.2+1" + screen_brightness_android: + dependency: transitive + description: + name: screen_brightness_android + sha256: "3df10961e3a9e968a5e076fe27e7f4741fa8a1d3950bdeb48cf121ed529d0caf" + url: "https://pub.dev" + source: hosted + version: "0.1.0+2" + screen_brightness_ios: + dependency: transitive + description: + name: screen_brightness_ios + sha256: "99adc3ca5490b8294284aad5fcc87f061ad685050e03cf45d3d018fe398fd9a2" + url: "https://pub.dev" + source: hosted + version: "0.1.0" + screen_brightness_macos: + dependency: transitive + description: + name: screen_brightness_macos + sha256: "64b34e7e3f4900d7687c8e8fb514246845a73ecec05ab53483ed025bd4a899fd" + url: "https://pub.dev" + source: hosted + version: "0.1.0+1" + screen_brightness_platform_interface: + dependency: transitive + description: + name: screen_brightness_platform_interface + sha256: b211d07f0c96637a15fb06f6168617e18030d5d74ad03795dd8547a52717c171 + url: "https://pub.dev" + source: hosted + version: "0.1.0" + screen_brightness_windows: + dependency: transitive + description: + name: screen_brightness_windows + sha256: "9261bf33d0fc2707d8cf16339ce25768100a65e70af0fcabaf032fc12408ba86" + url: "https://pub.dev" + source: hosted + version: "0.1.3" scroll_pos: dependency: transitive description: @@ -854,6 +1046,22 @@ packages: url: "https://pub.dev" source: hosted version: "1.2.0" + super_clipboard: + dependency: "direct main" + description: + name: super_clipboard + sha256: "4a6ae6dfaa282ec1f2bff750976f535517ed8ca842d5deae13985eb11c00ac1f" + url: "https://pub.dev" + source: hosted + version: "0.8.24" + super_native_extensions: + dependency: transitive + description: + name: super_native_extensions + sha256: a433bba8186cd6b707560c42535bf284804665231c00bca86faf1aa4968b7637 + url: "https://pub.dev" + source: hosted + version: "0.8.24" sync_http: dependency: transitive description: @@ -862,6 +1070,14 @@ packages: url: "https://pub.dev" source: hosted version: "0.3.1" + synchronized: + dependency: transitive + description: + name: synchronized + sha256: "69fe30f3a8b04a0be0c15ae6490fc859a78ef4c43ae2dd5e8a623d45bfcf9225" + url: "https://pub.dev" + source: hosted + version: "3.3.0+3" system_theme: dependency: "direct main" description: @@ -910,6 +1126,22 @@ packages: url: "https://pub.dev" source: hosted version: "1.4.0" + universal_platform: + dependency: transitive + description: + name: universal_platform + sha256: "64e16458a0ea9b99260ceb5467a214c1f298d647c659af1bff6d3bf82536b1ec" + url: "https://pub.dev" + source: hosted + version: "1.1.0" + uri_parser: + dependency: transitive + description: + name: uri_parser + sha256: "6543c9fd86d2862fac55d800a43e67c0dcd1a41677cb69c2f8edfe73bbcf1835" + url: "https://pub.dev" + source: hosted + version: "2.0.2" uuid: dependency: "direct main" description: @@ -958,6 +1190,30 @@ packages: url: 
"https://pub.dev" source: hosted version: "14.2.5" + volume_controller: + dependency: transitive + description: + name: volume_controller + sha256: c71d4c62631305df63b72da79089e078af2659649301807fa746088f365cb48e + url: "https://pub.dev" + source: hosted + version: "2.0.8" + wakelock_plus: + dependency: transitive + description: + name: wakelock_plus + sha256: bf4ee6f17a2fa373ed3753ad0e602b7603f8c75af006d5b9bdade263928c0484 + url: "https://pub.dev" + source: hosted + version: "1.2.8" + wakelock_plus_platform_interface: + dependency: transitive + description: + name: wakelock_plus_platform_interface + sha256: "422d1cdbb448079a8a62a5a770b69baa489f8f7ca21aef47800c726d404f9d16" + url: "https://pub.dev" + source: hosted + version: "1.2.1" watcher: dependency: transitive description: @@ -1006,6 +1262,14 @@ packages: url: "https://pub.dev" source: hosted version: "5.8.0" + win32_registry: + dependency: transitive + description: + name: win32_registry + sha256: "21ec76dfc731550fd3e2ce7a33a9ea90b828fdf19a5c3bcf556fa992cfa99852" + url: "https://pub.dev" + source: hosted + version: "1.1.5" xdg_directories: dependency: transitive description: @@ -1040,4 +1304,4 @@ packages: version: "2.2.1" sdks: dart: ">=3.5.0 <4.0.0" - flutter: ">=3.24.3" + flutter: ">=3.24.5" diff --git a/pubspec.yaml b/pubspec.yaml index 6ecc19bd..aece2933 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -20,7 +20,7 @@ version: 1.0.2+1 environment: sdk: '>=3.3.4 <4.0.0' - flutter: 3.24.3 + flutter: 3.24.5 # Dependencies specify other packages that your package needs in order to work. # To automatically upgrade your package dependencies to the latest versions @@ -58,6 +58,10 @@ dependencies: flutter_acrylic: ^1.1.4 flutter_markdown: ^0.7.4+3 markdown: ^7.2.2 + media_kit: ^1.1.11 # Primary package. + media_kit_video: ^1.2.5 # For video rendering. + media_kit_libs_video: ^1.0.5 # Native video dependencies. 
diff --git a/pubspec.yaml b/pubspec.yaml
index 6ecc19bd..aece2933 100644
--- a/pubspec.yaml
+++ b/pubspec.yaml
@@ -20,7 +20,7 @@ version: 1.0.2+1

 environment:
   sdk: '>=3.3.4 <4.0.0'
-  flutter: 3.24.3
+  flutter: 3.24.5

 # Dependencies specify other packages that your package needs in order to work.
 # To automatically upgrade your package dependencies to the latest versions
@@ -58,6 +58,10 @@ dependencies:
   flutter_acrylic: ^1.1.4
   flutter_markdown: ^0.7.4+3
   markdown: ^7.2.2
+  media_kit: ^1.1.11 # Primary package.
+  media_kit_video: ^1.2.5 # For video rendering.
+  media_kit_libs_video: ^1.0.5 # Native video dependencies.
+  super_clipboard: ^0.8.24

 dev_dependencies:
   flutter_test:
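Note: the media_kit packages added above are wired up elsewhere in the app. As a hedged, minimal sketch of the upstream media_kit API (package and class names are real, 'example.mp4' and initPlayback are hypothetical placeholders), the usual order is: MediaKit.ensureInitialized() before the first Player is created, a VideoController from media_kit_video for rendering, and media_kit_libs_video supplying the native libraries with no Dart code of its own.

    // Hedged sketch of typical media_kit usage; not the app's actual player code.
    import 'package:media_kit/media_kit.dart';             // Player, Media
    import 'package:media_kit_video/media_kit_video.dart'; // VideoController, Video widget

    late final Player player;
    late final VideoController controller;

    Future<void> initPlayback() async {
      MediaKit.ensureInitialized();          // call once, before the first Player is created
      player = Player();                     // playback engine from media_kit
      controller = VideoController(player);  // renderer from media_kit_video
      await player.open(Media('example.mp4'), play: false); // hypothetical path
    }

    // In a widget tree, the rendered output is then just Video(controller: controller).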
diff --git a/scripts/create_manifest.dart b/scripts/create_manifest.dart
index 1ef50973..5013639b 100644
--- a/scripts/create_manifest.dart
+++ b/scripts/create_manifest.dart
@@ -5,8 +5,16 @@ import 'package:http/http.dart' as http;
 import 'package:html/dom.dart';
 import 'package:collection/collection.dart';

-Future<List<Map<String, dynamic>>> getCollectionConfig(String collectionId, String author) async {
-  final url = "https://huggingface.co/api/collections/$author/$collectionId";
+class Collection {
+  final String path;
+  final String author;
+  final String fallbackTask;
+  final String Function(String) descriptionFormat;
+  const Collection(this.path, this.author, this.fallbackTask, this.descriptionFormat);
+}
+
+Future<List<Map<String, dynamic>>> getCollectionConfig(Collection collection) async {
+  final url = "https://huggingface.co/api/collections/${collection.author}/${collection.path}";
   final request = await http.get(Uri.parse(url));
   return List<Map<String, dynamic>>.from(jsonDecode(request.body)["items"]);
 }
@@ -28,6 +36,8 @@ class ModelInfo {
   final int contextWindow;
   final String description;
   final String task;
+  final String author;
+  final String collection;

   const ModelInfo({
     required this.name,
@@ -36,7 +46,9 @@ class ModelInfo {
     required this.optimizationPrecision,
     required this.contextWindow,
     required this.description,
-    required this.task
+    required this.task,
+    required this.author,
+    required this.collection,
   });

   Object toMap() {
@@ -48,16 +60,18 @@ class ModelInfo {
       "contextWindow": contextWindow,
       "description": description,
       "task": task,
+      "author": author,
+      "collection": collection,
     };
   }

-  static Future<ModelInfo> fromCollectionConfig(Map<String, dynamic> collectionConfig, String author) async {
+  static Future<ModelInfo> fromCollectionConfig(Map<String, dynamic> collectionConfig, Collection collection) async {
     final id = getIdFromHuggingFaceId(collectionConfig["id"]);
     final name = getNameFromId(id);
-    final config = await getConfigFromRepo(id, author);
-    final fileSize = await getModelSizeCrawler(id, author);
-    final description = "Chat with $name model";
+    final config = await getConfigFromRepo(id, collection.author);
+    final fileSize = await getModelSizeCrawler(id, collection.author);
+    final description = collection.descriptionFormat(name);

     int contextWindow = config["max_position_embeddings"]
       ?? config["max_seq_len"]
@@ -71,7 +85,9 @@ class ModelInfo {
       optimizationPrecision: getOptimizationFromId(id) ?? "",
       contextWindow: contextWindow,
       description: description,
-      task: collectionConfig["pipeline_tag"] ?? "unknown",
+      task: collectionConfig["pipeline_tag"] ?? collection.fallbackTask,
+      author: collection.author,
+      collection: collection.path,
     );
   }

@@ -85,7 +101,11 @@ class ModelInfo {
     int total = 0;
     for (final v in files) {
-      final text = v.nodes.whereType().first.text.trim();
+      final text = v.nodes.first.text?.trim();
+      if (text == null) {
+        continue;
+      }
+      //print(text);
       final match = pattern.firstMatch(text);
       if (match == null){
         continue;
       }
@@ -149,24 +169,29 @@ void generate() async {
   final popular = [
     "mistral-7b-instruct-v0.1-int8-ov",
     "Phi-3-mini-4k-instruct-int4-ov",
+    "whisper-base-fp16-ov",
     "open_llama_3b_v2-int8-ov",
-    "open_llama_3b_v2-int8-ov",
-    "open_llama_3b_v2-int8-ov",
   ];

-  final collectionModels = await getCollectionConfig("llm-6687aaa2abca3bbcec71a9bd", "OpenVINO");
+  final List<Collection> collections = [
+    Collection("speech-to-text-672321d5c070537a178a8aeb", "OpenVINO", "speech", (String name) => "Transcribe video with $name"),
+    Collection("llm-6687aaa2abca3bbcec71a9bd", "OpenVINO", "text-generation", (String name) => "Chat with $name"),
+  ];

   List<ModelInfo> models = [];
-  for (final collectionModel in collectionModels) {
-    models.add(await ModelInfo.fromCollectionConfig(collectionModel, "OpenVINO"));
+  for (final collection in collections) {
+    final collectionModels = await getCollectionConfig(collection);
+    for (final collectionModel in collectionModels) {
+      models.add(await ModelInfo.fromCollectionConfig(collectionModel, collection));
+    }
   }

   Map<String, dynamic> result = {};
   final popularModels = popular.map((id) => models.firstWhereOrNull((r) => r.id == id)).whereType<ModelInfo>();
-  const encoder = JsonEncoder.withIndent(" ");

   result['popular_models'] = popularModels.map((m) => m.toMap()).toList();
   result['all_models'] = models.map((m) => m.toMap()).toList();
+  const encoder = JsonEncoder.withIndent(" ");

   print(encoder.convert(result));
 }
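Because generate() now iterates over Collection records, pointing the manifest at another Hugging Face collection only needs one more entry in that list; each entry carries the collection path, author, fallback task, and the description template applied to every model name. A hedged illustration follows (the "image-generation-..." path is a made-up placeholder, not a real OpenVINO collection):

    // Hypothetical third entry shown for illustration only.
    final List<Collection> collections = [
      Collection("speech-to-text-672321d5c070537a178a8aeb", "OpenVINO", "speech", (String name) => "Transcribe video with $name"),
      Collection("llm-6687aaa2abca3bbcec71a9bd", "OpenVINO", "text-generation", (String name) => "Chat with $name"),
      Collection("image-generation-000000000000000000000000", "OpenVINO", "text-to-image", (String name) => "Generate images with $name"),
    ];

Each entry contributes its models to all_models, with the collection's author, path, and fallback task recorded alongside the per-model metadata.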
diff --git a/test/pages/computer_vision/model_properties_test.dart b/test/pages/computer_vision/model_properties_test.dart
index 70d1e6ef..d034774f 100644
--- a/test/pages/computer_vision/model_properties_test.dart
+++ b/test/pages/computer_vision/model_properties_test.dart
@@ -22,8 +22,8 @@ Widget testWidget(ImageInferenceProvider provider) {
       ),
     ],
     child: FluentApp(
-      home: const Center(
-        child: ModelProperties()
+      home: Center(
+        child: ModelProperties(project: provider.project)
       ),
     ),
   );
diff --git a/test/pages/transcriptions/utils/section_test.dart b/test/pages/transcriptions/utils/section_test.dart
new file mode 100644
index 00000000..3452a625
--- /dev/null
+++ b/test/pages/transcriptions/utils/section_test.dart
@@ -0,0 +1,99 @@
+import 'package:flutter_test/flutter_test.dart';
+import 'package:inference/pages/transcription/utils/section.dart';
+
+void main() {
+  group("Section", () {
+    group("process", () {
+      test("process sets values in data", () async {
+        final state = DynamicRangeLoading(Section(0, 10));
+        for (int j = 0; j < 10; j++) {
+          await state.process((i) async {
+            return j;
+          });
+
+          expect(state.data[j], j);
+        }
+      });
+
+      test("process out of bounds throws error", () async {
+        final state = DynamicRangeLoading(Section(0, 10));
+        for (int j = 0; j < 10; j++) {
+          await state.process((i) async {
+            return j;
+          });
+        }
+
+        expect(() async {
+          await state.process((i) async {
+            return 1;
+          });
+        }, throwsException);
+      });
+
+      test("process continues after skip is done", () async {
+        final state = DynamicRangeLoading(Section(0, 10));
+        state.skipTo(8);
+        for (int j = 0; j < 2; j++) {
+          await state.process((i) async {
+            return j;
+          });
+        }
+        expect(state.getNextIndex(), 0);
+      });
+
+    });
+
+    test('getNextIndex throws error when state is complete', () {
+      final state = DynamicRangeLoading(Section(0, 0));
+      expect(() {
+        state.getNextIndex();
+      },throwsException);
+    });
+
+    test('complete', () async {
+      final state = DynamicRangeLoading(Section(0, 10));
+      for (int j = 0; j < 10; j++) {
+        expect(state.complete, false);
+        await state.process((i) async {
+          return j;
+        });
+      }
+      expect(state.complete, true);
+    });
+
+    group("skip", () {
+      test("skips to specific index", () async {
+        final state = DynamicRangeLoading(Section(0, 10));
+        state.skipTo(5);
+        expect(state.getNextIndex(), 5);
+        expect(state.activeSection.begin, 5);
+        expect(state.activeSection.end, 10);
+      });
+
+      test("skips to partially complete section will go to end of that section ", () async {
+        final state = DynamicRangeLoading(Section(0, 10));
+
+        for (int j = 0; j < 8; j++) {
+          await state.process((i) async {
+            return j;
+          });
+        }
+        state.skipTo(5);
+        expect(state.getNextIndex(), 8);
+      });
+
+      test("skips to fully complete section will not shift next index", () async {
+        final state = DynamicRangeLoading(Section(0, 10));
+        state.skipTo(5);
+
+        for (int j = 0; j < 5; j++) {
+          await state.process((i) async {
+            return j;
+          });
+        }
+        state.skipTo(5);
+        expect(state.getNextIndex(), 0);
+      });
+    });
+  });
+}
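The new tests above effectively document the DynamicRangeLoading contract: process() fills the next pending index, skipTo() moves the active section so a later range is produced first, complete flips once every index has a value, and processing then falls back to the earliest unfinished section. A hedged usage sketch inferred only from those tests (transcribeChunk is a hypothetical worker, and the untyped loader mirrors the tests rather than the real class signature):

    // Sketch only; API shape inferred from section_test.dart above.
    import 'package:inference/pages/transcription/utils/section.dart';

    Future<void> transcribeAll(
      DynamicRangeLoading loader,
      Future<String> Function(int index) transcribeChunk, // hypothetical worker
    ) async {
      while (!loader.complete) {
        // Fills the next pending index of the currently active section.
        await loader.process((i) async => await transcribeChunk(i));
      }
    }

    // A seek in the video could call loader.skipTo(chunkIndex) first, so the part
    // the user is watching is transcribed before the remainder is filled in.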
test("skips to fully complete section will not shift next index", () async { - // final state = DynamicRangeLoading(Section(0, 10)); - // state.skipTo(5); - - // for (int j = 0; j < 5; j++) { - // await state.process((i) async { - // return j; - // }); - // } - // state.skipTo(5); - // expect(state.getNextIndex(), 0); - // }); - }); - }); - */ -} diff --git a/windows/flutter/generated_plugin_registrant.cc b/windows/flutter/generated_plugin_registrant.cc index 909a92e0..201a8b8f 100644 --- a/windows/flutter/generated_plugin_registrant.cc +++ b/windows/flutter/generated_plugin_registrant.cc @@ -8,6 +8,11 @@ #include #include +#include +#include +#include +#include +#include #include void RegisterPlugins(flutter::PluginRegistry* registry) { @@ -15,6 +20,16 @@ void RegisterPlugins(flutter::PluginRegistry* registry) { registry->GetRegistrarForPlugin("DesktopDropPlugin")); FlutterAcrylicPluginRegisterWithRegistrar( registry->GetRegistrarForPlugin("FlutterAcrylicPlugin")); + IrondashEngineContextPluginCApiRegisterWithRegistrar( + registry->GetRegistrarForPlugin("IrondashEngineContextPluginCApi")); + MediaKitLibsWindowsVideoPluginCApiRegisterWithRegistrar( + registry->GetRegistrarForPlugin("MediaKitLibsWindowsVideoPluginCApi")); + MediaKitVideoPluginCApiRegisterWithRegistrar( + registry->GetRegistrarForPlugin("MediaKitVideoPluginCApi")); + ScreenBrightnessWindowsPluginRegisterWithRegistrar( + registry->GetRegistrarForPlugin("ScreenBrightnessWindowsPlugin")); + SuperNativeExtensionsPluginCApiRegisterWithRegistrar( + registry->GetRegistrarForPlugin("SuperNativeExtensionsPluginCApi")); SystemThemePluginRegisterWithRegistrar( registry->GetRegistrarForPlugin("SystemThemePlugin")); } diff --git a/windows/flutter/generated_plugins.cmake b/windows/flutter/generated_plugins.cmake index 1f4b61fd..02d7ea45 100644 --- a/windows/flutter/generated_plugins.cmake +++ b/windows/flutter/generated_plugins.cmake @@ -5,10 +5,16 @@ list(APPEND FLUTTER_PLUGIN_LIST desktop_drop flutter_acrylic + irondash_engine_context + media_kit_libs_windows_video + media_kit_video + screen_brightness_windows + super_native_extensions system_theme ) list(APPEND FLUTTER_FFI_PLUGIN_LIST + media_kit_native_event_loop ) set(PLUGIN_BUNDLED_LIBRARIES)