Compare commits
75 Commits
4d2d2c9938
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1b88bd1960 | ||
| 1aa2476779 | |||
|
|
7ed144bc39 | ||
| 3b42a73b73 | |||
|
|
6f6ded1d90 | ||
| cda028f213 | |||
| 0582ff9a6c | |||
|
|
51d34cb06c | ||
| dbc5e9e6e8 | |||
| 820ecbc83b | |||
|
|
cda8c61429 | ||
| f537912e4e | |||
|
|
7a0363a470 | ||
| 4aabc3bae0 | |||
| 41fd8a067e | |||
|
|
047526dc3c | ||
| 329af1c103 | |||
|
|
5869eeabd6 | ||
| 7fffd74f26 | |||
| a9dada01c0 | |||
| 01b0934d6e | |||
| c0189016e8 | |||
| 7d16f90c71 | |||
|
|
d7c248945d | ||
|
|
059bf147dc | ||
| ffe15e211b | |||
| 255395b582 | |||
| 6390dbc9ab | |||
| 7f2a14609f | |||
|
|
6d39540e8d | ||
| 328615be97 | |||
|
|
20cbbfd06c | ||
| cfeefa385a | |||
| 49ecb06fb0 | |||
|
|
a15548ea77 | ||
| e2cfe56b49 | |||
|
|
9c306a0917 | ||
| 5f05aac909 | |||
|
|
76c9913485 | ||
| 4f257a745b | |||
|
|
59945cb523 | ||
| c13214c4e9 | |||
|
|
6e9e795a16 | ||
| 337782661e | |||
|
|
b6692770c1 | ||
| 141a567927 | |||
|
|
ba41c1cd82 | ||
| b6b812f458 | |||
|
|
9d5f53c5f4 | ||
| a9a5ee4cb6 | |||
|
|
17cc8f41d5 | ||
| a01985d1b8 | |||
|
|
4c1f0305fc | ||
| e49a7c83ba | |||
| e83ce61877 | |||
|
|
c09514c657 | ||
| 3dfcaa19e6 | |||
| 88d1b27394 | |||
| 027a9244ad | |||
| 063c81e8dc | |||
|
|
ad84efb611 | ||
| ecaa640ec0 | |||
|
|
37f1b285d8 | ||
| 71b273f5d7 | |||
|
|
1a823bb1e7 | ||
| aa4fc03c3d | |||
|
|
09832d1c0b | ||
| 68630fdbef | |||
|
|
c9907da846 | ||
| cddd305d26 | |||
| 6f4ffbcaa6 | |||
|
|
3e433c3cbe | ||
| 8cbc77eb1d | |||
|
|
977a8f1637 | ||
| 65ed78462d |
38
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
38
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
---
|
||||||
|
name: Bug report
|
||||||
|
about: Create a report to help us improve
|
||||||
|
title: ''
|
||||||
|
labels: bug
|
||||||
|
assignees: ''
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Describe the bug**
|
||||||
|
A clear and concise description of what the bug is.
|
||||||
|
|
||||||
|
**To Reproduce**
|
||||||
|
Steps to reproduce the behavior:
|
||||||
|
1. Go to '...'
|
||||||
|
2. Click on '....'
|
||||||
|
3. Scroll down to '....'
|
||||||
|
4. See error
|
||||||
|
|
||||||
|
**Expected behavior**
|
||||||
|
A clear and concise description of what you expected to happen.
|
||||||
|
|
||||||
|
**Screenshots**
|
||||||
|
If applicable, add screenshots to help explain your problem.
|
||||||
|
|
||||||
|
**Desktop (please complete the following information):**
|
||||||
|
- OS: [e.g. iOS]
|
||||||
|
- Browser [e.g. chrome, safari]
|
||||||
|
- Version / Commit ID [e.g. 22]
|
||||||
|
|
||||||
|
**Smartphone (please complete the following information):**
|
||||||
|
- Device: [e.g. iPhone6]
|
||||||
|
- OS: [e.g. iOS8.1]
|
||||||
|
- Browser [e.g. stock browser, safari]
|
||||||
|
- Version [e.g. 22]
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
Add any other context about the problem here.
|
||||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
---
|
||||||
|
name: Feature request
|
||||||
|
about: Suggest an idea for this project
|
||||||
|
title: ''
|
||||||
|
labels: enhancement
|
||||||
|
assignees: ''
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Is your feature request related to a problem? Please describe.**
|
||||||
|
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||||
|
|
||||||
|
**Describe the solution you'd like**
|
||||||
|
A clear and concise description of what you want to happen.
|
||||||
|
|
||||||
|
**Describe alternatives you've considered**
|
||||||
|
A clear and concise description of any alternative solutions or features you've considered.
|
||||||
|
|
||||||
|
**Additional context**
|
||||||
|
Add any other context or screenshots about the feature request here.
|
||||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -18,5 +18,6 @@ src/Server/logs
|
|||||||
src/Shared/bin
|
src/Shared/bin
|
||||||
src/Shared/obj
|
src/Shared/obj
|
||||||
src/Server/wwwroot/logs/*
|
src/Server/wwwroot/logs/*
|
||||||
src/Server/CriticalCSS/node_modules
|
src/Server/Tools/CriticalCSS/node_modules
|
||||||
src/Server/CriticalCSS/package*.json
|
src/Server/Tools/CriticalCSS/package*.json
|
||||||
|
*.db*
|
||||||
|
|||||||
115
README.md
115
README.md
@@ -1,92 +1,59 @@
|
|||||||
# embeddingsearch
|
# embeddingsearch<img src="docs/logo.png" alt="Logo" width="50" align="left">
|
||||||
<img src="https://github.com/LD-Reborn/embeddingsearch/blob/main/logo.png" alt="Logo" width="100">
|
embeddingsearch is a self-hosted semantic search server built on vector embeddings.<br/>It lets you index and semantically search text using modern embedding models.
|
||||||
|
<br/><br/>
|
||||||
|
It's designed to be flexible, extensible, and easy to use.
|
||||||
|
|
||||||
embeddingsearch is a search server that uses Embedding Similarity Search (similarly to [Magna](https://github.com/yousef-rafat/Magna/tree/main)) to semantically compare a given input to a database of indexed entries.
|
# Project outline
|
||||||
|
<img src="docs/ProjectOutline/ProjectOutlineDiagram.excalidraw.svg" alt="Logo">
|
||||||
|
|
||||||
embeddingsearch offers:
|
## What embeddingsearch offers:
|
||||||
- Privacy and flexibility through self-hosted solutions like:
|
- Privacy and flexibility by allowing one to self-host everything, including:
|
||||||
- ollama
|
- Ollama
|
||||||
- OpenAI-compatible APIs (like LocalAI)
|
- OpenAI-compatible APIs (like LocalAI)
|
||||||
- Great flexibility through deep control over
|
- Astonishing accuracy when using multiple models for single indices
|
||||||
- the amount of datapoints per entity (i.e. the thing you're trying to find)
|
- Ease-of-use and ease-of-implementation
|
||||||
- which models are used (multiple per datapoint possible to improve accuracy)
|
- The server offers a front-end for management and status information, as well as a decorated swagger back-end
|
||||||
- which models are sourced from where (multiple Ollama/OpenAI-compatible sources possible)
|
- The indexer can also be self-hosted and serves as a host for executing indexing scripts
|
||||||
- similarity calculation methods
|
- The client library can be used to develop your own client software that posts queries or creates indices
|
||||||
- aggregation of results (when multiple models are used per datapoint)
|
- Caching & persistence
|
||||||
|
- Generating embeddings is expensive. So why not cache AND store them?
|
||||||
|
- Query results can also be cached.
|
||||||
|
- "Doesn't that eat a lot of precious RAM?" - My own testing showed: embeddings take up around 4200-5200 bytes each depending on the request string size. So around 4-5 GB per million cached embeddings.
|
||||||
|
|
||||||
This repository comes with a
|
This repository comes with a:
|
||||||
- server (accessible via API calls & swagger)
|
- Server
|
||||||
- clientside library (C#)
|
- Client library (C#)
|
||||||
- scripting based indexer service that supports the use of
|
- Scripting based indexer service that supports the use of
|
||||||
- Python
|
- Python
|
||||||
- CSharp (Roslyn)
|
- CSharp (Roslyn - at-runtime evaluation)
|
||||||
- Golang (Planned)
|
- CSharp (Reflection - compiled)
|
||||||
|
- Lua (Planned)
|
||||||
- Javascript (Planned)
|
- Javascript (Planned)
|
||||||
|
|
||||||
# How to set up / use
|
# How to set up
|
||||||
## Server
|
## Server
|
||||||
(Docker now available! See [Docker installation](docs/Server.md#docker-installation))
|
(Docker also available! See [Docker installation](docs/Server.md#docker-installation))
|
||||||
1. Install [ollama](https://ollama.com/download)
|
1. Install the inferencing tool of your choice, (e.g. [ollama](https://ollama.com/download)) and pull a few models that support generating embeddings.
|
||||||
2. Pull a few models using ollama (e.g. `paraphrase-multilingual`, `bge-m3`, `mxbai-embed-large`, `nomic-embed-text`)
|
2. [Install the dependencies](docs/Server.md#installing-the-dependencies)
|
||||||
3. [Install the dependencies](docs/Server.md#installing-the-dependencies)
|
3. [Set up a mysql database](docs/Server.md#mysql-database-setup)
|
||||||
4. [Set up a local mysql database](docs/Server.md#mysql-database-setup)
|
4. [Set up the configuration](docs/Server.md#configuration)
|
||||||
5. [Set up the configuration](docs/Server.md#setup)
|
5. In `src/Server` execute `dotnet build && dotnet run` to start the server
|
||||||
6. In `src/server` execute `dotnet build && dotnet run` to start the server
|
6. (optional) Create a searchdomain using the web interface
|
||||||
7. (optional) [Create a searchdomain using the web interface](docs/Server.md#accessing-the-api)
|
|
||||||
## Client
|
|
||||||
1. Download the package and add it to your project (TODO: NuGet)
|
|
||||||
2. Create a new client by either:
|
|
||||||
1. By injecting IConfiguration (e.g. `services.AddSingleton<Client>();`)
|
|
||||||
2. By specifying the baseUri, apiKey, and searchdomain (e.g. `new Client.Client(baseUri, apiKey, searchdomain)`)
|
|
||||||
## Indexer
|
## Indexer
|
||||||
(Docker now available! See [Docker installation](docs/Indexer.md#docker-installation))
|
(Docker now available! See [Docker installation](docs/Indexer.md#docker-installation))
|
||||||
1. [Install the dependencies](docs/Indexer.md#installing-the-dependencies)
|
1. [Install the dependencies](docs/Indexer.md#installing-the-dependencies)
|
||||||
2. [Set up the server](#server)
|
2. [Configure the indexer](docs/Indexer.md#configuration)
|
||||||
3. [Configure the indexer](docs/Indexer.md#configuration)
|
3. [Set up your indexing script(s)](docs/Indexer.md#scripting)
|
||||||
4. [Set up your indexing script(s)](docs/Indexer.md#scripting)
|
4. In `src/Indexer` execute `dotnet build && dotnet run` to start the indexer
|
||||||
5. Run with `dotnet build && dotnet run` (Or `/usr/bin/dotnet build && /usr/bin/dotnet run`)
|
|
||||||
# Known issues
|
# Known issues
|
||||||
| Issue | Solution |
|
| Issue | Solution |
|
||||||
| --- | --- |
|
| --- | --- |
|
||||||
| Unhandled exception. MySql.Data.MySqlClient.MySqlException (0x80004005): Invalid attempt to access a field before calling Read() | The searchdomain you entered does not exist |
|
| System.DllNotFoundException: Could not load libpython3.13.so with flags RTLD_NOW \| RTLD_GLOBAL: libpython3.12.so: cannot open shared object file: No such file or directory | Install python3.13-dev via apt. Also: try running the indexer using `/usr/bin/dotnet` instead of `dotnet` (to make sure dotnet is not running as a snap) |
|
||||||
| Unhandled exception. MySql.Data.MySqlClient.MySqlException (0x80004005): Authentication to host 'localhost' for user 'embeddingsearch' using method 'caching_sha2_password' failed with message: Access denied for user 'embeddingsearch'@'localhost' (using password: YES) | TBD |
|
|
||||||
| System.DllNotFoundException: Could not load libpython3.12.so with flags RTLD_NOW \| RTLD_GLOBAL: libpython3.12.so: cannot open shared object file: No such file or directory | Install python3.12-dev via apt. Also: try running the indexer using `/usr/bin/dotnet` instead of `dotnet` (make sure dotnet is installed via apt) |
|
|
||||||
# To-do
|
|
||||||
- (High priority) Add default indexer
|
|
||||||
- Library
|
|
||||||
- Processing:
|
|
||||||
- Text / Markdown documents: file name, full text, paragraphs
|
|
||||||
- Documents
|
|
||||||
- PDF: file name, full text, headline?, paragraphs, images?
|
|
||||||
- odt/docx: file name, full text, headline?, images?
|
|
||||||
- msg/eml: file name, title, recipients, cc, text
|
|
||||||
- Images: file name, OCR, image description?
|
|
||||||
- Videos?
|
|
||||||
- Presentations (Impress/Powerpoint): file name, full text, first slide title, titles, slide texts
|
|
||||||
- Tables (Calc / Excel): file name, tab/page names?, full text (per tab/page)
|
|
||||||
- Other? (TBD)
|
|
||||||
- Server
|
|
||||||
- ~~Scripting capability (Python; perhaps also lua)~~ (Done with the latest commits)
|
|
||||||
- ~~Intended sourcing possibilities:~~
|
|
||||||
- ~~Local/Remote files (CIFS, SMB, FTP)~~
|
|
||||||
- ~~Database contents (MySQL, MSSQL)~~
|
|
||||||
- ~~Web requests (E.g. manual crawling)~~
|
|
||||||
- ~~Script call management (interval based & event based)~~
|
|
||||||
- Implement [ReaderWriterLock](https://learn.microsoft.com/en-us/dotnet/api/system.threading.readerwriterlockslim?view=net-9.0&redirectedfrom=MSDN) for entityCache to allow for multithreaded read access while retaining single-threaded write access.
|
|
||||||
- NuGet packaging and corresponding README documentation
|
|
||||||
- Add option for query result detail levels. e.g.:
|
|
||||||
- Level 0: `{"Name": "...", "Value": 0.53}`
|
|
||||||
- Level 1: `{"Name": "...", "Value": 0.53, "Datapoints": [{"Name": "title", "Value": 0.65}, {...}]}`
|
|
||||||
- Level 2: `{"Name": "...", "Value": 0.53, "Datapoints": [{"Name": "title", "Value": 0.65, "Embeddings": [{"Model": "bge-m3", "Value": 0.87}, {...}]}, {...}]}`
|
|
||||||
- Add "Click-Through" result evaluation (For each entity: store a list of queries that led to the entity being chosen by the user. Then at query-time choose the best-fitting entry and maybe use it as another datapoint? Or use a separate weight function?)
|
|
||||||
- Reranker/Crossencoder/RAG (or anything else beyond initial retrieval) support
|
|
||||||
- Remove the `id` columns from the database tables where the table is actually identified by (and should be unique by) the name, which should become the new primary key.
|
|
||||||
- Improve performance & latency (Create ready-to-go processes where each contain an n'th share of the entity cache, ready to perform a query. Prepare it after creating the entity cache.)
|
|
||||||
- Implement dynamic invocation based database migrations
|
|
||||||
|
|
||||||
# Future features
|
|
||||||
- Support for other database types (MSSQL, SQLite)
|
|
||||||
|
|
||||||
|
# Planned features and support
|
||||||
|
- Document processor with automatic chunking (e.g.: .md, .pdf, .docx, .xlsx, .png, .mp4)
|
||||||
|
- Indexer front-end
|
||||||
|
- Support for other database types (MSSQL, SQLite, PostgreSQL, MongoDB, Redis)
|
||||||
|
|
||||||
# Community
|
# Community
|
||||||
<a href="https://discord.gg/MUKeZM3k"><img src="https://img.shields.io/badge/Join%20Discord-7289DA?style=flat&logo=discord&logoColor=whiteServer" alt="Discord"></img></a>
|
<a href="https://discord.gg/MUKeZM3k"><img src="https://img.shields.io/badge/Join%20Discord-7289DA?style=flat&logo=discord&logoColor=whiteServer" alt="Discord"></img></a>
|
||||||
@@ -8,15 +8,18 @@ The indexer by default
|
|||||||
- Uses HealthChecks (endpoint: `/healthz`)
|
- Uses HealthChecks (endpoint: `/healthz`)
|
||||||
## Docker installation
|
## Docker installation
|
||||||
(On Linux you might need root privileges, thus use `sudo` where necessary)
|
(On Linux you might need root privileges, thus use `sudo` where necessary)
|
||||||
1. Navigate to the `src` directory
|
1. [Configure the indexer](docs/Indexer.md#configuration)
|
||||||
2. Build the docker container: `docker build -t embeddingsearch-indexer -f Indexer/Dockerfile .`
|
2. [Set up your indexing script(s)](docs/Indexer.md#scripting)
|
||||||
3. Run the docker container: `docker run --net=host -t embeddingsearch-indexer` (the `-t` is optional, but you get more meaningful output. Or use `-d` to run it in the background)
|
3. Navigate to the `src` directory
|
||||||
|
4. Build the docker container: `docker build -t embeddingsearch-indexer -f Indexer/Dockerfile .`
|
||||||
|
5. Run the docker container: `docker run --net=host -t embeddingsearch-indexer` (the `-t` is optional, but you get more meaningful output. Or use `-d` to run it in the background)
|
||||||
## Installing the dependencies
|
## Installing the dependencies
|
||||||
## Ubuntu 24.04
|
## Ubuntu 24.04
|
||||||
1. Install the .NET SDK: `sudo apt update && sudo apt install dotnet-sdk-8.0 -y`
|
1. Install the .NET SDK: `sudo apt update && sudo apt install dotnet-sdk-10.0 -y`
|
||||||
2. Install the python SDK: `sudo apt install python3 python3.12 python3.12-dev`
|
2. Install the python SDK: `sudo apt install python3 python3.13 python3.13-dev`
|
||||||
|
- Note: Python 3.14 is not supported yet
|
||||||
## Windows
|
## Windows
|
||||||
Download the [.NET SDK](https://dotnet.microsoft.com/en-us/download) or follow these steps to use WSL:
|
Download and install the [.NET SDK](https://dotnet.microsoft.com/en-us/download) or follow these steps to use WSL:
|
||||||
1. Install Ubuntu in WSL (`wsl --install` and `wsl --install -d Ubuntu`)
|
1. Install Ubuntu in WSL (`wsl --install` and `wsl --install -d Ubuntu`)
|
||||||
2. Enter your WSL environment `wsl.exe` and configure it
|
2. Enter your WSL environment `wsl.exe` and configure it
|
||||||
3. Update via `sudo apt update && sudo apt upgrade -y && sudo snap refresh`
|
3. Update via `sudo apt update && sudo apt upgrade -y && sudo snap refresh`
|
||||||
@@ -26,15 +29,15 @@ The configuration is located in `src/Indexer` and conforms to the [ASP.NET confi
|
|||||||
|
|
||||||
If you plan to use multiple environments, create any `appsettings.{YourEnvironment}.json` (e.g. `Development`, `Staging`, `Prod`) and set the environment variable `DOTNET_ENVIRONMENT` accordingly on the target machine.
|
If you plan to use multiple environments, create any `appsettings.{YourEnvironment}.json` (e.g. `Development`, `Staging`, `Prod`) and set the environment variable `DOTNET_ENVIRONMENT` accordingly on the target machine.
|
||||||
## Setup
|
## Setup
|
||||||
If you just installed the server and want to configure it:
|
If you just installed the indexer and want to configure it:
|
||||||
1. Open `src/Server/appsettings.Development.json`
|
1. Open `src/Indexer/appsettings.Development.json`
|
||||||
2. If your search server is not on the same machine as the indexer, update "BaseUri" to reflect the URL to the server.
|
2. If your search server is not on the same machine as the indexer, update "BaseUri" to reflect the URL to the server.
|
||||||
3. If your search server requires API keys, (i.e. it's operating outside of the "Development" environment) set `"ApiKey": "<your key here>"` beneath `"BaseUri"` in the `"Embeddingsearch"` section.
|
3. If you configured API keys for the search server, set `"ApiKey": "<your key here>"` beneath `"BaseUri"` in the `"Server"` section.
|
||||||
4. Create your own indexing script(s) in `src/Indexer/Scripts/` and configure their use as
|
4. Create your own indexing script(s) in `src/Indexer/Scripts/` and configure them as shown below
|
||||||
## Structure
|
## Structure
|
||||||
```json
|
```json
|
||||||
"EmbeddingsearchIndexer": {
|
"Indexer": {
|
||||||
"Worker":
|
"Workers":
|
||||||
[ // This is a list; you can have as many "workers" as you want
|
[ // This is a list; you can have as many "workers" as you want
|
||||||
{
|
{
|
||||||
"Name": "example",
|
"Name": "example",
|
||||||
@@ -50,7 +53,12 @@ If you just installed the server and want to configure it:
|
|||||||
"Name": "secondWorker",
|
"Name": "secondWorker",
|
||||||
/* ... */
|
/* ... */
|
||||||
}
|
}
|
||||||
]
|
],
|
||||||
|
"ApiKeys": ["YourApiKeysHereForTheIndexer"], // API Keys for if you want to protect the indexer's API
|
||||||
|
"Server": {
|
||||||
|
"BaseUri": "http://localhost:5000", // URL to the embeddingsearch server
|
||||||
|
"ApiKey": "ServerApiKeyHere" // API Key set in the server
|
||||||
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
## Call types
|
## Call types
|
||||||
@@ -71,6 +79,13 @@ If you just installed the server and want to configure it:
|
|||||||
- Parameters:
|
- Parameters:
|
||||||
- Path (e.g. "Scripts/example_content")
|
- Path (e.g. "Scripts/example_content")
|
||||||
# Scripting
|
# Scripting
|
||||||
|
Scripts should be put in `src/Indexer/Scripts/`. If you look there, by default you will find some example scripts that can be taken as reference when building your own.
|
||||||
|
|
||||||
|
For configuration of the scripts see: [Structure](#structure)
|
||||||
|
|
||||||
|
The next few sections explain some core concepts/patterns. If you want to skip to explicit code examples, look here:
|
||||||
|
- [Python](#python)
|
||||||
|
- [Roslyn](#c-roslyn)
|
||||||
## General
|
## General
|
||||||
Scripts need to define the following functions:
|
Scripts need to define the following functions:
|
||||||
- `init()`
|
- `init()`
|
||||||
@@ -186,7 +201,7 @@ from tools import * # Import all tools that are provided for ease of scripting
|
|||||||
|
|
||||||
def init(toolset: Toolset): # defining an init() function with 1 parameter is required.
|
def init(toolset: Toolset): # defining an init() function with 1 parameter is required.
|
||||||
pass # Your code would go here.
|
pass # Your code would go here.
|
||||||
# DO NOT put a main loop here! Why?
|
# Don't put a main loop here! Why?
|
||||||
# This function prevents the application from initializing and maintains exclusive control over the GIL
|
# This function prevents the application from initializing and maintains exclusive control over the GIL
|
||||||
|
|
||||||
def update(toolset: Toolset): # defining an update() function with 1 parameter is required.
|
def update(toolset: Toolset): # defining an update() function with 1 parameter is required.
|
||||||
@@ -261,7 +276,7 @@ public class ExampleScript : Indexer.Models.IScript
|
|||||||
// Required: return an instance of your IScript-extending class
|
// Required: return an instance of your IScript-extending class
|
||||||
return new ExampleScript();
|
return new ExampleScript();
|
||||||
```
|
```
|
||||||
## Golang
|
## Lua
|
||||||
TODO
|
TODO
|
||||||
## Javascript
|
## Javascript
|
||||||
TODO
|
TODO
|
||||||
190
docs/ProjectOutline/ProjectOutlineDiagram.excalidraw.md
Normal file
190
docs/ProjectOutline/ProjectOutlineDiagram.excalidraw.md
Normal file
@@ -0,0 +1,190 @@
|
|||||||
|
---
|
||||||
|
|
||||||
|
excalidraw-plugin: parsed
|
||||||
|
tags: [excalidraw]
|
||||||
|
|
||||||
|
---
|
||||||
|
==⚠ Switch to EXCALIDRAW VIEW in the MORE OPTIONS menu of this document. ⚠== You can decompress Drawing data with the command palette: 'Decompress current Excalidraw file'. For more info check in plugin settings under 'Saving'
|
||||||
|
|
||||||
|
|
||||||
|
# Excalidraw Data
|
||||||
|
|
||||||
|
## Text Elements
|
||||||
|
Server ^TJzgO4nS
|
||||||
|
|
||||||
|
Indexer ^rgrd8gyy
|
||||||
|
|
||||||
|
embeddingsearch ^jB1B7xr7
|
||||||
|
|
||||||
|
Client ^ZttcBOXC
|
||||||
|
|
||||||
|
embeddings
|
||||||
|
provider ^mEIPhpn1
|
||||||
|
|
||||||
|
✔️ Ollama
|
||||||
|
✔️ OpenAI-compatible
|
||||||
|
(e.g. LocalAI) ^o6rED2fi
|
||||||
|
|
||||||
|
uses ^QkKnkGvS
|
||||||
|
|
||||||
|
Database ^yaSaChsK
|
||||||
|
|
||||||
|
✔️ MySQL
|
||||||
|
⚒️ SQLite
|
||||||
|
⚒️ MSSQL
|
||||||
|
⚒️ PostgreSQL
|
||||||
|
⚒️ MongoDB
|
||||||
|
⚒️ Redis ^LHP4PU6V
|
||||||
|
|
||||||
|
Stores
|
||||||
|
data in ^FP2xPhxz
|
||||||
|
|
||||||
|
Listens on port 5146
|
||||||
|
^CJG2peC6
|
||||||
|
|
||||||
|
Listens on port 5210 ^iLZT5hca
|
||||||
|
|
||||||
|
Workers ^33rXJfFb
|
||||||
|
|
||||||
|
- example.py
|
||||||
|
- example.csx
|
||||||
|
- ... ^e1BVqXa2
|
||||||
|
|
||||||
|
✔️ Front-end
|
||||||
|
✔️ Swagger
|
||||||
|
✔️ Elmah ^6UTNDntp
|
||||||
|
|
||||||
|
⚒️ Front-end
|
||||||
|
✔️ Swagger
|
||||||
|
✔️ Elmah ^tlLF3R27
|
||||||
|
|
||||||
|
✔️ Caches embeddings
|
||||||
|
✔️ Caches queries ^I2lN1U82
|
||||||
|
|
||||||
|
✔️ C# library
|
||||||
|
⚒️ NuGet
|
||||||
|
✔️ Searchdomain CRUD
|
||||||
|
✔️ Entity CRUD
|
||||||
|
✔️ Management operations ^4Ab3XHhK
|
||||||
|
|
||||||
|
Uses ^KvuBRV2K
|
||||||
|
|
||||||
|
Accesses ^ikhSH5rs
|
||||||
|
|
||||||
|
✔️ Multiple provider
|
||||||
|
configuration ^ipkoadg8
|
||||||
|
|
||||||
|
%%
|
||||||
|
## Drawing
|
||||||
|
```compressed-json
|
||||||
|
N4KAkARALgngDgUwgLgAQQQDwMYEMA2AlgCYBOuA7hADTgQBuCpAzoQPYB2KqATLZMzYBXUtiRoIACyhQ4zZAHoFAc0JRJQgEYA6bGwC2CgF7N6hbEcK4OCtptbErHALRY8RMpWdx8Q1TdIEfARcZgRmBShcZQUebQBGAE5tHho6IIR9BA4oZm4AbXAwUDBSiBJuCBghAHUAeQA1QgAzAE000shYRErA7CiOZWCOssxuZwA2HgntCYAGAHYADh4l
|
||||||
|
|
||||||
|
hYAWAGY5ub4iyBhx9YBWY+1d9cSp+Pi5pdvj/jKKEnVueO2Z6eOrpeO544neLrJ6QSQIQjKaTcRI/WanCabY4TdY8TYLdGgiDWIbiVBzLHMKCkNgAawQAGE2Pg2KRKgBieIIJlMkaQTS4bCk5QkoQcYhUml0iSM5litkQZqEfD4ADKsGGEkEHglRJJ5Jqr0k3Dm2ke+wgarJCHlMEV6GVFSxvKhHHCeTQBINbDgnLUhzQtydnQgPOEcAAksQHah8
|
||||||
|
|
||||||
|
gBdLHNchZIPcDhCGVYwj8rCVXBzCW8/l25ghko+7p4zb7AC+hIQCGI3DROySgKWWMYLHYXE9sMbTFYnAAcpwxNwlkt1hN7lck8wACIZKCV7jNAhhLGaYT8gCiwSyORD8cTBqEcGIuBnVc9CwmiR4C3ixyvP0SWKIHFJcYT+AfbC5s7Q8/wYSKZaKfNIAqCQAEcGgWZRNlJAAlAAJCVC16BB+hxYYsTGNBnHiFZtE2RFVniHh9R9D1UGcb5tGHBYM
|
||||||
|
|
||||||
|
R4eINhOOZEk2LEXmIN40CRBY9T+ZiFhIspwUhaFOPWdZtCuHgtimQEuImLE0Lxb0yiNclBVpBlmi07SJQ5Lk/T5AVqQ0iRiWsZhXUCHIJSlGVTXNQ1qStA1VIQTV2O1NA9h9Vz7LxRyVWtYRbXtd4sRdN1YHeHYsQMwNgwKCMDSjXAYxPVAdzfA1k2IVMJFweJMxXYgczzfYungItS3LL9UHiRFYUWeqQQNJsu1bOrlg7Zsez7PEbxRY5gQbbLJ2
|
||||||
|
|
||||||
|
nWqf0XA1l0M9dMmyXICnKoCyhA9AAEFCEkCcAC0AwAIQWNkyiQ/LSBJKhyoAzpinK4D0ogAAxXJlCMGB6HaUETsqtNzrYS6bpLfYkp9fdD2Pd4zwvW96rRbyykfZ80Ey99P3SyaECxSRQgAFSwKAABlkyR1AMf/J4gPKB6cYAKSMZQ6nWDhZUQn7TPxiVMPI4cUh2dYaJ2QXaxa0jxjhvCMTmYd/iF3ZWK1AczmmOZNm2Hha1hTZ4ixiEoSgbgkQ
|
||||||
|
|
||||||
|
UwZzWUgRiWNdThXQeltJ0pdOW5LMjKFSozI4CzcCs/XI2lOUFX8y0q0Jc2NQVrzg/VE1/cqQOir8SRSrC51XWwd1otN31eXikNw0jaMEFjZHXyTFMuexVIgsMxOi93HyK3S4EeEvP4phGn02pbbh1jubr2t7Dh+09H4z2BRJ1jb1axuCCHvwXTHpuKubN0WtBc73A8j1q+jz0va9Yc2eHIBpNG5znrEZ0wfWJFlJgm0zSg8cvyob9IO/I04KBZUI
|
||||||
|
|
||||||
|
Iw8VWd+ckeqlaUZEBJdHxutIgygOoQGCM0b2rUmBQHMAQCBkJoFQBdBKPQORcDJiYIXDKxdnSkEhMmAgj8r7oBfm/A0uAhAYJguEb+eJiRCHnj6R8CA4K6xEnVFIoCpC43xkTJ8p9fzsIRsTF8MpyaAWyg9XA+0jAAHF8AAEUajYBgA0DgzQYBqMmJBKAcwuDnzZugPoAxcSc2rBMM4CxEhDk2IkAETElgsQNGRCidiqITBolrc8Sw5gjgWPLDyB
|
||||||
|
|
||||||
|
sbw8R+OiARQk9bcCGvECSKwmIHxVure8tDjZKQjhbYyVsIA21ts0XSDsDL8ktq7cg7tLILRsr7PyMcnJBxciHNyYdeC5PJI0pUzS44hVzEnH0EVU5RU9DFA0cUgw5xBmUFKaVpFZR9DlPK6BcCbDjtmUKaAVrQHMcWTo10VL12rIsZxQ1gkT0gB3Tg7wxKZPbp2Fs/dB51T+Gsa8QTxxTmnhNM+C9ZobgWjnZad0qaVAABowTqMo7+AArTQx0Ko9
|
||||||
|
|
||||||
|
DOhdCAV1gZYjBpvBuUNd5DSmAfB8Uia5LIRh+ck6N/k+mxswChIiSZk1KEcyma0ICkB5MQJYygYCHDMSi9AF94E+i5s4uIUsLy7G2G47YIsyhePFokRINwRxiSCSOeSBo2IcTqgCc4Q51gqroqk/iOthKUMNlk6xjoumUnyZpYppT9JO0qaZapHsvb1LstHXpgVWmR3cjqw+ho2k9ItH0yuAyQza2TpFMiXpYpZ2mYlPOqUC7pRRtlUuaZ1ibJKt
|
||||||
|
|
||||||
|
swhtdjlby2FsVWw1e6d09BMOxlbeoD1/irZE7jNXLKnggGepNqVlBmmuIFW4U3r3BlvPFMNCXBuPpSsRU0CwcwkAGUuTB74UAoZUBduVMBLv/p/Zh1YrmSg/oA/QwCEnn3AZA6BsDhVlCbEg9wqCoGu0wVibBUQ8GkAIZm4ZJD/DkLnegddWAt20PoWwRhrAf7cFYRIo+eDuHmvePwrGQjL4MunTBmBJLC34FkaUVlD04X7XiIdTApAjr8v8kKmx
|
||||||
|
|
||||||
|
WFgTcWIgCK4EwcLNQBFiLxqJuKbHuKca8YSg1LBmDReYF5jg8AvFcB5gkeGUJOEba1+JbWuutsUkp9tnXFSU9Ad1tTrI+29WaAOEb/XGkDZ5Tpxnuk+vDX6n0NoE4FpjcMlOadxkZymQlVeszIDzPTYskuG60zHDzdXbDNV0oOJ+LDUTdaOp/GDTcjgLy8RETVSrVEGdCDts7RjJci9+0r1DF5iA2LO3b2hnvcdqMp2z3EWep+EhMiaErI4QYYRP
|
||||||
|
|
||||||
|
bYG1NaB+/6MD6Ca8QFryg2uiE68lD+X9IOcVbXMw9QD8AgLq1AB9l6EBwIlLe5B+BltPrgFgj+uC7QfozUQ79pCOB/vq+gRrzXkxDZCCNiUdCGFMMm6gaDxK7TwfiZ6JDBpaX0qwzlg0iNFm4dussh6xxsDrGaPQfQph1gIDqJlgAqnBVcxAaiSGwLtijyFULZOo+Rbe2gFiXi1k3ZVdFlXTYOKc5Iw55g/BVhifCknIDarM1xKJfFYnSerDzOYt
|
||||||
|
|
||||||
|
wuOnBcS45YcrICKR1Ip+1IoVNOsdhpuXgrtOezqXpv2Bmmk2ZUm00z1ZbVhoCs5WzwV7ODM9OFZzYzdVuaTR5wrqaFmkv86s7EExgsFt2adVABzmVheiiOdJmxx4xfeCEmLSX3ha1D9eZYGWst/NqwCvt80B2r1BTde6kLoWwqMAipFeyBXYj+gDQ5mKh04shjvMdcMPsky/eSk+NWZ2CRQ4TQHc8wf4cqDtGQ2B9p1AhRSVmJeqMYXGEiCS2x/j
|
||||||
|
|
||||||
|
zENdTxYtOqgC6SWPaYTdVis53nxsz/EJIXgPuPMTtxrzL7ibwy1Pppc2os3al28vHVqaV4ZTTbsPUa+Sg0qzJuWk+X1x0sGr5D/rHJGhbtGtbnGunImv6Mmp5s7r5q7lmgFvlORgaE7CFk3gICctWiiEsI4qJpLgwE8rcl5LsPFiQYln1O8IOHcPMClt8uNFSinj6L2sQEvMCoOqDBvKVqOhVvXsDhSsnm3mApdhABSEQJ/rZt1mIRIYQFITNjkB
|
||||||
|
|
||||||
|
Nr/Pus0LNsevNqegaEKtthIFeutogptroYKs+gaK+gdvgsdkWpALSGdhdpQuIZIbpiBs9hBiwqQGwh9lwvzj9sRMhnSsIl3iwZIqIqSj3vIpUAALLrB1AwAcAfqRFj7+SWLS6T5YQ/BnD0TDgqwCaGqJBXhsYJIzDIiERBLMQ7x+K757q6iSp+KIg0RDjuJEEX6UKXgzAtzOJBJNyXDqxEE34KZ36aZFK2yK7lLOwmSq7mQ6bXrebf7a6+qm564B
|
||||||
|
|
||||||
|
qAFG4gFGZm7xwhaOZlAjIuZ24wH7hwFO7JT5yfonarTZr5SJBe6W6oA+77LVQuQ4GoBTCXhKpXjL4JYJITKPI9RUENo0EHy8S3D7qZY/IdrCEYZsEcEZ6hhZ43TgoSDPTMCvTvSfRgq+6l5ooYqdBFYlYjq178FErA5YZYEwJCHMEiGCIBGoZBF/gB5yIQ6VD6CrgBgAAKkgcAHAhUeO7MT8aR5ExwB8eERE6IGqeRZOhRaAQSpOqsAIZ4TEqw7O
|
||||||
|
|
||||||
|
EAnO7w/weETOsICwKsREfOCGaA2pcmJssuD+ymT+00ZSLqKuWmUx6uzhPotkWuDkoBd+Bu4cd+xuLpGxUaQyOxNu8aPxZQ7mMyCBZx1h5Qlxayo+lcWytxZJYQtUMsDwg44eaAomypCW0ebYtwfiSQKIjBvylJUJeW6eBWa83Bw6uKhJBKAhHCFJ6Gi2lQ12A2t2zAAAOhwHACSGYLlHSF1iuj1s2YNu2Z2d2SQMBg6eNrul5KoeoSemmYtsYRAG
|
||||||
|
|
||||||
|
IDkBOTeoYfehejtntjgu+mGWSjYT+mQvgKug1n1jdq1h2V2WwD2WuVLqBuBtOW9h4Rhpwl9rwkkn4X9h3mhq3q+aSa+OEUyRIGwBMKQKuBODwFKEka7BzAKc4MiJsHhM4lLC2hKVKXVFMPqlrPgcJkqVLFUV5ERFRB8GPFLGJiEnqd9qgKalasaYMdacMXbBaepq/tae/tMV6k6YZrrmbMseEu6f/pHJ6esWUHZlsZAaMgGfbrAY7uWXMqcVYQeR
|
||||||
|
|
||||||
|
GSgWsquDcduOcdgbVP4jsGiMSb8e1NwCiBQX8VmbwMEkxlsMcCmaNOCdlt2uyCWcvCGVXrwdWfvBOvWX+Y2RIIACjkgA8H+oB1AyipS4AdmBXBWIAcDrQBjOB6D6CuhIKaDBAdmoDpWoAAAUCA2gyg2gqABMH4KCAYAAlMuqeegJFSFfgGFRFUFXUNFbFfFQYElYQClQgGlRldlblflYVfeqVTZFOa9urNukevOagAIjoVuXoatjMcQaQHeigtNS
|
||||||
|
|
||||||
|
YbjmYftnuUpeFEeedieT1lVaFfoOFRwFVY1XFQla1e1Z1eld1XlQVUVVtgNQpA+S9u4Z4SSZ9j4Xwl+TSj+XSf+aEdhkBatA9MiaiR9DBaiv9ETthO4toPcKrKHoiDWjsPmZ4twDRFRBsJ8KrFsH8EQaqZ6EkFRN3D8KsJcBsPcOfp9bcNxIiH8ORUKaJkiH0YTrfkJXkqaYUgrs/mMW/mrp6priJbxSGvxUGqsfMdZosZAOJQ5pJXsQmpMg7q5Q
|
||||||
|
|
||||||
|
6YpX5sge7rgI9BpdwPcQKjwI8XXLVKsAJheCNQgn8WqRiFHtQZ6FxteKLgZZPPZZCbloCqWSCjdCtMipRnBWCmymoqSAANJPjKL0Asw4mlB4k8EEnlY1mO2wYA1kmTou0GhwBsDJhlnlSFDZ6mylBzDlReZgDZ03TYRE1iQMZk35Hjz1RfRgA02zAi4M1IhoiAgF2V7/6exQD7Q5S3Y63lQYD5YEIQDgSQTQTwTHQQD6BsC5SVC0iaBqAT1SibrE
|
||||||
|
|
||||||
|
Bsnp3WSZ450ak2UqyuJjy8So2b2Ih5HniXAAjAgs5t2HJYjZDEDd38i907L90ZDLxD3RGxHxEBiJFfST3T3+Rz0L3f1L2Vir0Z1LSb0YiDj4ErArDOInDDi126gQODiOKrBohjyAjrCX0Mkd0LXrRl7gi4CbUGg314MXQEMKJl4ShBDLgUAp0+hT2MCREkAgNbiajqAwmUJA4/U0md4A1MpgAsoRESAB3B2kih0sy8mCq+0ipiw+JST4SLDERCZc
|
||||||
|
|
||||||
|
YYU3BLCk7MSnBDhMa0bAiEWoBbAkVKpM0ibLC6lmrUXfVlD9EZyuRDHc0sUv4VLsX80KGzH6bOmiV8UmYrEelrHC0y23HbE2H+nQGK2yXK0KVpr7lu5ly4DKLa1IGG3pR0RiRQz4GmVGVoCXDW0AmE0n6Djb0FkQlFmu1p4uVcFlD4lVmx2eVVZ0MnQ9ZCBhB5D9kVXFYtODVKFPlm2TkAJzYLbaHnpoKVArkzh9nm2LVbbLWT0kDEDoRrW7mHZD
|
||||||
|
|
||||||
|
2g1vTg1bV2G7ViHNPhCPbPVuFQYvleHvmUKfmxK/V8OOWYaJ2AXYPg7A2VCaAcCtA4xqKgSygwQQ0WIoRWILMyPpFwhXin1cbNwH3ypixiZw04RDQuInBTC1paqAFC4pDMQAjOJIg716N/afXiz4HWVXhjzMRBIs3ya2NtL2PmmsGWnK6c0cV2lzWOlC1S0i0+MCXmbs2WYS2/79LgG+khNQGuYHHZyVPeaq1JMXGqXYgIQxn5q3G61VRX1PFbyn
|
||||||
|
|
||||||
|
A3j1QBKpl1SXCfGUHmX0QOL8zpnBpglMENmp7sH5Ye2dBe2IkbRbS7QHRoHZ7F7+SezYmAzt1VPR01P4p1PvWN5aXkkt5drBFggXOMrd53O94SAwC4Cyi4AUiSDMCB1fPQDSOjDjCEFQvTD8xL67CLAYXYT5ESyyp2IYhk5ngeI+gE11RaxUSFPdwohjzn1U36l+4CI2MmkTFc2Us9rUtsW0uuP2lzJzGePC2uRunstLHGhMt/5iXm4SWxpSVhM+
|
||||||
|
|
||||||
|
jBkiuShiuhbq1xM0yJObvJMR54tKr4REFfEGkXi5OvLqw7ACYbB/x2Ums+VmscOROQDVM161OVaCFBtcONNiEThHi4AchhDlU9b/tRBAdIDbrKF7qjUDNaGzqXxLn6HdRTNLkYKrU+jmEbVq2na/pbMOFgeAehCQcuFgYvWHNvUcJwbU2/bcMA6XMhvXP+syKRtCPoAExwRsnrBsko4TANApsT4GhcwIU4RUSOIfCXhMTIg5No00aoizBTDSw1r4
|
||||||
|
|
||||||
|
Qgn6NqNw0ZGikn5C7IgWMfm9PWOs0DEcv35dtMVaSjFWkDu2kC1f4eM8XMvju+Mmczs8sLtOYCv7HhOHFyVFY+YxNbtpjJsyuYEBsJkNxCl3A8bx3zVZN+75EXt4j6v1g4SglJ6lNPsWtrtvungeWft1nftXNUb+VBWREwCyhqIEwdmABJZEFRV0TDODV6V7KPV016gKvUSDyCaJV215EZwMoGwBOPtG14wo4K0+gTIQ4ZFWV61xwLV6gPV2oB1X
|
||||||
|
|
||||||
|
N817N/Nx11AF12t6V/14N8Nyt6gKN5ll0zuq9jXWNv0xoYMwh0tjM8h5M0YTM+hzuW+ss0Q7h8ee09N+Vz1wd4t41wd5ES179+t2wJ14ENt6gH14MHtyN5WMd09a4U+e9u9d4a22c/4fR+G4xyDmEax8Bba9tHtIdCm661DfBThHEEiDWmsGk0owl7J8TuqQ0X4kEqrICGi2pz4sEjWWJKcPxLZTSp9eJFLDcIancNTgo7JnRTkgxZzeZ6po47zS
|
||||||
|
|
||||||
|
4zZ245KCOw57O946HGy0AaGv48y4ExAYu/LYGZAKu/AScdE+9xKxrQTLu/K9WAbcWulLHjeHvSe5QQbMxIl5DMRDcOWsvsa4Waa6wc5ZwRvVaxieYqm/yX7SDWyTwJgByZgEYOiu67iVil6++z6/lyEcx8pcnRlz6GnRnZa6UMXZ0LnWAPnTdIXZX6UNhNz3YrDHzzeDZVcqUCL4asCCCZL/PscFg2AEVkSJ3XfYNn3Qic/QtEPSPVBLBNK/3VPT
|
||||||
|
|
||||||
|
PRIMmA4CRwiUAyvWvZQg3zX+cILDhFsPkdqS2kew4gg8hVJCOFJLsNqZcIOEP0cpADfePw/XcU/YPQ9E8y828x8xPcvz/qkB56MxWYsvRYa787ouoI+ssEIix4GIWLTevhFEz0QcIGqYiB3yf7BxO6pDf6OQxw5lASG+DEIBQzRTX18ANDBppAAYYIAmG2/Mvmw0kAcNQ+7eHhr+WDb0kBGFMNjk9ET7J9JAqfATmm0gDCcxMyQEcIfi4jjwtYwS
|
||||||
|
|
||||||
|
AtvRCQoC8aaPRMSOiHxodJzweoJVOJjKI3gdSenShP8CNKy8TOFLEYjzSs5ds6WtnB0hrx1yOcACuvcWqO0N7ztZaJvW3ArRXZK012/nG3sBEjLYgv66BYqKF3DLhdIYoeK8DhX3Sns/cwTWLs8htq8A1GTbZYGl2drF8e04fWEvJVfbZ9cuH7Wss3mqwcCMMxXKhBgkCAjlwYuAVAMmBA5iF5QtIcIB2RqF1DTEF3U7o2lg5Xd4Ov7W7iMwkBjM
|
||||||
|
|
||||||
|
7y81VDjM2PQDY/mZQLDm90qCbQieDrCULYTw7tMmhVQ1oQB3aF7Mker2FHlRw+ro9aOrArHiwIToF8gaOeCQJEX0AwR6A5XCYAk0kZYlyeQnRDHYmv5L4midiFEMvnjRFt6IkDeYOiFVjnh9GcQPIkxA2CyRaIzRamosAkhRDAQWpLYLsHiEds5eZnBxlS1YrONrONSellxVc62oJ2evYSgby17spXBQTOWh4LN6ZwImPgjdmSRWRxM6gDvfur7n
|
||||||
|
|
||||||
|
1qKt922TOxMOGPae8LanoE4AIkzJJCyK6ICLEH3S6nCIA0JLLpHzwzR9x8wgh5qv1JCSBZQcEY4CwHT4V5M+blGOrnyKFnCCBR8byqUKxCl8chWdKAQgwLpfQ9+FEIxpCLgbAipItdbCPCPyIfAkRN4FEXRCH4j8ogC1N/oMDNEYB+QYY5QBGNH64NiBhDCMUQLIYkDfoZA4hhQP+hUDqSJwx9vQxvK0DmGO/ZgIwOYF/kLhNrcoBqK1E6jxuBYG
|
||||||
|
|
||||||
|
PoJ3+bcwkKHGFnGIORqYhGeHwaAcxHVh/AbK1wM8PozWCylHEjiEcCximD6Dqw7bIzmS0jimDmK2IpxuMQKRWC1ejLckaqAcFi0/GXLL0nO02JuCPOS7QVt52FaW8Va1vCMSyLTCNBd28ZZ4vzGmDaMxIGrIiL72rTdF+RdI4PiU1lHyj3a2XfIZ1Dy4mjA2JQn9qIQcLrRsAYgXMLszaY9ZYJ8EzplBx6YZw1Cl3capNWGaPohhC0EYRtk3KDD0
|
||||||
|
|
||||||
|
AEw+Zhv2mHrVZhVwm4XcNlAPClh21ewnMLgn2hEJpHR8rsKOao8TmiGKxqGzYF/UG8oOfHmqPQAUgaYyiHgIgApCe4nhjY9NjRj1QbA822wdSfxGmByCrwspITCjQcQ71K2zwJFuJFVj5EVgQLJxLRSF6ttDBMvGXBiIKQK9LONLSwYOwZa2CFiFIpzo4L3HOCKRRvPlhAF2K0iZKPnF9uu2vHit/BkrXAGyQfFhdni2pVEWsA9Hm04ulOT8RNSk
|
||||||
|
|
||||||
|
ipcuM6Qh9paMy5ATLxnrSsjnzrwxci+so8oRACJhEhsgzAVAJwFQBp0FqE1YEBMA7INCHCdUmcO7CakcAWptIKAO1OHBdT0Jw1TCXOU0ILkhmiHO7rNQMILVHupE6AKYUw7UTLCEY5YZ9x6y9SGpA0oaW1KGhjSOh1+fZsjx4n7C0e1FDHt+SEkMcqSuPQGmJMuHoBCABMHaDjGODY5cAQguPk2JBGzB+IUsO2kNH+BjhGeTfbiERBWDXA6IqqQX
|
||||||
|
|
||||||
|
sZN17wibg5k0eIagbbBoWiM4owQ5JMGMUsRvbHEauKqSq8h27jbinYO8k7izMpI6dluLALuc/SnnTwUGW8ElTRWUUvdrbziZqIEpYQ54jhCpy7AacGrVSVlOBAnAQRC+Ypg5UY6ASKmnM4rCBLKzGjKpFoqCbHx6mZY+pjU5qa1JGkoCMwSEsQvtP6kGzhp2U24Cd2g5kEehOExcgtLWwocVp+Elai9wsJHZtpzE/DpUHNn6zBphs62SbM4nkc0A
|
||||||
|
|
||||||
|
ewkIjdI/JHDBJuYwqfsPOGvTKxyiOoJQFJCrgYIZ076CXhSKE4BSFFc4NqTPwrBUQKIeIWRFMnG1y5rccnDZTBFFsGaFwVVlsC0nYt0eY8TQXyJzJSwMQ7YeyWzSnZqRCZPbdkH21xFuTyZHk+ztTO3Gi06ZTgzXm52PEszTxXnLwQyOVm+CbxAQ3AJ8xC7e4ORDxbkS7wHAHwN8UwDOLENLlZSRcOEW9vlJD55ishbtJWccQrLV4Ch6sidABXDJ
|
||||||
|
|
||||||
|
VTn5ccwIo9IQAVi2UqsUgBChpjNBHoiKBScIIgCioRxEnDEPVD8SiY0QGFWtneGvZDg0Q1EUPGp2CS8xy6fiEFvcAElSBPqV+QzqS07ZOSiZY8kmXzSnmEjGZrpZzkPKjj7ivGlIo8dSPcHSUhWRxXIZFJdw8yYpGtHGALOUrhCyClwZiLC0FFxcy2WU4cPgSIjz45Z2YxWRH3fmlTP5oEwoRrMK6McapNQWkOSF1GmyHC5i0gJYrrGKEuhMHToW
|
||||||
|
|
||||||
|
NRmkTUnZq0+7r8TGGrTnuL6Tad7OinBTfZ7TWxfYu2FkcDmEcq6VHL4m+FzmD07Hk9N/k4Zk5bKZkPtAaCgQIUuACuNoQbHwLRUZ4PUPcn7HN0EZvwgcOoxcRAjFghqFYNMCMkc4OkRETjA4igZNR8KLbaitZJoX0UCZ8vBhXKPHmky3ULCwWmwpM4kjF5s8pmSvP5Zry2Z5vDmXoq5liLmRu8lHNIsDxth0mTEMFtci94GlpehlRIXkwMa7BS2Z
|
||||||
|
|
||||||
|
4Ign+PllUkdFOQqOmVK/kVSvKJiqkjVOcCoAsAqUHwDlTgAwAOyXyn5YlWCC6BmAmAIFagG0AwruplQYFZgF+VgqAVUKkFX8vBWQqXA0K2FRNO6EuK4Os0m7kh0WmuySJ7staRhyolLMtpQSnaTtXaYIqkV/ywFVirRVgrsAEKqFTCu0ARKuJr1f6tHNOaxycxQCpJf9STlcDGS4kiABMBRw4xuwE4HIJSu9qwUAZSk8iCODwg3gtYw4T5EKTfFQ
|
||||||
|
|
||||||
|
yA+EsD4N3BzJDgbgoSRFmyyIjJB/ROwIaNME1gfj25PS2cbQsckOozBSvCwWuPcmsLuFY7WmYbj8lLy5l/Ck8abzCkXjVloixAuIpUoa0IU2ypVuFhFySpz26UqtJqxiE6skheZSLM1BuUyiAFco7IWWSeUGK1Zry+ppkOgmVBIqj0EkDkFcD8g6qC3CgNEGUBMA2164Q6qNmkIDkxCDaptVABbXEA21soDtcoC7WkAe1+APtbbIwkOy3FuE+aZ4
|
||||||
|
|
||||||
|
pJUPcyV6CdaVSte40r41dKliSV1QCNqP4Y6idVOpnVzqF1iPSJZdMo6xKaOFC/7KKvQygKHoUAfAATEeibAYIl4f6XNSKVnA0mzEJIC+MNSVLlJMwZxBJgvCGoKaTSlUi0shZ2qz6jqoUs6psmuq8Zg87XqZ3oWjzhlTClXviOsHDsZ5Xkueay13EudJlh4n0lbgEXLt2Zm8mNdvKCW3j8o6JDYrGU0qCyt4SQBpWeCHGZrSCNFTsacvrSvIPgyN
|
||||||
|
|
||||||
|
bItFnvZPyE5L88propEU5dDF38mtdVJ6zzdT1zam+hes7XdrjqQVXtbgH7ViVJulQXTSOvPUmb21Rm2dfZrM0WbvMQ1PFX0ygCuLru/Q4lS7M3VLVfFO6yADMP3VklD1fsiQDZrPUGb7Nk6xzdevM28rw5z5B9QnUFX8SEl8crWc9Myjvq10PAfAN2HiAo4VgAG6Gi4nOBM01FSIFxPbQLZIVzwQuGngJjRZC5lS1beiFTwiwXh8CpwRYPuhxnZM
|
||||||
|
|
||||||
|
M46IgZZiMI16QVxzC0jRuM8mS0aZ884NbRoDUuC+FxvCNaFKEW+dQyfghNXEx2jJqeRdUaGB0W1ZCjeAKwSWYfmk7EQjWxapTU5VfmqaK17lIxW8sglFc9qQVCkJyHBCNShyrZNtT9o6zhBUAoENhCQg4kDqvu3237aDoB2Xl7NwOv7WDoh3yEHFbm7pmd2XxYSvNBK9xXNIGHkqvF65ZaVuu3L+LqVgSg9SEq+2oBkd8O88i2UR2RUGdjU8HUwH
|
||||||
|
|
||||||
|
R3JaolqWgVXEq+pZbX1Ja3Lbc0lVKiCeEAdYOtE0CbAIUcESQMF3yUqi1VIg8YKfhKVC5T+GA9WBQrIifAD8ZOD4AqRzJgj4RdEeqICGU5Rcx404s8dfjnF0LPVS44mVNpI0f4KZ6vCjfNqo068aNnCokcELW1BSQpgi88cIr85MiA2nGtZLAsD28bYxzxU4FJB/E5rzttGDMrmvOX1Q6wSQD4lotrWlqntjyrPs8o03Vqv2H20xXTopD0hUARAT
|
||||||
|
|
||||||
|
QOQFIAsr5u3YIQMog7QTr7sHWYgAYAOz06YIKOCcD2pyDuh+9g+ttZEWsDRBSyTUxAOQCQScAMd7KKzcepr1162qje5vUFVb3t6oAne9rJIB72HVkwY+ofc5pH2wBT9E+qfV2uXiz6mAR4FsEvtx12zMKy6nzdBL81zViJgW8lX4sWZ7rqd4W2nUOu+21769m+trjvo71xau9h+3vSfopAD6z9kVVcBfpgBX77Nk+87LfoWj3759T+7nfer51PrB
|
||||||
|
|
||||||
|
dtJYBSJLx5i77mb0iAFChhTwpY99YkvGTyoAClu4yQNYKJmSm1a0sRBMiEoz1DAgVO/wE6SrDBGidmtKIY/FeCbjdLeEGgmQYiDRa99lglq+3e6rG0EavVy45XniPd3TyqZlG4kRwrw0B7vSvLRjRttD0bzwpjI7mRstinYB2RCJTkc720oNxs9rPche+MRCIaxR5yxxPgTJwZN89AEsteXxoM5yfaKu6VYHXoBCB9oMEBoDwGTYR1h+JeytXwTj
|
||||||
|
|
||||||
|
o/ybmf8zWVc2tGZ0boe/avrXwNElG7oFESQ6grVS7w5DtdRQzmVnw6qARCwIMdgNDE91wxj9Kfl/zTBKJVEGiLRDoj0QGI/EygYxNnLKCADZ6wAgBv3S34QCwGN0GokxEVL4Q6IiNIUpfzFwbHrg2xy8FgOIZRjujMY3oz6Gn45Ah6UOGHHDgRxI5Uc6OTHNjmVXUDf6cxkAYvUIDgCd+KxqvnDTaJC57a2pZtLIKgGAnjdIJgEMHgmDHGcGS2BM
|
||||||
|
|
||||||
|
btuTF4DUxkNNgxmMoEF6aBdAiASWLUBMD8ssol9eQbFX5aJA8RxI8kdSPlaC5GNEET6JQVNxBwkG3gA4gU5XBhNp/Hosvmrb8wqtxjAEZvm+G27eAbq/pZwsXEWdzBrk31eMrs5GHvdJh3yctv8nLzw1q8yNVtoinsb410e7EAFJCEFpHxRtE+msCPwat8Iks8eFXR3wKb/xJah5eWsyOvbNNFe7MTVJRxoSJug6hwl6ah2OLX9BnTHXjt6GErfN
|
||||||
|
|
||||||
|
MzYYRM28VuzoE5EqYSFoCVD16D+eQvBsxWE9Z/TS+p7Heu4lpbrmGW+JZjyF0PamOok6g1G3elwBSQbAQhsoCWC0nXh6RLjMTQahKpzwDq/g7YiQrRCdSUkLQYhs62iY4a/EJEQEixlUVeE3cHDcZ0lMjydDLuvQ5PJm0e7NxK2hbdRoXkhrZlgehjXVBpE2GWNdhreZHvDL6ncAlE6WkabjKJTEy3GNnl8lE0dRYBkskFrcG7h3s20GQ8I0XudO
|
||||||
|
|
||||||
|
GjvW5egrpXo+V07IiCYJBH8paljleyHZbBFKGUAiBH9tyaxfWtK7gXCAkF68reSc1wXIQiFhfdMYPRY7ksOO6aR/u1lLkozS0nxX/uC3Lkkzu2iLTDqh7oXML0F4zbhYQsEHkLYcnnZHPS3867pdHEs/wyBgZG6AuAOAHAHlCbxJ+XQcEFkEqCQJoQTwBgPIQoD7QRl02gwyMGXIiAvYAYGcPoHlALjGKLIcUKpewD6WZ+RlzS8Rvl7mXWQll6y9
|
||||||
|
|
||||||
|
caMuPQ5t3LZy+dBsuZATLm5nUHqC8sGWjLflzlmqaCs+WbhVIkMMpD0veXXLmQBqqzMDJxXgrmQR6GRb6GpXIrGV4i6cgisJX9AFCSi4ROjMhaXLUAQy75ZDGImUxiY8VtlcKtoHiAuAigPgPRO6WrL8Vyq0Zdas4wY+TsTqxVaqv6BG1aaGCNSDxBN5DQ2AEkDKCTXjIeYVwZrbe3qhNRVLzAWa9SHwDtA0AE4ouTJqT3TA0BqlowGwAMByXrkB
|
||||||
|
|
||||||
|
ANhAFZohZF/cRyRqz1cyATWq4BaXhfyF0s8gSAQZ2K99eIDygEAu2cM5nBIB9dcoaBwDsEEyGg3RlH/H0PtGpAPQOUHITKnRFCTJCMb6N6gPiD1BlUsQjCBCzVQKQo3cAaN7YDjZrCU2KbuN44GVTBxPXQrSOOAARb41kpY1jCFMCQh6Pw3CB52dqulH4vLkiAwN3nViHOyKXol+Z4QJ+rwQUcMMh1OxUwG7Bpp5bWIRW5YshsC35bDNuwHCh+bM
|
||||||
|
|
||||||
|
BZQ52OAODYQBa3obLA7EP0EICMAcY51/AJdedYxwMg1tni5h2aYYIir5iJOgUcY5RgDAN8YIK7Y6g5bQgS2a27bftvlnHrY3KG3alXKXxIi2QIQG+rF2e7dmOyIGCWCAA===
|
||||||
|
```
|
||||||
|
%%
|
||||||
2
docs/ProjectOutline/ProjectOutlineDiagram.excalidraw.svg
Normal file
2
docs/ProjectOutline/ProjectOutlineDiagram.excalidraw.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 36 KiB |
@@ -1,21 +1,21 @@
|
|||||||
# Overview
|
# Overview
|
||||||
The server by default
|
The server by default
|
||||||
- runs on port 5146
|
- runs on port 5146
|
||||||
- Uses Swagger UI in development mode (`/swagger/index.html`)
|
- Uses Swagger UI (`/swagger/index.html`)
|
||||||
- Ignores API keys when in development mode
|
|
||||||
- Uses Elmah error logging (endpoint: `/elmah`, local files: `~/logs`)
|
- Uses Elmah error logging (endpoint: `/elmah`, local files: `~/logs`)
|
||||||
- Uses serilog logging (local files: `~/logs`)
|
- Uses serilog logging (local files: `~/logs`)
|
||||||
- Uses HealthChecks (endpoint: `/healthz`)
|
- Uses HealthChecks (endpoint: `/healthz`)
|
||||||
## Docker installation
|
## Docker installation
|
||||||
(On Linux you might need root privileges, thus use `sudo` where necessary)
|
(On Linux you might need root privileges. Use `sudo` where necessary)
|
||||||
1. Navigate to the `src/server` directory
|
1. [Set up the configuration](docs/Server.md#setup)
|
||||||
2. Build the docker container: `docker build -t embeddingsearch-server -f /Dockerfile .`
|
2. Navigate to the `src` directory
|
||||||
3. Run the docker container: `docker run --net=host -t embeddingsearch-server` (the `-t` is optional, but you get more meaningful output. Or use `-d` to run it in the background)
|
3. Build the docker container: `docker build -t embeddingsearch-server -f Server/Dockerfile .`
|
||||||
|
4. Run the docker container: `docker run --net=host -t embeddingsearch-server` (the `-t` is optional, but you get more meaningful output. Or use `-d` to run it in the background)
|
||||||
# Installing the dependencies
|
# Installing the dependencies
|
||||||
## Ubuntu 24.04
|
## Ubuntu 24.04
|
||||||
1. Install the .NET SDK: `sudo apt update && sudo apt install dotnet-sdk-8.0 -y`
|
1. Install the .NET SDK: `sudo apt update && sudo apt install dotnet-sdk-10.0 -y`
|
||||||
## Windows
|
## Windows
|
||||||
Download the [.NET SDK](https://dotnet.microsoft.com/en-us/download) or follow these steps to use WSL:
|
Download and install the [.NET SDK](https://dotnet.microsoft.com/en-us/download) or follow these steps to use WSL:
|
||||||
1. Install Ubuntu in WSL (`wsl --install` and `wsl --install -d Ubuntu`)
|
1. Install Ubuntu in WSL (`wsl --install` and `wsl --install -d Ubuntu`)
|
||||||
2. Enter your WSL environment `wsl.exe` and configure it
|
2. Enter your WSL environment `wsl.exe` and configure it
|
||||||
3. Update via `sudo apt update && sudo apt upgrade -y && sudo snap refresh`
|
3. Update via `sudo apt update && sudo apt upgrade -y && sudo snap refresh`
|
||||||
@@ -30,6 +30,9 @@ Download the [.NET SDK](https://dotnet.microsoft.com/en-us/download) or follow t
|
|||||||
`CREATE DATABASE embeddingsearch; use embeddingsearch;`
|
`CREATE DATABASE embeddingsearch; use embeddingsearch;`
|
||||||
4. Create the user (replace "somepassword! with a secure password):
|
4. Create the user (replace "somepassword! with a secure password):
|
||||||
`CREATE USER 'embeddingsearch'@'%' identified by "somepassword!"; GRANT ALL ON embeddingsearch.* TO embeddingsearch; FLUSH PRIVILEGES;`
|
`CREATE USER 'embeddingsearch'@'%' identified by "somepassword!"; GRANT ALL ON embeddingsearch.* TO embeddingsearch; FLUSH PRIVILEGES;`
|
||||||
|
- Caution: The symbol "%" in the command means that this user can be logged into from outside of the machine.
|
||||||
|
- Replace `'%'` with `'localhost'` or with the IP of your embeddingsearch server machine if that is a concern.
|
||||||
|
5. Exit mysql: `exit`
|
||||||
|
|
||||||
# Configuration
|
# Configuration
|
||||||
## Environments
|
## Environments
|
||||||
@@ -43,34 +46,39 @@ If you just installed the server and want to configure it:
|
|||||||
3. Check the "AiProviders" section. If your Ollama/LocalAI/etc. instance does not run locally, update the "baseURL" to point to the correct URL.
|
3. Check the "AiProviders" section. If your Ollama/LocalAI/etc. instance does not run locally, update the "baseURL" to point to the correct URL.
|
||||||
4. If you plan on using the server in production:
|
4. If you plan on using the server in production:
|
||||||
1. Set the environment variable `DOTNET_ENVIRONMENT` to something that is not "Development". (e.g. "Prod")
|
1. Set the environment variable `DOTNET_ENVIRONMENT` to something that is not "Development". (e.g. "Prod")
|
||||||
2. Rename the `appsettings.Development.json` - replace "Development" with whatever you chose. (e.g. "Prod")
|
2. Rename the `appsettings.Development.json` - replace "Development" with what you chose for `DOTNET_ENVIRONMENT`
|
||||||
3. Set API keys in the "ApiKeys" section (generate keys using the `uuid` command on Linux)
|
3. Set API keys in the "ApiKeys" section (generate keys using the `uuid` command on Linux)
|
||||||
## Structure
|
## Structure
|
||||||
```json
|
```json
|
||||||
"Embeddingsearch": {
|
"Embeddingsearch": {
|
||||||
"ConnectionStrings": {
|
"ConnectionStrings": {
|
||||||
"SQL": "server=localhost;database=embeddingsearch;uid=embeddingsearch;pwd=somepassword!;"
|
"SQL": "server=localhost;database=embeddingsearch;uid=embeddingsearch;pwd=somepassword!;",
|
||||||
|
"Cache": "Data Source=embeddings.db;Mode=ReadWriteCreate;Cache=Shared" // Name of the sqlite cache file
|
||||||
},
|
},
|
||||||
"Elmah": {
|
"Elmah": {
|
||||||
"AllowedHosts": [ // Specify which IP addresses can access /elmah
|
"LogPath": "~/logs" // Where the logs are stored
|
||||||
"127.0.0.1",
|
|
||||||
"::1",
|
|
||||||
"172.17.0.1"
|
|
||||||
]
|
|
||||||
},
|
},
|
||||||
"AiProviders": {
|
"AiProviders": {
|
||||||
"ollama": { // Name of the provider. Used when defining models for a datapoint, e.g. "ollama:mxbai-embed-large"
|
"ollama": { // Name for the provider. Used when defining models for a datapoint, e.g. "ollama:mxbai-embed-large"
|
||||||
"handler": "ollama", // The type of API located at baseURL
|
"handler": "ollama", // The type of API located at baseURL
|
||||||
"baseURL": "http://localhost:11434" // Location of the API
|
"baseURL": "http://localhost:11434", // Location of the API
|
||||||
|
"Allowlist": [".*"], // Allow- and Denylist. Filter out non-embeddings models using regular expressions
|
||||||
|
"Denylist": ["qwen3-coder:latest", "qwen3:0.6b", "deepseek-v3.1:671b-cloud", "qwen3-vl", "deepseek-ocr"]
|
||||||
},
|
},
|
||||||
"localAI": {
|
"localAI": { // e.g. model name: "localAI:bert-embeddings"
|
||||||
"handler": "openai",
|
"handler": "openai",
|
||||||
"baseURL": "http://localhost:8080",
|
"baseURL": "http://localhost:8080",
|
||||||
"ApiKey": "Some API key here"
|
"ApiKey": "Some API key here",
|
||||||
|
"Allowlist": [".*"],
|
||||||
|
"Denylist": ["cross-encoder", "..."]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"ApiKeys": ["Some UUID here", "Another UUID here"], // Restrict access in non-development environments to the server's API using your own generated API keys
|
"ApiKeys": ["Some UUID here", "Another UUID here"], // (optional) Restrict access using API keys
|
||||||
"UseHttpsRedirection": true // tbh I don't even know why this is still here. // TODO implement HttpsRedirection or remove this line
|
"Cache": {
|
||||||
|
"CacheTopN": 10000, // Only cache this number of queries. (Eviction policy: LRU)
|
||||||
|
"StoreEmbeddingCache": true, // If set to true, the SQLite database will be used to store the embeddings
|
||||||
|
"StoreTopN": 10000 // Only write the top n number of queries to the SQLite database
|
||||||
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
## AiProviders
|
## AiProviders
|
||||||
@@ -91,9 +99,9 @@ One can even specify multiple Ollama instances and name them however one pleases
|
|||||||
```
|
```
|
||||||
### handler
|
### handler
|
||||||
Currently two handlers are implemented for embeddings generation:
|
Currently two handlers are implemented for embeddings generation:
|
||||||
- ollama
|
- `ollama`
|
||||||
- requests embeddings from `/api/embed`
|
- requests embeddings from `/api/embed`
|
||||||
- localai
|
- `openai`
|
||||||
- requests embeddings from `/v1/embeddings`
|
- requests embeddings from `/v1/embeddings`
|
||||||
### baseURL
|
### baseURL
|
||||||
Specified by `scheme://host:port`. E.g.: `"baseUrl": "http://localhost:11434"`
|
Specified by `scheme://host:port`. E.g.: `"baseUrl": "http://localhost:11434"`
|
||||||
@@ -105,7 +113,7 @@ Any specified absolute path will be disregarded. (e.g. "http://x.x.x.x/any/subro
|
|||||||
|
|
||||||
# API
|
# API
|
||||||
## Accessing the api
|
## Accessing the api
|
||||||
Once started, the server's API can be comfortably be viewed and manipulated via swagger.
|
Once started, the server's API can be viewed and manipulated via swagger.
|
||||||
|
|
||||||
By default it is accessible under: `http://localhost:5146/swagger/index.html`
|
By default it is accessible under: `http://localhost:5146/swagger/index.html`
|
||||||
|
|
||||||
@@ -114,7 +122,7 @@ To make an API request from within swagger:
|
|||||||
2. Click the "Try it out" button. The input fields (if there are any for your action) should now be editable.
|
2. Click the "Try it out" button. The input fields (if there are any for your action) should now be editable.
|
||||||
3. Fill in the necessary information
|
3. Fill in the necessary information
|
||||||
4. Click "Execute"
|
4. Click "Execute"
|
||||||
## Restricting access
|
## Authorization
|
||||||
API keys do **not** get checked in Development environment!
|
Being logged in has priority over API Key requirement (if api keys are set).
|
||||||
|
|
||||||
Set up a non-development environment as described in [Configuration>Setup](#setup) to enable API key authentication.
|
So being logged in automatically authorizes endpoint usage.
|
||||||
BIN
docs/logo.png
Normal file
BIN
docs/logo.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 15 KiB |
@@ -47,15 +47,27 @@ public class Client
|
|||||||
return await FetchUrlAndProcessJson<EntityListResults>(HttpMethod.Get, url);
|
return await FetchUrlAndProcessJson<EntityListResults>(HttpMethod.Get, url);
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<EntityIndexResult> EntityIndexAsync(List<JSONEntity> jsonEntity)
|
public async Task<EntityIndexResult> EntityIndexAsync(List<JSONEntity> jsonEntity, string? sessionId = null, bool? sessionComplete = null)
|
||||||
{
|
{
|
||||||
return await EntityIndexAsync(JsonSerializer.Serialize(jsonEntity));
|
return await EntityIndexAsync(JsonSerializer.Serialize(jsonEntity), sessionId, sessionComplete);
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<EntityIndexResult> EntityIndexAsync(string jsonEntity)
|
public async Task<EntityIndexResult> EntityIndexAsync(string jsonEntity, string? sessionId = null, bool? sessionComplete = null)
|
||||||
{
|
{
|
||||||
var content = new StringContent(jsonEntity, Encoding.UTF8, "application/json");
|
var content = new StringContent(jsonEntity, Encoding.UTF8, "application/json");
|
||||||
return await FetchUrlAndProcessJson<EntityIndexResult>(HttpMethod.Put, GetUrl($"{baseUri}", "Entities", []), content);
|
Dictionary<string, string> parameters = [];
|
||||||
|
if (sessionId is not null) parameters.Add("sessionId", sessionId);
|
||||||
|
if (sessionComplete is not null) parameters.Add("sessionComplete", ((bool)sessionComplete).ToString());
|
||||||
|
|
||||||
|
return await FetchUrlAndProcessJson<EntityIndexResult>(
|
||||||
|
HttpMethod.Put,
|
||||||
|
GetUrl(
|
||||||
|
$"{baseUri}",
|
||||||
|
$"Entities",
|
||||||
|
parameters
|
||||||
|
),
|
||||||
|
content
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<EntityDeleteResults> EntityDeleteAsync(string entityName)
|
public async Task<EntityDeleteResults> EntityDeleteAsync(string entityName)
|
||||||
@@ -121,13 +133,13 @@ public class Client
|
|||||||
}), new StringContent(settings, Encoding.UTF8, "application/json"));
|
}), new StringContent(settings, Encoding.UTF8, "application/json"));
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<SearchdomainSearchesResults> SearchdomainGetQueriesAsync(string searchdomain)
|
public async Task<SearchdomainQueriesResults> SearchdomainGetQueriesAsync(string searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, string> parameters = new()
|
Dictionary<string, string> parameters = new()
|
||||||
{
|
{
|
||||||
{"searchdomain", searchdomain}
|
{"searchdomain", searchdomain}
|
||||||
};
|
};
|
||||||
return await FetchUrlAndProcessJson<SearchdomainSearchesResults>(HttpMethod.Get, GetUrl($"{baseUri}/Searchdomain", "Queries", parameters));
|
return await FetchUrlAndProcessJson<SearchdomainQueriesResults>(HttpMethod.Get, GetUrl($"{baseUri}/Searchdomain", "Queries", parameters));
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<EntityQueryResults> SearchdomainQueryAsync(string query)
|
public async Task<EntityQueryResults> SearchdomainQueryAsync(string query)
|
||||||
@@ -190,13 +202,13 @@ public class Client
|
|||||||
return await FetchUrlAndProcessJson<SearchdomainUpdateResults>(HttpMethod.Put, GetUrl($"{baseUri}/Searchdomain", "Settings", parameters), content);
|
return await FetchUrlAndProcessJson<SearchdomainUpdateResults>(HttpMethod.Put, GetUrl($"{baseUri}/Searchdomain", "Settings", parameters), content);
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<SearchdomainSearchCacheSizeResults> SearchdomainGetQueryCacheSizeAsync(string searchdomain)
|
public async Task<SearchdomainQueryCacheSizeResults> SearchdomainGetQueryCacheSizeAsync(string searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, string> parameters = new()
|
Dictionary<string, string> parameters = new()
|
||||||
{
|
{
|
||||||
{"searchdomain", searchdomain}
|
{"searchdomain", searchdomain}
|
||||||
};
|
};
|
||||||
return await FetchUrlAndProcessJson<SearchdomainSearchCacheSizeResults>(HttpMethod.Get, GetUrl($"{baseUri}/Searchdomain/QueryCache", "Size", parameters));
|
return await FetchUrlAndProcessJson<SearchdomainQueryCacheSizeResults>(HttpMethod.Get, GetUrl($"{baseUri}/Searchdomain/QueryCache", "Size", parameters));
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<SearchdomainInvalidateCacheResults> SearchdomainClearQueryCache(string searchdomain)
|
public async Task<SearchdomainInvalidateCacheResults> SearchdomainClearQueryCache(string searchdomain)
|
||||||
@@ -222,9 +234,9 @@ public class Client
|
|||||||
return await FetchUrlAndProcessJson<ServerGetModelsResult>(HttpMethod.Get, GetUrl($"{baseUri}/Server", "Models", []));
|
return await FetchUrlAndProcessJson<ServerGetModelsResult>(HttpMethod.Get, GetUrl($"{baseUri}/Server", "Models", []));
|
||||||
}
|
}
|
||||||
|
|
||||||
public async Task<ServerGetEmbeddingCacheSizeResult> ServerGetEmbeddingCacheSizeAsync()
|
public async Task<ServerGetStatsResult> ServerGetStatsAsync()
|
||||||
{
|
{
|
||||||
return await FetchUrlAndProcessJson<ServerGetEmbeddingCacheSizeResult>(HttpMethod.Get, GetUrl($"{baseUri}/Server/EmbeddingCache", "Size", []));
|
return await FetchUrlAndProcessJson<ServerGetStatsResult>(HttpMethod.Get, GetUrl($"{baseUri}/Server/Stats", "Size", []));
|
||||||
}
|
}
|
||||||
|
|
||||||
private async Task<T> FetchUrlAndProcessJson<T>(HttpMethod httpMethod, string url, HttpContent? content = null)
|
private async Task<T> FetchUrlAndProcessJson<T>(HttpMethod httpMethod, string url, HttpContent? content = null)
|
||||||
|
|||||||
@@ -5,7 +5,7 @@
|
|||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
<TargetFramework>net8.0</TargetFramework>
|
<TargetFramework>net10.0</TargetFramework>
|
||||||
<ImplicitUsings>enable</ImplicitUsings>
|
<ImplicitUsings>enable</ImplicitUsings>
|
||||||
<Nullable>enable</Nullable>
|
<Nullable>enable</Nullable>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
FROM ubuntu:24.04 AS ubuntu
|
FROM ubuntu:25.10 AS ubuntu
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
RUN apt-get update
|
RUN apt-get update
|
||||||
RUN apt-get install -y python3.12 python3.12-venv python3.12-dev dotnet-sdk-8.0
|
RUN apt-get install -y python3.13 python3.13-venv python3.13-dev dotnet-sdk-10.0
|
||||||
RUN apt-get clean
|
RUN apt-get clean
|
||||||
COPY . /src/
|
COPY . /src/
|
||||||
ENV ASPNETCORE_ENVIRONMENT Docker
|
ENV ASPNETCORE_ENVIRONMENT Docker
|
||||||
|
|||||||
@@ -1,22 +1,22 @@
|
|||||||
<Project Sdk="Microsoft.NET.Sdk.Web">
|
<Project Sdk="Microsoft.NET.Sdk.Web">
|
||||||
|
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
<TargetFramework>net8.0</TargetFramework>
|
<TargetFramework>net10.0</TargetFramework>
|
||||||
<Nullable>enable</Nullable>
|
<Nullable>enable</Nullable>
|
||||||
<ImplicitUsings>enable</ImplicitUsings>
|
<ImplicitUsings>enable</ImplicitUsings>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<PackageReference Include="ElmahCore" Version="2.1.2" />
|
<PackageReference Include="ElmahCore" Version="2.1.2" />
|
||||||
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="8.0.14" />
|
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.2" />
|
||||||
<PackageReference Include="Microsoft.CodeAnalysis.CSharp.Scripting" Version="4.14.0" />
|
<PackageReference Include="Microsoft.CodeAnalysis.CSharp.Scripting" Version="5.0.0" />
|
||||||
<PackageReference Include="Microsoft.Extensions.Hosting" Version="9.0.8" />
|
<PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.2" />
|
||||||
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.15.0" />
|
<PackageReference Include="Quartz.Extensions.Hosting" Version="3.15.1" />
|
||||||
<PackageReference Include="Serilog.AspNetCore" Version="9.0.0" />
|
<PackageReference Include="Serilog.AspNetCore" Version="10.0.0" />
|
||||||
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
|
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
|
||||||
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.6.2" />
|
<PackageReference Include="Swashbuckle.AspNetCore" Version="10.1.0" />
|
||||||
<PackageReference Include="System.Configuration.ConfigurationManager" Version="9.0.3" />
|
<PackageReference Include="System.Configuration.ConfigurationManager" Version="10.0.2" />
|
||||||
<PackageReference Include="Python" Version="3.13.3" />
|
<PackageReference Include="Python" Version="3.14.2" />
|
||||||
<PackageReference Include="Pythonnet" Version="3.0.5" />
|
<PackageReference Include="Pythonnet" Version="3.0.5" />
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
|
|||||||
@@ -80,8 +80,6 @@ else
|
|||||||
app.UseMiddleware<Shared.ApiKeyMiddleware>();
|
app.UseMiddleware<Shared.ApiKeyMiddleware>();
|
||||||
}
|
}
|
||||||
|
|
||||||
// app.UseHttpsRedirection();
|
|
||||||
|
|
||||||
app.MapControllers();
|
app.MapControllers();
|
||||||
|
|
||||||
app.Run();
|
app.Run();
|
||||||
|
|||||||
@@ -65,6 +65,7 @@ def index_files(toolset: Toolset):
|
|||||||
jsonEntities.append(jsonEntity)
|
jsonEntities.append(jsonEntity)
|
||||||
jsonstring = json.dumps(jsonEntities)
|
jsonstring = json.dumps(jsonEntities)
|
||||||
timer_start = time.time()
|
timer_start = time.time()
|
||||||
|
# Index all entities in one go. If you need to split it into chunks, use the session attributes! See example_chunked.py
|
||||||
result:EntityIndexResult = toolset.Client.EntityIndexAsync(jsonstring).Result
|
result:EntityIndexResult = toolset.Client.EntityIndexAsync(jsonstring).Result
|
||||||
timer_end = time.time()
|
timer_end = time.time()
|
||||||
toolset.Logger.LogInformation(f"Update was successful: {result.Success} - and was done in {timer_end - timer_start} seconds.")
|
toolset.Logger.LogInformation(f"Update was successful: {result.Success} - and was done in {timer_end - timer_start} seconds.")
|
||||||
85
src/Indexer/Scripts/example_chunked.py
Normal file
85
src/Indexer/Scripts/example_chunked.py
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
import math
|
||||||
|
import os
|
||||||
|
from tools import *
|
||||||
|
import json
|
||||||
|
from dataclasses import asdict
|
||||||
|
import time
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
example_content = "./Scripts/example_content"
|
||||||
|
probmethod = "HVEWAvg"
|
||||||
|
similarityMethod = "Cosine"
|
||||||
|
example_searchdomain = "example_" + probmethod
|
||||||
|
example_counter = 0
|
||||||
|
models = ["ollama:bge-m3", "ollama:mxbai-embed-large"]
|
||||||
|
probmethod_datapoint = probmethod
|
||||||
|
probmethod_entity = probmethod
|
||||||
|
# Example for a dictionary based weighted average:
|
||||||
|
# probmethod_datapoint = "DictionaryWeightedAverage:{\"ollama:bge-m3\": 4, \"ollama:mxbai-embed-large\": 1}"
|
||||||
|
# probmethod_entity = "DictionaryWeightedAverage:{\"title\": 2, \"filename\": 0.1, \"text\": 0.25}"
|
||||||
|
|
||||||
|
def init(toolset: Toolset):
|
||||||
|
global example_counter
|
||||||
|
toolset.Logger.LogInformation("{toolset.Name} - init", toolset.Name)
|
||||||
|
toolset.Logger.LogInformation("This is the init function from the python example script")
|
||||||
|
toolset.Logger.LogInformation(f"example_counter: {example_counter}")
|
||||||
|
searchdomainlist:SearchdomainListResults = toolset.Client.SearchdomainListAsync().Result
|
||||||
|
if example_searchdomain not in searchdomainlist.Searchdomains:
|
||||||
|
toolset.Client.SearchdomainCreateAsync(example_searchdomain).Result
|
||||||
|
searchdomainlist = toolset.Client.SearchdomainListAsync().Result
|
||||||
|
output = "Currently these searchdomains exist:\n"
|
||||||
|
for searchdomain in searchdomainlist.Searchdomains:
|
||||||
|
output += f" - {searchdomain}\n"
|
||||||
|
toolset.Logger.LogInformation(output)
|
||||||
|
|
||||||
|
def update(toolset: Toolset):
|
||||||
|
global example_counter
|
||||||
|
toolset.Logger.LogInformation("{toolset.Name} - update", toolset.Name)
|
||||||
|
toolset.Logger.LogInformation("This is the update function from the python example script")
|
||||||
|
callbackInfos:ICallbackInfos = toolset.CallbackInfos
|
||||||
|
if (str(callbackInfos) == "Indexer.Models.RunOnceCallbackInfos"):
|
||||||
|
toolset.Logger.LogInformation("It was triggered by a runonce call")
|
||||||
|
elif (str(callbackInfos) == "Indexer.Models.IntervalCallbackInfos"):
|
||||||
|
toolset.Logger.LogInformation("It was triggered by an interval call")
|
||||||
|
elif (str(callbackInfos) == "Indexer.Models.ScheduleCallbackInfos"):
|
||||||
|
toolset.Logger.LogInformation("It was triggered by a schedule call")
|
||||||
|
elif (str(callbackInfos) == "Indexer.Models.FileUpdateCallbackInfos"):
|
||||||
|
toolset.Logger.LogInformation("It was triggered by a fileupdate call")
|
||||||
|
else:
|
||||||
|
toolset.Logger.LogInformation("It was triggered, but the origin of the call could not be determined")
|
||||||
|
example_counter += 1
|
||||||
|
toolset.Logger.LogInformation(f"example_counter: {example_counter}")
|
||||||
|
index_files(toolset)
|
||||||
|
|
||||||
|
def index_files(toolset: Toolset):
|
||||||
|
jsonEntities:list = []
|
||||||
|
for filename in os.listdir(example_content):
|
||||||
|
qualified_filepath = example_content + "/" + filename
|
||||||
|
with open(qualified_filepath, "r", encoding='utf-8', errors="replace") as file:
|
||||||
|
title = file.readline()
|
||||||
|
text = file.read()
|
||||||
|
datapoints:list = [
|
||||||
|
JSONDatapoint("filename", qualified_filepath, probmethod_datapoint, similarityMethod, models),
|
||||||
|
JSONDatapoint("title", title, probmethod_datapoint, similarityMethod, models),
|
||||||
|
JSONDatapoint("text", text, probmethod_datapoint, similarityMethod, models)
|
||||||
|
]
|
||||||
|
jsonEntity:dict = asdict(JSONEntity(qualified_filepath, probmethod_entity, example_searchdomain, {}, datapoints))
|
||||||
|
jsonEntities.append(jsonEntity)
|
||||||
|
timer_start = time.time()
|
||||||
|
chunkSize = 5
|
||||||
|
chunkList = chunk_list(jsonEntities, chunkSize)
|
||||||
|
chunkCount = math.ceil(len(jsonEntities) / chunkSize)
|
||||||
|
sessionId = uuid.uuid4().hex
|
||||||
|
print(f"indexing {len(jsonEntities)} entities")
|
||||||
|
for i, entities in enumerate(chunkList):
|
||||||
|
isLast = i == chunkCount
|
||||||
|
print(f'Processing chunk {i} / {len(jsonEntities) / chunkSize}')
|
||||||
|
jsonstring = json.dumps(entities)
|
||||||
|
result:EntityIndexResult = toolset.Client.EntityIndexAsync(jsonstring, sessionId, isLast).Result
|
||||||
|
timer_end = time.time()
|
||||||
|
toolset.Logger.LogInformation(f"Update was successful: {result.Success} - and was done in {timer_end - timer_start} seconds.")
|
||||||
|
|
||||||
|
|
||||||
|
def chunk_list(lst, chunk_size):
|
||||||
|
for i in range(0, len(lst), chunk_size):
|
||||||
|
yield lst[i: i + chunk_size]
|
||||||
@@ -107,6 +107,8 @@ class Client:
|
|||||||
# pass
|
# pass
|
||||||
async def EntityIndexAsync(jsonEntity:str) -> EntityIndexResult:
|
async def EntityIndexAsync(jsonEntity:str) -> EntityIndexResult:
|
||||||
pass
|
pass
|
||||||
|
async def EntityIndexAsync(jsonEntity:str, sessionId:str, sessionComplete:bool) -> EntityIndexResult:
|
||||||
|
pass
|
||||||
async def EntityIndexAsync(searchdomain:str, jsonEntity:str) -> EntityIndexResult:
|
async def EntityIndexAsync(searchdomain:str, jsonEntity:str) -> EntityIndexResult:
|
||||||
pass
|
pass
|
||||||
async def EntityListAsync(returnEmbeddings:bool = False) -> EntityListResults:
|
async def EntityListAsync(returnEmbeddings:bool = False) -> EntityListResults:
|
||||||
|
|||||||
@@ -21,7 +21,8 @@
|
|||||||
"ApiKeys": ["xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"],
|
"ApiKeys": ["xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"],
|
||||||
"Server": {
|
"Server": {
|
||||||
"BaseUri": "http://localhost:5146",
|
"BaseUri": "http://localhost:5146",
|
||||||
"ApiKey": "yyyyyyyy-yyyy-yyyy-yyyy-yyyyyyyyyyyy"
|
"ApiKey": "APIKeyForTheServer"
|
||||||
}
|
},
|
||||||
|
"PythonRuntime": "libpython3.13.so"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,26 +5,8 @@
|
|||||||
"Microsoft.AspNetCore": "Warning"
|
"Microsoft.AspNetCore": "Warning"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"Kestrel":{
|
"Indexer": {
|
||||||
"Endpoints": {
|
"Workers":
|
||||||
"http":{
|
|
||||||
"Url": "http://0.0.0.0:5120"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"Embeddingsearch": {
|
|
||||||
"BaseUri": "http://172.17.0.1:5146",
|
|
||||||
"ApiKeys": ["b54ea868-496e-11f0-9cc7-f79f06b160e5", "bbdeedf0-496e-11f0-9744-97e28c221f67"]
|
|
||||||
},
|
|
||||||
"EmbeddingsearchIndexer": {
|
|
||||||
"Elmah": {
|
|
||||||
"AllowedHosts": [
|
|
||||||
"127.0.0.1",
|
|
||||||
"::1",
|
|
||||||
"172.17.0.1"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"Worker":
|
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
"Name": "pythonExample",
|
"Name": "pythonExample",
|
||||||
@@ -36,6 +18,12 @@
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
]
|
],
|
||||||
|
"ApiKeys": ["APIKeyOfYourChoice", "AnotherOneIfYouLike"],
|
||||||
|
"Server": {
|
||||||
|
"BaseUri": "http://172.17.0.1:5146",
|
||||||
|
"ApiKey": "APIKeyForTheServer"
|
||||||
|
},
|
||||||
|
"PythonRuntime": "libpython3.13.so"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ public class AIProvider
|
|||||||
{
|
{
|
||||||
private readonly ILogger<AIProvider> _logger;
|
private readonly ILogger<AIProvider> _logger;
|
||||||
private readonly EmbeddingSearchOptions _configuration;
|
private readonly EmbeddingSearchOptions _configuration;
|
||||||
public Dictionary<string, AiProvider> aIProvidersConfiguration;
|
public Dictionary<string, AiProvider> AiProvidersConfiguration;
|
||||||
|
|
||||||
public AIProvider(ILogger<AIProvider> logger, IOptions<EmbeddingSearchOptions> configuration)
|
public AIProvider(ILogger<AIProvider> logger, IOptions<EmbeddingSearchOptions> configuration)
|
||||||
{
|
{
|
||||||
@@ -27,16 +27,21 @@ public class AIProvider
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
aIProvidersConfiguration = retrievedAiProvidersConfiguration;
|
AiProvidersConfiguration = retrievedAiProvidersConfiguration;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public float[] GenerateEmbeddings(string modelUri, string[] input)
|
public float[] GenerateEmbeddings(string modelUri, string input)
|
||||||
|
{
|
||||||
|
return [.. GenerateEmbeddings(modelUri, [input]).First()];
|
||||||
|
}
|
||||||
|
|
||||||
|
public IEnumerable<float[]> GenerateEmbeddings(string modelUri, string[] input)
|
||||||
{
|
{
|
||||||
Uri uri = new(modelUri);
|
Uri uri = new(modelUri);
|
||||||
string provider = uri.Scheme;
|
string provider = uri.Scheme;
|
||||||
string model = uri.AbsolutePath;
|
string model = uri.AbsolutePath;
|
||||||
AiProvider? aIProvider = aIProvidersConfiguration
|
AiProvider? aIProvider = AiProvidersConfiguration
|
||||||
.FirstOrDefault(x => string.Equals(x.Key.ToLower(), provider.ToLower()))
|
.FirstOrDefault(x => string.Equals(x.Key.ToLower(), provider.ToLower()))
|
||||||
.Value;
|
.Value;
|
||||||
if (aIProvider is null)
|
if (aIProvider is null)
|
||||||
@@ -103,13 +108,22 @@ public class AIProvider
|
|||||||
try
|
try
|
||||||
{
|
{
|
||||||
JObject responseContentJson = JObject.Parse(responseContent);
|
JObject responseContentJson = JObject.Parse(responseContent);
|
||||||
JToken? responseContentTokens = responseContentJson.SelectToken(embeddingsJsonPath);
|
List<JToken>? responseContentTokens = [.. responseContentJson.SelectTokens(embeddingsJsonPath)];
|
||||||
if (responseContentTokens is null)
|
if (responseContentTokens is null || responseContentTokens.Count == 0)
|
||||||
{
|
{
|
||||||
_logger.LogError("Unable to select tokens using JSONPath {embeddingsJsonPath} for string: {responseContent}.", [embeddingsJsonPath, responseContent]);
|
if (responseContentJson.TryGetValue("error", out JToken? errorMessageJson) && errorMessageJson is not null)
|
||||||
throw new JSONPathSelectionException(embeddingsJsonPath, responseContent);
|
{
|
||||||
|
string errorMessage = errorMessageJson.Value<string>() ?? "";
|
||||||
|
_logger.LogError("Unable to retrieve embeddings due to error: {errorMessage}", [errorMessage]);
|
||||||
|
throw new Exception($"Unable to retrieve embeddings due to error: {errorMessage}");
|
||||||
|
|
||||||
|
} else
|
||||||
|
{
|
||||||
|
_logger.LogError("Unable to select tokens using JSONPath {embeddingsJsonPath} for string: {responseContent}.", [embeddingsJsonPath, responseContent]);
|
||||||
|
throw new JSONPathSelectionException(embeddingsJsonPath, responseContent);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return [.. responseContentTokens.Values<float>()];
|
return [.. responseContentTokens.Select(token => token.ToObject<float[]>() ?? throw new Exception("Unable to cast embeddings response to float[]"))];
|
||||||
}
|
}
|
||||||
catch (Exception ex)
|
catch (Exception ex)
|
||||||
{
|
{
|
||||||
@@ -120,7 +134,7 @@ public class AIProvider
|
|||||||
|
|
||||||
public string[] GetModels()
|
public string[] GetModels()
|
||||||
{
|
{
|
||||||
var aIProviders = aIProvidersConfiguration;
|
var aIProviders = AiProvidersConfiguration;
|
||||||
List<string> results = [];
|
List<string> results = [];
|
||||||
foreach (KeyValuePair<string, AiProvider> aIProviderKV in aIProviders)
|
foreach (KeyValuePair<string, AiProvider> aIProviderKV in aIProviders)
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -14,6 +14,9 @@ public class EntityController : ControllerBase
|
|||||||
private SearchdomainManager _domainManager;
|
private SearchdomainManager _domainManager;
|
||||||
private readonly SearchdomainHelper _searchdomainHelper;
|
private readonly SearchdomainHelper _searchdomainHelper;
|
||||||
private readonly DatabaseHelper _databaseHelper;
|
private readonly DatabaseHelper _databaseHelper;
|
||||||
|
private readonly Dictionary<string, EntityIndexSessionData> _sessions = [];
|
||||||
|
private readonly object _sessionLock = new();
|
||||||
|
private const int SessionTimeoutMinutes = 60; // TODO: remove magic number; add an optional configuration option
|
||||||
|
|
||||||
public EntityController(ILogger<EntityController> logger, IConfiguration config, SearchdomainManager domainManager, SearchdomainHelper searchdomainHelper, DatabaseHelper databaseHelper)
|
public EntityController(ILogger<EntityController> logger, IConfiguration config, SearchdomainManager domainManager, SearchdomainHelper searchdomainHelper, DatabaseHelper databaseHelper)
|
||||||
{
|
{
|
||||||
@@ -46,34 +49,34 @@ public class EntityController : ControllerBase
|
|||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
EntityListResults entityListResults = new() {Results = [], Success = true};
|
EntityListResults entityListResults = new() {Results = [], Success = true};
|
||||||
foreach (Entity entity in searchdomain_.entityCache)
|
foreach ((string _, Entity entity) in searchdomain_.EntityCache)
|
||||||
{
|
{
|
||||||
List<AttributeResult> attributeResults = [];
|
List<AttributeResult> attributeResults = [];
|
||||||
foreach (KeyValuePair<string, string> attribute in entity.attributes)
|
foreach (KeyValuePair<string, string> attribute in entity.Attributes)
|
||||||
{
|
{
|
||||||
attributeResults.Add(new AttributeResult() {Name = attribute.Key, Value = attribute.Value});
|
attributeResults.Add(new AttributeResult() {Name = attribute.Key, Value = attribute.Value});
|
||||||
}
|
}
|
||||||
List<DatapointResult> datapointResults = [];
|
List<DatapointResult> datapointResults = [];
|
||||||
foreach (Datapoint datapoint in entity.datapoints)
|
foreach (Datapoint datapoint in entity.Datapoints)
|
||||||
{
|
{
|
||||||
if (returnModels)
|
if (returnModels)
|
||||||
{
|
{
|
||||||
List<EmbeddingResult> embeddingResults = [];
|
List<EmbeddingResult> embeddingResults = [];
|
||||||
foreach ((string, float[]) embedding in datapoint.embeddings)
|
foreach ((string, float[]) embedding in datapoint.Embeddings)
|
||||||
{
|
{
|
||||||
embeddingResults.Add(new EmbeddingResult() {Model = embedding.Item1, Embeddings = returnEmbeddings ? embedding.Item2 : []});
|
embeddingResults.Add(new EmbeddingResult() {Model = embedding.Item1, Embeddings = returnEmbeddings ? embedding.Item2 : []});
|
||||||
}
|
}
|
||||||
datapointResults.Add(new DatapointResult() {Name = datapoint.name, ProbMethod = datapoint.probMethod.name, SimilarityMethod = datapoint.similarityMethod.name, Embeddings = embeddingResults});
|
datapointResults.Add(new DatapointResult() {Name = datapoint.Name, ProbMethod = datapoint.ProbMethod.Name, SimilarityMethod = datapoint.SimilarityMethod.Name, Embeddings = embeddingResults});
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
datapointResults.Add(new DatapointResult() {Name = datapoint.name, ProbMethod = datapoint.probMethod.name, SimilarityMethod = datapoint.similarityMethod.name, Embeddings = null});
|
datapointResults.Add(new DatapointResult() {Name = datapoint.Name, ProbMethod = datapoint.ProbMethod.Name, SimilarityMethod = datapoint.SimilarityMethod.Name, Embeddings = null});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
EntityListResult entityListResult = new()
|
EntityListResult entityListResult = new()
|
||||||
{
|
{
|
||||||
Name = entity.name,
|
Name = entity.Name,
|
||||||
ProbMethod = entity.probMethodName,
|
ProbMethod = entity.ProbMethodName,
|
||||||
Attributes = attributeResults,
|
Attributes = attributeResults,
|
||||||
Datapoints = datapointResults
|
Datapoints = datapointResults
|
||||||
};
|
};
|
||||||
@@ -86,67 +89,182 @@ public class EntityController : ControllerBase
|
|||||||
/// Index entities
|
/// Index entities
|
||||||
/// </summary>
|
/// </summary>
|
||||||
/// <remarks>
|
/// <remarks>
|
||||||
/// Behavior: Creates new entities, but overwrites existing entities that have the same name
|
/// Behavior: Updates the index using the provided entities. Creates new entities, overwrites existing entities with the same name, and deletes entities that are not part of the index anymore.
|
||||||
|
///
|
||||||
|
/// Can be executed in a single request or in multiple chunks using a (self-defined) session UUID string.
|
||||||
|
///
|
||||||
|
/// For session-based chunk uploads:
|
||||||
|
/// - Provide sessionId to accumulate entities across multiple requests
|
||||||
|
/// - Set sessionComplete=true on the final request to finalize and delete entities that are not in the accumulated list
|
||||||
|
/// - Without sessionId: Missing entities will be deleted from the searchdomain.
|
||||||
|
/// - Sessions expire after 60 minutes of inactivity (or as otherwise configured in the appsettings)
|
||||||
/// </remarks>
|
/// </remarks>
|
||||||
/// <param name="jsonEntities">Entities to index</param>
|
/// <param name="jsonEntities">Entities to index</param>
|
||||||
|
/// <param name="sessionId">Optional session ID for batch uploads across multiple requests</param>
|
||||||
|
/// <param name="sessionComplete">If true, finalizes the session and deletes entities not in the accumulated list</param>
|
||||||
[HttpPut("/Entities")]
|
[HttpPut("/Entities")]
|
||||||
public ActionResult<EntityIndexResult> Index([FromBody] List<JSONEntity>? jsonEntities)
|
public async Task<ActionResult<EntityIndexResult>> Index(
|
||||||
|
[FromBody] List<JSONEntity>? jsonEntities,
|
||||||
|
string? sessionId = null,
|
||||||
|
bool sessionComplete = false)
|
||||||
{
|
{
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
List<Entity>? entities = _searchdomainHelper.EntitiesFromJSON(
|
if (sessionId is null || string.IsNullOrWhiteSpace(sessionId))
|
||||||
|
{
|
||||||
|
sessionId = Guid.NewGuid().ToString(); // Create a short-lived session
|
||||||
|
sessionComplete = true; // If no sessionId was set, there is no trackable session. The pseudo-session ends here.
|
||||||
|
}
|
||||||
|
// Periodic cleanup of expired sessions
|
||||||
|
CleanupExpiredEntityIndexSessions();
|
||||||
|
EntityIndexSessionData session = GetOrCreateEntityIndexSession(sessionId);
|
||||||
|
|
||||||
|
if (jsonEntities is null && !sessionComplete)
|
||||||
|
{
|
||||||
|
return BadRequest(new EntityIndexResult() { Success = false, Message = "jsonEntities can only be null for a complete session" });
|
||||||
|
} else if (jsonEntities is null && sessionComplete)
|
||||||
|
{
|
||||||
|
await EntityIndexSessionDeleteUnindexedEntities(session);
|
||||||
|
return Ok(new EntityIndexResult() { Success = true });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Standard entity indexing (upsert behavior)
|
||||||
|
List<Entity>? entities = await _searchdomainHelper.EntitiesFromJSON(
|
||||||
_domainManager,
|
_domainManager,
|
||||||
_logger,
|
_logger,
|
||||||
JsonSerializer.Serialize(jsonEntities));
|
JsonSerializer.Serialize(jsonEntities));
|
||||||
if (entities is not null && jsonEntities is not null)
|
if (entities is not null && jsonEntities is not null)
|
||||||
{
|
{
|
||||||
List<string> invalidatedSearchdomains = [];
|
session.AccumulatedEntities.AddRange(entities);
|
||||||
foreach (var jsonEntity in jsonEntities)
|
|
||||||
|
if (sessionComplete)
|
||||||
{
|
{
|
||||||
string jsonEntityName = jsonEntity.Name;
|
await EntityIndexSessionDeleteUnindexedEntities(session);
|
||||||
string jsonEntitySearchdomainName = jsonEntity.Searchdomain;
|
|
||||||
if (entities.Select(x => x.name == jsonEntityName).Any()
|
|
||||||
&& !invalidatedSearchdomains.Contains(jsonEntitySearchdomainName))
|
|
||||||
{
|
|
||||||
invalidatedSearchdomains.Add(jsonEntitySearchdomainName);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return Ok(new EntityIndexResult() { Success = true });
|
return Ok(new EntityIndexResult() { Success = true });
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
_logger.LogError("Unable to deserialize an entity");
|
_logger.LogError("Unable to deserialize an entity");
|
||||||
|
ElmahCore.ElmahExtensions.RaiseError(new Exception("Unable to deserialize an entity"));
|
||||||
return Ok(new EntityIndexResult() { Success = false, Message = "Unable to deserialize an entity"});
|
return Ok(new EntityIndexResult() { Success = false, Message = "Unable to deserialize an entity"});
|
||||||
}
|
}
|
||||||
} catch (Exception ex)
|
} catch (Exception ex)
|
||||||
{
|
{
|
||||||
if (ex.InnerException is not null) ex = ex.InnerException;
|
if (ex.InnerException is not null) ex = ex.InnerException;
|
||||||
_logger.LogError("Unable to index the provided entities. {ex.Message} - {ex.StackTrace}", [ex.Message, ex.StackTrace]);
|
_logger.LogError("Unable to index the provided entities. {ex.Message} - {ex.StackTrace}", [ex.Message, ex.StackTrace]);
|
||||||
|
ElmahCore.ElmahExtensions.RaiseError(ex);
|
||||||
return Ok(new EntityIndexResult() { Success = false, Message = ex.Message });
|
return Ok(new EntityIndexResult() { Success = false, Message = ex.Message });
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private async Task EntityIndexSessionDeleteUnindexedEntities(EntityIndexSessionData session)
|
||||||
|
{
|
||||||
|
var entityGroupsBySearchdomain = session.AccumulatedEntities.GroupBy(e => e.Searchdomain);
|
||||||
|
|
||||||
|
foreach (var entityGroup in entityGroupsBySearchdomain)
|
||||||
|
{
|
||||||
|
string searchdomainName = entityGroup.Key;
|
||||||
|
var entityNamesInRequest = entityGroup.Select(e => e.Name).ToHashSet();
|
||||||
|
|
||||||
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) =
|
||||||
|
SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomainName, _logger);
|
||||||
|
|
||||||
|
if (searchdomain_ is not null && httpStatusCode is null) // If getting searchdomain was successful
|
||||||
|
{
|
||||||
|
var entitiesToDelete = searchdomain_.EntityCache
|
||||||
|
.Where(kvp => !entityNamesInRequest.Contains(kvp.Value.Name))
|
||||||
|
.Select(kvp => kvp.Value)
|
||||||
|
.ToList();
|
||||||
|
|
||||||
|
foreach (var entity in entitiesToDelete)
|
||||||
|
{
|
||||||
|
searchdomain_.ReconciliateOrInvalidateCacheForDeletedEntity(entity);
|
||||||
|
await _databaseHelper.RemoveEntity(
|
||||||
|
[],
|
||||||
|
_domainManager.Helper,
|
||||||
|
entity.Name,
|
||||||
|
searchdomainName);
|
||||||
|
searchdomain_.EntityCache.TryRemove(entity.Name, out _);
|
||||||
|
_logger.LogInformation("Deleted entity {entityName} from {searchdomain}", entity.Name, searchdomainName);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
_logger.LogWarning("Unable to delete entities for searchdomain {searchdomain}", searchdomainName);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// Deletes an entity
|
/// Deletes an entity
|
||||||
/// </summary>
|
/// </summary>
|
||||||
/// <param name="searchdomain">Name of the searchdomain</param>
|
/// <param name="searchdomain">Name of the searchdomain</param>
|
||||||
/// <param name="entityName">Name of the entity</param>
|
/// <param name="entityName">Name of the entity</param>
|
||||||
[HttpDelete]
|
[HttpDelete]
|
||||||
public ActionResult<EntityDeleteResults> Delete(string searchdomain, string entityName)
|
public async Task<ActionResult<EntityDeleteResults>> Delete(string searchdomain, string entityName)
|
||||||
{
|
{
|
||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
|
|
||||||
Entity? entity_ = SearchdomainHelper.CacheGetEntity(searchdomain_.entityCache, entityName);
|
Entity? entity_ = SearchdomainHelper.CacheGetEntity(searchdomain_.EntityCache, entityName);
|
||||||
if (entity_ is null)
|
if (entity_ is null)
|
||||||
{
|
{
|
||||||
_logger.LogError("Unable to delete the entity {entityName} in {searchdomain} - it was not found under the specified name", [entityName, searchdomain]);
|
_logger.LogError("Unable to delete the entity {entityName} in {searchdomain} - it was not found under the specified name", [entityName, searchdomain]);
|
||||||
|
ElmahCore.ElmahExtensions.RaiseError(
|
||||||
|
new Exception(
|
||||||
|
$"Unable to delete the entity {entityName} in {searchdomain} - it was not found under the specified name"
|
||||||
|
)
|
||||||
|
);
|
||||||
return Ok(new EntityDeleteResults() {Success = false, Message = "Entity not found"});
|
return Ok(new EntityDeleteResults() {Success = false, Message = "Entity not found"});
|
||||||
}
|
}
|
||||||
searchdomain_.ReconciliateOrInvalidateCacheForDeletedEntity(entity_);
|
searchdomain_.ReconciliateOrInvalidateCacheForDeletedEntity(entity_);
|
||||||
_databaseHelper.RemoveEntity([], _domainManager.helper, entityName, searchdomain);
|
await _databaseHelper.RemoveEntity([], _domainManager.Helper, entityName, searchdomain);
|
||||||
searchdomain_.entityCache.RemoveAll(entity => entity.name == entityName);
|
|
||||||
return Ok(new EntityDeleteResults() {Success = true});
|
bool success = searchdomain_.EntityCache.TryRemove(entityName, out Entity? _);
|
||||||
|
|
||||||
|
return Ok(new EntityDeleteResults() {Success = success});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private void CleanupExpiredEntityIndexSessions()
|
||||||
|
{
|
||||||
|
lock (_sessionLock)
|
||||||
|
{
|
||||||
|
var expiredSessions = _sessions
|
||||||
|
.Where(kvp => (DateTime.UtcNow - kvp.Value.LastInteractionAt).TotalMinutes > SessionTimeoutMinutes)
|
||||||
|
.Select(kvp => kvp.Key)
|
||||||
|
.ToList();
|
||||||
|
|
||||||
|
foreach (var sessionId in expiredSessions)
|
||||||
|
{
|
||||||
|
_sessions.Remove(sessionId);
|
||||||
|
_logger.LogWarning("Removed expired, non-closed session {sessionId}", sessionId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private EntityIndexSessionData GetOrCreateEntityIndexSession(string sessionId)
|
||||||
|
{
|
||||||
|
lock (_sessionLock)
|
||||||
|
{
|
||||||
|
if (!_sessions.TryGetValue(sessionId, out var session))
|
||||||
|
{
|
||||||
|
session = new EntityIndexSessionData();
|
||||||
|
_sessions[sessionId] = session;
|
||||||
|
} else
|
||||||
|
{
|
||||||
|
session.LastInteractionAt = DateTime.UtcNow;
|
||||||
|
}
|
||||||
|
return session;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public class EntityIndexSessionData
|
||||||
|
{
|
||||||
|
public List<Entity> AccumulatedEntities { get; set; } = [];
|
||||||
|
public DateTime LastInteractionAt { get; set; } = DateTime.UtcNow;
|
||||||
|
}
|
||||||
@@ -35,11 +35,11 @@ public class HomeController : Controller
|
|||||||
|
|
||||||
[Authorize]
|
[Authorize]
|
||||||
[HttpGet("Searchdomains")]
|
[HttpGet("Searchdomains")]
|
||||||
public IActionResult Searchdomains()
|
public async Task<ActionResult> Searchdomains()
|
||||||
{
|
{
|
||||||
HomeIndexViewModel viewModel = new()
|
HomeIndexViewModel viewModel = new()
|
||||||
{
|
{
|
||||||
Searchdomains = _domainManager.ListSearchdomains()
|
Searchdomains = await _domainManager.ListSearchdomainsAsync()
|
||||||
};
|
};
|
||||||
return View(viewModel);
|
return View(viewModel);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ using Microsoft.AspNetCore.Http.HttpResults;
|
|||||||
using Microsoft.AspNetCore.Mvc;
|
using Microsoft.AspNetCore.Mvc;
|
||||||
using Server.Exceptions;
|
using Server.Exceptions;
|
||||||
using Server.Helper;
|
using Server.Helper;
|
||||||
|
using Shared;
|
||||||
using Shared.Models;
|
using Shared.Models;
|
||||||
|
|
||||||
namespace Server.Controllers;
|
namespace Server.Controllers;
|
||||||
@@ -28,12 +29,12 @@ public class SearchdomainController : ControllerBase
|
|||||||
/// Lists all searchdomains
|
/// Lists all searchdomains
|
||||||
/// </summary>
|
/// </summary>
|
||||||
[HttpGet("/Searchdomains")]
|
[HttpGet("/Searchdomains")]
|
||||||
public ActionResult<SearchdomainListResults> List()
|
public async Task<ActionResult<SearchdomainListResults>> List()
|
||||||
{
|
{
|
||||||
List<string> results;
|
List<string> results;
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
results = _domainManager.ListSearchdomains();
|
results = await _domainManager.ListSearchdomainsAsync();
|
||||||
}
|
}
|
||||||
catch (Exception)
|
catch (Exception)
|
||||||
{
|
{
|
||||||
@@ -50,11 +51,15 @@ public class SearchdomainController : ControllerBase
|
|||||||
/// <param name="searchdomain">Name of the searchdomain</param>
|
/// <param name="searchdomain">Name of the searchdomain</param>
|
||||||
/// <param name="settings">Optional initial settings</param>
|
/// <param name="settings">Optional initial settings</param>
|
||||||
[HttpPost]
|
[HttpPost]
|
||||||
public ActionResult<SearchdomainCreateResults> Create([Required]string searchdomain, [FromBody]SearchdomainSettings settings = new())
|
public async Task<ActionResult<SearchdomainCreateResults>> Create([Required]string searchdomain, [FromBody]SearchdomainSettings settings = new())
|
||||||
{
|
{
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
int id = _domainManager.CreateSearchdomain(searchdomain, settings);
|
if (settings.QueryCacheSize <= 0)
|
||||||
|
{
|
||||||
|
settings.QueryCacheSize = 1_000_000; // TODO get rid of this magic number
|
||||||
|
}
|
||||||
|
int id = await _domainManager.CreateSearchdomain(searchdomain, settings);
|
||||||
return Ok(new SearchdomainCreateResults(){Id = id, Success = true});
|
return Ok(new SearchdomainCreateResults(){Id = id, Success = true});
|
||||||
} catch (Exception)
|
} catch (Exception)
|
||||||
{
|
{
|
||||||
@@ -68,7 +73,7 @@ public class SearchdomainController : ControllerBase
|
|||||||
/// </summary>
|
/// </summary>
|
||||||
/// <param name="searchdomain">Name of the searchdomain</param>
|
/// <param name="searchdomain">Name of the searchdomain</param>
|
||||||
[HttpDelete]
|
[HttpDelete]
|
||||||
public ActionResult<SearchdomainDeleteResults> Delete([Required]string searchdomain)
|
public async Task<ActionResult<SearchdomainDeleteResults>> Delete([Required]string searchdomain)
|
||||||
{
|
{
|
||||||
bool success;
|
bool success;
|
||||||
int deletedEntries;
|
int deletedEntries;
|
||||||
@@ -76,7 +81,7 @@ public class SearchdomainController : ControllerBase
|
|||||||
try
|
try
|
||||||
{
|
{
|
||||||
success = true;
|
success = true;
|
||||||
deletedEntries = _domainManager.DeleteSearchdomain(searchdomain);
|
deletedEntries = await _domainManager.DeleteSearchdomain(searchdomain);
|
||||||
}
|
}
|
||||||
catch (SearchdomainNotFoundException ex)
|
catch (SearchdomainNotFoundException ex)
|
||||||
{
|
{
|
||||||
@@ -104,7 +109,7 @@ public class SearchdomainController : ControllerBase
|
|||||||
/// <param name="newName">Updated name of the searchdomain</param>
|
/// <param name="newName">Updated name of the searchdomain</param>
|
||||||
/// <param name="settings">Updated settings of searchdomain</param>
|
/// <param name="settings">Updated settings of searchdomain</param>
|
||||||
[HttpPut]
|
[HttpPut]
|
||||||
public ActionResult<SearchdomainUpdateResults> Update([Required]string searchdomain, string newName, [FromBody]SearchdomainSettings? settings)
|
public async Task<ActionResult<SearchdomainUpdateResults>> Update([Required]string searchdomain, string newName, [FromBody]SearchdomainSettings? settings)
|
||||||
{
|
{
|
||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
@@ -113,18 +118,18 @@ public class SearchdomainController : ControllerBase
|
|||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
{"name", newName},
|
{"name", newName},
|
||||||
{"id", searchdomain_.id}
|
{"id", searchdomain_.Id}
|
||||||
};
|
};
|
||||||
searchdomain_.helper.ExecuteSQLNonQuery("UPDATE searchdomain set name = @name WHERE id = @id", parameters);
|
await searchdomain_.Helper.ExecuteSQLNonQuery("UPDATE searchdomain set name = @name WHERE id = @id", parameters);
|
||||||
} else
|
} else
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
{"name", newName},
|
{"name", newName},
|
||||||
{"settings", settings},
|
{"settings", settings},
|
||||||
{"id", searchdomain_.id}
|
{"id", searchdomain_.Id}
|
||||||
};
|
};
|
||||||
searchdomain_.helper.ExecuteSQLNonQuery("UPDATE searchdomain set name = @name, settings = @settings WHERE id = @id", parameters);
|
await searchdomain_.Helper.ExecuteSQLNonQuery("UPDATE searchdomain set name = @name, settings = @settings WHERE id = @id", parameters);
|
||||||
}
|
}
|
||||||
return Ok(new SearchdomainUpdateResults(){Success = true});
|
return Ok(new SearchdomainUpdateResults(){Success = true});
|
||||||
}
|
}
|
||||||
@@ -134,13 +139,13 @@ public class SearchdomainController : ControllerBase
|
|||||||
/// </summary>
|
/// </summary>
|
||||||
/// <param name="searchdomain">Name of the searchdomain</param>
|
/// <param name="searchdomain">Name of the searchdomain</param>
|
||||||
[HttpGet("Queries")]
|
[HttpGet("Queries")]
|
||||||
public ActionResult<SearchdomainSearchesResults> GetQueries([Required]string searchdomain)
|
public ActionResult<SearchdomainQueriesResults> GetQueries([Required]string searchdomain)
|
||||||
{
|
{
|
||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
Dictionary<string, DateTimedSearchResult> searchCache = searchdomain_.searchCache;
|
Dictionary<string, DateTimedSearchResult> searchCache = searchdomain_.QueryCache.AsDictionary();
|
||||||
|
|
||||||
return Ok(new SearchdomainSearchesResults() { Searches = searchCache, Success = true });
|
return Ok(new SearchdomainQueriesResults() { Searches = searchCache, Success = true });
|
||||||
}
|
}
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
@@ -160,7 +165,7 @@ public class SearchdomainController : ControllerBase
|
|||||||
{
|
{
|
||||||
Name = r.Item2,
|
Name = r.Item2,
|
||||||
Value = r.Item1,
|
Value = r.Item1,
|
||||||
Attributes = returnAttributes ? (searchdomain_.entityCache.FirstOrDefault(x => x.name == r.Item2)?.attributes ?? null) : null
|
Attributes = returnAttributes ? (searchdomain_.EntityCache[r.Item2]?.Attributes ?? null) : null
|
||||||
})];
|
})];
|
||||||
return Ok(new EntityQueryResults(){Results = queryResults, Success = true });
|
return Ok(new EntityQueryResults(){Results = queryResults, Success = true });
|
||||||
}
|
}
|
||||||
@@ -175,7 +180,7 @@ public class SearchdomainController : ControllerBase
|
|||||||
{
|
{
|
||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
Dictionary<string, DateTimedSearchResult> searchCache = searchdomain_.searchCache;
|
EnumerableLruCache<string, DateTimedSearchResult> searchCache = searchdomain_.QueryCache;
|
||||||
bool containsKey = searchCache.ContainsKey(query);
|
bool containsKey = searchCache.ContainsKey(query);
|
||||||
if (containsKey)
|
if (containsKey)
|
||||||
{
|
{
|
||||||
@@ -196,7 +201,7 @@ public class SearchdomainController : ControllerBase
|
|||||||
{
|
{
|
||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
Dictionary<string, DateTimedSearchResult> searchCache = searchdomain_.searchCache;
|
EnumerableLruCache<string, DateTimedSearchResult> searchCache = searchdomain_.QueryCache;
|
||||||
bool containsKey = searchCache.ContainsKey(query);
|
bool containsKey = searchCache.ContainsKey(query);
|
||||||
if (containsKey)
|
if (containsKey)
|
||||||
{
|
{
|
||||||
@@ -217,7 +222,7 @@ public class SearchdomainController : ControllerBase
|
|||||||
{
|
{
|
||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
SearchdomainSettings settings = searchdomain_.settings;
|
SearchdomainSettings settings = searchdomain_.Settings;
|
||||||
return Ok(new SearchdomainSettingsResults() { Settings = settings, Success = true });
|
return Ok(new SearchdomainSettingsResults() { Settings = settings, Success = true });
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -225,18 +230,20 @@ public class SearchdomainController : ControllerBase
|
|||||||
/// Update the settings of a searchdomain
|
/// Update the settings of a searchdomain
|
||||||
/// </summary>
|
/// </summary>
|
||||||
/// <param name="searchdomain">Name of the searchdomain</param>
|
/// <param name="searchdomain">Name of the searchdomain</param>
|
||||||
|
/// <param name="request">Settings to apply to the searchdomain</param>
|
||||||
[HttpPut("Settings")]
|
[HttpPut("Settings")]
|
||||||
public ActionResult<SearchdomainUpdateResults> UpdateSettings([Required]string searchdomain, [Required][FromBody] SearchdomainSettings request)
|
public async Task<ActionResult<SearchdomainUpdateResults>> UpdateSettings([Required]string searchdomain, [Required][FromBody] SearchdomainSettings request)
|
||||||
{
|
{
|
||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
{"settings", JsonSerializer.Serialize(request)},
|
{"settings", JsonSerializer.Serialize(request)},
|
||||||
{"id", searchdomain_.id}
|
{"id", searchdomain_.Id}
|
||||||
};
|
};
|
||||||
searchdomain_.helper.ExecuteSQLNonQuery("UPDATE searchdomain set settings = @settings WHERE id = @id", parameters);
|
await searchdomain_.Helper.ExecuteSQLNonQuery("UPDATE searchdomain set settings = @settings WHERE id = @id", parameters);
|
||||||
searchdomain_.settings = request;
|
searchdomain_.Settings = request;
|
||||||
|
searchdomain_.QueryCache.Capacity = request.QueryCacheSize;
|
||||||
return Ok(new SearchdomainUpdateResults(){Success = true});
|
return Ok(new SearchdomainUpdateResults(){Success = true});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -245,19 +252,17 @@ public class SearchdomainController : ControllerBase
|
|||||||
/// </summary>
|
/// </summary>
|
||||||
/// <param name="searchdomain">Name of the searchdomain</param>
|
/// <param name="searchdomain">Name of the searchdomain</param>
|
||||||
[HttpGet("QueryCache/Size")]
|
[HttpGet("QueryCache/Size")]
|
||||||
public ActionResult<SearchdomainSearchCacheSizeResults> GetSearchCacheSize([Required]string searchdomain)
|
public ActionResult<SearchdomainQueryCacheSizeResults> GetQueryCacheSize([Required]string searchdomain)
|
||||||
{
|
{
|
||||||
|
if (!SearchdomainHelper.IsSearchdomainLoaded(_domainManager, searchdomain))
|
||||||
|
{
|
||||||
|
return Ok(new SearchdomainQueryCacheSizeResults() { SizeBytes = 0, ElementCount = 0, ElementMaxCount = 0, Success = true });
|
||||||
|
}
|
||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
Dictionary<string, DateTimedSearchResult> searchCache = searchdomain_.searchCache;
|
int elementCount = searchdomain_.QueryCache.Count;
|
||||||
long sizeInBytes = 0;
|
int ElementMaxCount = searchdomain_.Settings.QueryCacheSize;
|
||||||
foreach (var entry in searchCache)
|
return Ok(new SearchdomainQueryCacheSizeResults() { SizeBytes = searchdomain_.GetSearchCacheSize(), ElementCount = elementCount, ElementMaxCount = ElementMaxCount, Success = true });
|
||||||
{
|
|
||||||
sizeInBytes += sizeof(int); // string length prefix
|
|
||||||
sizeInBytes += entry.Key.Length * sizeof(char); // string characters
|
|
||||||
sizeInBytes += entry.Value.EstimateSize();
|
|
||||||
}
|
|
||||||
return Ok(new SearchdomainSearchCacheSizeResults() { QueryCacheSizeBytes = sizeInBytes, Success = true });
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
@@ -282,7 +287,7 @@ public class SearchdomainController : ControllerBase
|
|||||||
{
|
{
|
||||||
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_domainManager, searchdomain, _logger);
|
||||||
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new SearchdomainUpdateResults(){Success = false, Message = message});
|
||||||
long sizeInBytes = DatabaseHelper.GetSearchdomainDatabaseSize(searchdomain_.helper, searchdomain);
|
long EmbeddingCacheUtilization = DatabaseHelper.GetSearchdomainDatabaseSize(searchdomain_.Helper, searchdomain);
|
||||||
return Ok(new SearchdomainGetDatabaseSizeResult() { SearchdomainDatabaseSizeBytes = sizeInBytes, Success = true });
|
return Ok(new SearchdomainGetDatabaseSizeResult() { SearchdomainDatabaseSizeBytes = EmbeddingCacheUtilization, Success = true });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
namespace Server.Controllers;
|
namespace Server.Controllers;
|
||||||
|
|
||||||
using System.Reflection;
|
|
||||||
using System.Text.Json;
|
|
||||||
using AdaptiveExpressions;
|
|
||||||
using ElmahCore;
|
using ElmahCore;
|
||||||
using Microsoft.AspNetCore.Mvc;
|
using Microsoft.AspNetCore.Mvc;
|
||||||
using Server.Exceptions;
|
using Microsoft.Extensions.Options;
|
||||||
using Server.Helper;
|
using Server.Helper;
|
||||||
|
using Server.Models;
|
||||||
|
using Shared;
|
||||||
using Shared.Models;
|
using Shared.Models;
|
||||||
|
|
||||||
[ApiController]
|
[ApiController]
|
||||||
@@ -17,13 +16,15 @@ public class ServerController : ControllerBase
|
|||||||
private readonly IConfiguration _config;
|
private readonly IConfiguration _config;
|
||||||
private AIProvider _aIProvider;
|
private AIProvider _aIProvider;
|
||||||
private readonly SearchdomainManager _searchdomainManager;
|
private readonly SearchdomainManager _searchdomainManager;
|
||||||
|
private readonly IOptions<EmbeddingSearchOptions> _options;
|
||||||
|
|
||||||
public ServerController(ILogger<ServerController> logger, IConfiguration config, AIProvider aIProvider, SearchdomainManager searchdomainManager)
|
public ServerController(ILogger<ServerController> logger, IConfiguration config, AIProvider aIProvider, SearchdomainManager searchdomainManager, IOptions<EmbeddingSearchOptions> options)
|
||||||
{
|
{
|
||||||
_logger = logger;
|
_logger = logger;
|
||||||
_config = config;
|
_config = config;
|
||||||
_aIProvider = aIProvider;
|
_aIProvider = aIProvider;
|
||||||
_searchdomainManager = searchdomainManager;
|
_searchdomainManager = searchdomainManager;
|
||||||
|
_options = options;
|
||||||
}
|
}
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
@@ -47,31 +48,74 @@ public class ServerController : ControllerBase
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// Gets the total memory size of the embedding cache
|
/// Gets numeric info regarding the searchdomains
|
||||||
/// </summary>
|
/// </summary>
|
||||||
[HttpGet("EmbeddingCache/Size")]
|
[HttpGet("Stats")]
|
||||||
public ActionResult<ServerGetEmbeddingCacheSizeResult> GetEmbeddingCacheSize()
|
public async Task<ActionResult<ServerGetStatsResult>> Stats()
|
||||||
{
|
{
|
||||||
long size = 0;
|
try
|
||||||
long elementCount = 0;
|
|
||||||
long embeddingsCount = 0;
|
|
||||||
LRUCache<string, Dictionary<string, float[]>> embeddingCache = _searchdomainManager.embeddingCache;
|
|
||||||
var cacheListField = embeddingCache.GetType()
|
|
||||||
.GetField("_cacheList", BindingFlags.Instance | BindingFlags.NonPublic) ?? throw new InvalidOperationException("_cacheList field not found"); // TODO Remove this unsafe reflection atrocity
|
|
||||||
LinkedList<string> cacheListOriginal = (LinkedList<string>)cacheListField.GetValue(embeddingCache)!;
|
|
||||||
LinkedList<string> cacheList = new(cacheListOriginal);
|
|
||||||
|
|
||||||
foreach (string key in cacheList)
|
|
||||||
{
|
{
|
||||||
if (!embeddingCache.TryGet(key, out var entry))
|
long size = 0;
|
||||||
continue;
|
long elementCount = 0;
|
||||||
|
long embeddingsCount = 0;
|
||||||
|
EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache = _searchdomainManager.EmbeddingCache;
|
||||||
|
|
||||||
// estimate size
|
foreach (KeyValuePair<string, Dictionary<string, float[]>> kv in embeddingCache)
|
||||||
size += EstimateEntrySize(key, entry);
|
{
|
||||||
elementCount++;
|
string key = kv.Key;
|
||||||
embeddingsCount += entry.Keys.Count;
|
Dictionary<string, float[]> entry = kv.Value;
|
||||||
|
size += EstimateEntrySize(key, entry);
|
||||||
|
elementCount++;
|
||||||
|
embeddingsCount += entry.Keys.Count;
|
||||||
|
}
|
||||||
|
var sqlHelper = _searchdomainManager.Helper;
|
||||||
|
var databaseTotalSize = DatabaseHelper.GetTotalDatabaseSize(sqlHelper);
|
||||||
|
Task<long> entityCountTask = DatabaseHelper.CountEntities(sqlHelper);
|
||||||
|
long queryCacheUtilization = 0;
|
||||||
|
long queryCacheElementCount = 0;
|
||||||
|
long queryCacheMaxElementCountAll = 0;
|
||||||
|
long queryCacheMaxElementCountLoadedSearchdomainsOnly = 0;
|
||||||
|
foreach (string searchdomain in await _searchdomainManager.ListSearchdomainsAsync())
|
||||||
|
{
|
||||||
|
if (SearchdomainHelper.IsSearchdomainLoaded(_searchdomainManager, searchdomain))
|
||||||
|
{
|
||||||
|
(Searchdomain? searchdomain_, int? httpStatusCode, string? message) = SearchdomainHelper.TryGetSearchdomain(_searchdomainManager, searchdomain, _logger);
|
||||||
|
if (searchdomain_ is null || httpStatusCode is not null) return StatusCode(httpStatusCode ?? 500, new ServerGetStatsResult(){Success = false, Message = message});
|
||||||
|
queryCacheUtilization += searchdomain_.GetSearchCacheSize();
|
||||||
|
queryCacheElementCount += searchdomain_.QueryCache.Count;
|
||||||
|
queryCacheMaxElementCountAll += searchdomain_.QueryCache.Capacity;
|
||||||
|
queryCacheMaxElementCountLoadedSearchdomainsOnly += searchdomain_.QueryCache.Capacity;
|
||||||
|
} else
|
||||||
|
{
|
||||||
|
var searchdomainSettings = DatabaseHelper.GetSearchdomainSettings(sqlHelper, searchdomain);
|
||||||
|
queryCacheMaxElementCountAll += searchdomainSettings.QueryCacheSize;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
long entityCount = await entityCountTask;
|
||||||
|
GC.Collect();
|
||||||
|
GC.WaitForPendingFinalizers();
|
||||||
|
GC.Collect();
|
||||||
|
long ramTotalSize = GC.GetTotalMemory(false);
|
||||||
|
|
||||||
|
return new ServerGetStatsResult() {
|
||||||
|
Success = true,
|
||||||
|
EntityCount = entityCount,
|
||||||
|
QueryCacheUtilization = queryCacheUtilization,
|
||||||
|
QueryCacheElementCount = queryCacheElementCount,
|
||||||
|
QueryCacheMaxElementCountAll = queryCacheMaxElementCountAll,
|
||||||
|
QueryCacheMaxElementCountLoadedSearchdomainsOnly = queryCacheMaxElementCountLoadedSearchdomainsOnly,
|
||||||
|
EmbeddingCacheUtilization = size,
|
||||||
|
EmbeddingCacheMaxElementCount = _searchdomainManager.EmbeddingCacheMaxCount,
|
||||||
|
EmbeddingCacheElementCount = elementCount,
|
||||||
|
EmbeddingsCount = embeddingsCount,
|
||||||
|
DatabaseTotalSize = databaseTotalSize,
|
||||||
|
RamTotalSize = ramTotalSize
|
||||||
|
};
|
||||||
|
} catch (Exception ex)
|
||||||
|
{
|
||||||
|
ElmahExtensions.RaiseError(ex);
|
||||||
|
return StatusCode(500, new ServerGetStatsResult(){Success = false, Message = ex.Message});
|
||||||
}
|
}
|
||||||
return new ServerGetEmbeddingCacheSizeResult() { Success = true, SizeInBytes = size, MaxElementCount = _searchdomainManager.EmbeddingCacheMaxCount, ElementCount = elementCount, EmbeddingsCount = embeddingsCount};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private static long EstimateEntrySize(string key, Dictionary<string, float[]> value)
|
private static long EstimateEntrySize(string key, Dictionary<string, float[]> value)
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,61 +1,141 @@
|
|||||||
using AdaptiveExpressions;
|
using System.Collections.Concurrent;
|
||||||
using OllamaSharp;
|
using Shared;
|
||||||
using OllamaSharp.Models;
|
using Shared.Models;
|
||||||
|
|
||||||
namespace Server;
|
namespace Server;
|
||||||
|
|
||||||
public class Datapoint
|
public class Datapoint
|
||||||
{
|
{
|
||||||
public string name;
|
public string Name;
|
||||||
public ProbMethod probMethod;
|
public ProbMethod ProbMethod;
|
||||||
public SimilarityMethod similarityMethod;
|
public SimilarityMethod SimilarityMethod;
|
||||||
public List<(string, float[])> embeddings;
|
public List<(string, float[])> Embeddings;
|
||||||
public string hash;
|
public string Hash;
|
||||||
|
public int Id;
|
||||||
|
|
||||||
public Datapoint(string name, ProbMethod probMethod, SimilarityMethod similarityMethod, string hash, List<(string, float[])> embeddings)
|
public Datapoint(string name, ProbMethodEnum probMethod, SimilarityMethodEnum similarityMethod, string hash, List<(string, float[])> embeddings, int id)
|
||||||
{
|
{
|
||||||
this.name = name;
|
Name = name;
|
||||||
this.probMethod = probMethod;
|
ProbMethod = new ProbMethod(probMethod);
|
||||||
this.similarityMethod = similarityMethod;
|
SimilarityMethod = new SimilarityMethod(similarityMethod);
|
||||||
this.hash = hash;
|
Hash = hash;
|
||||||
this.embeddings = embeddings;
|
Embeddings = embeddings;
|
||||||
|
Id = id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Datapoint(string name, ProbMethod probMethod, SimilarityMethod similarityMethod, string hash, List<(string, float[])> embeddings, int id)
|
||||||
|
{
|
||||||
|
Name = name;
|
||||||
|
ProbMethod = probMethod;
|
||||||
|
SimilarityMethod = similarityMethod;
|
||||||
|
Hash = hash;
|
||||||
|
Embeddings = embeddings;
|
||||||
|
Id = id;
|
||||||
}
|
}
|
||||||
|
|
||||||
public float CalcProbability(List<(string, float)> probabilities)
|
public float CalcProbability(List<(string, float)> probabilities)
|
||||||
{
|
{
|
||||||
return probMethod.method(probabilities);
|
return ProbMethod.Method(probabilities);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static Dictionary<string, float[]> GenerateEmbeddings(string content, List<string> models, AIProvider aIProvider)
|
public static Dictionary<string, float[]> GetEmbeddings(string content, ConcurrentBag<string> models, AIProvider aIProvider, EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache)
|
||||||
{
|
{
|
||||||
return GenerateEmbeddings(content, models, aIProvider, new());
|
Dictionary<string, float[]> embeddings = [];
|
||||||
}
|
bool embeddingCacheHasContent = embeddingCache.TryGetValue(content, out var embeddingCacheForContent);
|
||||||
|
if (!embeddingCacheHasContent || embeddingCacheForContent is null)
|
||||||
public static Dictionary<string, float[]> GenerateEmbeddings(string content, List<string> models, AIProvider aIProvider, LRUCache<string, Dictionary<string, float[]>> embeddingCache)
|
{
|
||||||
{
|
foreach (string model in models)
|
||||||
Dictionary<string, float[]> retVal = [];
|
{
|
||||||
|
embeddings[model] = GenerateEmbeddings(content, model, aIProvider, embeddingCache);
|
||||||
|
}
|
||||||
|
return embeddings;
|
||||||
|
}
|
||||||
foreach (string model in models)
|
foreach (string model in models)
|
||||||
{
|
{
|
||||||
bool embeddingCacheHasModel = embeddingCache.TryGet(model, out var embeddingCacheForModel);
|
bool embeddingCacheHasModel = embeddingCacheForContent.TryGetValue(model, out float[]? embeddingCacheForModel);
|
||||||
if (embeddingCacheHasModel && embeddingCacheForModel.ContainsKey(content))
|
if (embeddingCacheHasModel && embeddingCacheForModel is not null)
|
||||||
{
|
{
|
||||||
retVal[model] = embeddingCacheForModel[content];
|
embeddings[model] = embeddingCacheForModel;
|
||||||
continue;
|
} else
|
||||||
}
|
|
||||||
var response = aIProvider.GenerateEmbeddings(model, [content]);
|
|
||||||
if (response is not null)
|
|
||||||
{
|
{
|
||||||
retVal[model] = response;
|
embeddings[model] = GenerateEmbeddings(content, model, aIProvider, embeddingCache);
|
||||||
if (!embeddingCacheHasModel)
|
|
||||||
{
|
|
||||||
embeddingCacheForModel = [];
|
|
||||||
}
|
|
||||||
if (!embeddingCacheForModel.ContainsKey(content))
|
|
||||||
{
|
|
||||||
embeddingCacheForModel[content] = response;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return retVal;
|
return embeddings;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static Dictionary<string, Dictionary<string, float[]>> GetEmbeddings(string[] content, List<string> models, AIProvider aIProvider, EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache)
|
||||||
|
{
|
||||||
|
Dictionary<string, Dictionary<string, float[]>> embeddings = [];
|
||||||
|
foreach (string model in models)
|
||||||
|
{
|
||||||
|
List<string> toBeGenerated = [];
|
||||||
|
embeddings[model] = [];
|
||||||
|
foreach (string value in content)
|
||||||
|
{
|
||||||
|
bool generateThisEntry = true;
|
||||||
|
bool embeddingCacheHasContent = embeddingCache.TryGetValue(value, out var embeddingCacheForContent);
|
||||||
|
if (embeddingCacheHasContent && embeddingCacheForContent is not null)
|
||||||
|
{
|
||||||
|
bool embeddingCacheHasModel = embeddingCacheForContent.TryGetValue(model, out float[]? embedding);
|
||||||
|
if (embeddingCacheHasModel && embedding is not null)
|
||||||
|
{
|
||||||
|
embeddings[model][value] = embedding;
|
||||||
|
generateThisEntry = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (generateThisEntry)
|
||||||
|
{
|
||||||
|
if (!toBeGenerated.Contains(value))
|
||||||
|
{
|
||||||
|
toBeGenerated.Add(value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (toBeGenerated.Count == 0)
|
||||||
|
{
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
IEnumerable<float[]> generatedEmbeddings = GenerateEmbeddings([.. toBeGenerated], model, aIProvider, embeddingCache);
|
||||||
|
if (generatedEmbeddings.Count() != toBeGenerated.Count)
|
||||||
|
{
|
||||||
|
throw new Exception("Requested embeddings count and generated embeddings count mismatched!");
|
||||||
|
}
|
||||||
|
for (int i = 0; i < toBeGenerated.Count; i++)
|
||||||
|
{
|
||||||
|
embeddings[model][toBeGenerated.ElementAt(i)] = generatedEmbeddings.ElementAt(i);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return embeddings;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static IEnumerable<float[]> GenerateEmbeddings(string[] content, string model, AIProvider aIProvider, EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache)
|
||||||
|
{
|
||||||
|
IEnumerable<float[]> embeddings = aIProvider.GenerateEmbeddings(model, content);
|
||||||
|
if (embeddings.Count() != content.Length)
|
||||||
|
{
|
||||||
|
throw new Exception("Resulting embeddings count does not match up with request count");
|
||||||
|
}
|
||||||
|
for (int i = 0; i < content.Length; i++)
|
||||||
|
{
|
||||||
|
if (!embeddingCache.ContainsKey(content[i]))
|
||||||
|
{
|
||||||
|
embeddingCache[content[i]] = [];
|
||||||
|
}
|
||||||
|
embeddingCache[content[i]][model] = embeddings.ElementAt(i);
|
||||||
|
}
|
||||||
|
return embeddings;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public static float[] GenerateEmbeddings(string content, string model, AIProvider aIProvider, EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache)
|
||||||
|
{
|
||||||
|
float[] embeddings = aIProvider.GenerateEmbeddings(model, content);
|
||||||
|
if (!embeddingCache.ContainsKey(content))
|
||||||
|
{
|
||||||
|
embeddingCache[content] = [];
|
||||||
|
}
|
||||||
|
embeddingCache[content][model] = embeddings;
|
||||||
|
return embeddings;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1,10 +1,10 @@
|
|||||||
FROM mcr.microsoft.com/dotnet/sdk:8.0 AS build
|
FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
|
||||||
WORKDIR /build
|
WORKDIR /build
|
||||||
COPY . .
|
COPY . .
|
||||||
RUN dotnet restore ./Server.csproj
|
RUN dotnet restore Server/Server.csproj
|
||||||
RUN dotnet publish ./Server.csproj -c Release -o /output
|
RUN dotnet publish Server/Server.csproj -c Release -o /output
|
||||||
|
|
||||||
FROM mcr.microsoft.com/dotnet/aspnet:8.0 AS final
|
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS final
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY --from=build /output .
|
COPY --from=build /output .
|
||||||
ENV ASPNETCORE_ENVIRONMENT Docker
|
ENV ASPNETCORE_ENVIRONMENT Docker
|
||||||
|
|||||||
@@ -1,11 +1,14 @@
|
|||||||
|
using System.Collections.Concurrent;
|
||||||
|
|
||||||
namespace Server;
|
namespace Server;
|
||||||
|
|
||||||
public class Entity(Dictionary<string, string> attributes, Probmethods.probMethodDelegate probMethod, string probMethodName, List<Datapoint> datapoints, string name)
|
public class Entity(Dictionary<string, string> attributes, Probmethods.ProbMethodDelegate probMethod, string probMethodName, ConcurrentBag<Datapoint> datapoints, string name, string searchdomain)
|
||||||
{
|
{
|
||||||
public Dictionary<string, string> attributes = attributes;
|
public Dictionary<string, string> Attributes = attributes;
|
||||||
public Probmethods.probMethodDelegate probMethod = probMethod;
|
public Probmethods.ProbMethodDelegate ProbMethod = probMethod;
|
||||||
public string probMethodName = probMethodName;
|
public string ProbMethodName = probMethodName;
|
||||||
public List<Datapoint> datapoints = datapoints;
|
public ConcurrentBag<Datapoint> Datapoints = datapoints;
|
||||||
public int id;
|
public int Id;
|
||||||
public string name = name;
|
public string Name = name;
|
||||||
|
public string Searchdomain = searchdomain;
|
||||||
}
|
}
|
||||||
@@ -12,33 +12,33 @@ public class DatabaseHealthCheck : IHealthCheck
|
|||||||
_searchdomainManager = searchdomainManager;
|
_searchdomainManager = searchdomainManager;
|
||||||
_logger = logger;
|
_logger = logger;
|
||||||
}
|
}
|
||||||
public Task<HealthCheckResult> CheckHealthAsync(
|
public async Task<HealthCheckResult> CheckHealthAsync(
|
||||||
HealthCheckContext context, CancellationToken cancellationToken = default)
|
HealthCheckContext context, CancellationToken cancellationToken = default)
|
||||||
{
|
{
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
DatabaseMigrations.DatabaseGetVersion(_searchdomainManager.helper);
|
DatabaseMigrations.DatabaseGetVersion(_searchdomainManager.Helper);
|
||||||
}
|
}
|
||||||
catch (Exception ex)
|
catch (Exception ex)
|
||||||
{
|
{
|
||||||
_logger.LogCritical("DatabaseHealthCheck - Exception occurred when retrieving and parsing database version: {ex}", ex.Message);
|
_logger.LogCritical("DatabaseHealthCheck - Exception occurred when retrieving and parsing database version: {ex}", ex.Message);
|
||||||
return Task.FromResult(
|
return await Task.FromResult(
|
||||||
HealthCheckResult.Unhealthy());
|
HealthCheckResult.Unhealthy());
|
||||||
}
|
}
|
||||||
|
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
_searchdomainManager.helper.ExecuteSQLNonQuery("INSERT INTO settings (name, value) VALUES ('test', 'x');", []);
|
await _searchdomainManager.Helper.ExecuteSQLNonQuery("INSERT INTO settings (name, value) VALUES ('test', 'x');", []);
|
||||||
_searchdomainManager.helper.ExecuteSQLNonQuery("DELETE FROM settings WHERE name = 'test';", []);
|
await _searchdomainManager.Helper.ExecuteSQLNonQuery("DELETE FROM settings WHERE name = 'test';", []);
|
||||||
}
|
}
|
||||||
catch (Exception ex)
|
catch (Exception ex)
|
||||||
{
|
{
|
||||||
_logger.LogCritical("DatabaseHealthCheck - Exception occurred when executing INSERT/DELETE query: {ex}", ex.Message);
|
_logger.LogCritical("DatabaseHealthCheck - Exception occurred when executing INSERT/DELETE query: {ex}", ex.Message);
|
||||||
return Task.FromResult(
|
return await Task.FromResult(
|
||||||
HealthCheckResult.Unhealthy());
|
HealthCheckResult.Unhealthy());
|
||||||
}
|
}
|
||||||
|
|
||||||
return Task.FromResult(
|
return await Task.FromResult(
|
||||||
HealthCheckResult.Healthy());
|
HealthCheckResult.Healthy());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
241
src/Server/Helper/CacheHelper.cs
Normal file
241
src/Server/Helper/CacheHelper.cs
Normal file
@@ -0,0 +1,241 @@
|
|||||||
|
using System.Configuration;
|
||||||
|
using Microsoft.Data.Sqlite;
|
||||||
|
using Microsoft.Extensions.Options;
|
||||||
|
using OllamaSharp.Models;
|
||||||
|
using Server.Models;
|
||||||
|
using Shared;
|
||||||
|
|
||||||
|
namespace Server.Helper;
|
||||||
|
|
||||||
|
public static class CacheHelper
|
||||||
|
{
|
||||||
|
public static EnumerableLruCache<string, Dictionary<string, float[]>> GetEmbeddingStore(EmbeddingSearchOptions options)
|
||||||
|
{
|
||||||
|
SQLiteHelper helper = new(options);
|
||||||
|
EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache = new((int)(options.Cache.StoreTopN ?? options.Cache.CacheTopN));
|
||||||
|
helper.ExecuteQuery(
|
||||||
|
"SELECT cache_key, model_key, embedding, idx FROM embedding_cache ORDER BY idx ASC", [], r =>
|
||||||
|
{
|
||||||
|
int embeddingOrdinal = r.GetOrdinal("embedding");
|
||||||
|
int length = (int)r.GetBytes(embeddingOrdinal, 0, null, 0, 0);
|
||||||
|
byte[] buffer = new byte[length];
|
||||||
|
r.GetBytes(embeddingOrdinal, 0, buffer, 0, length);
|
||||||
|
var cache_key = r.GetString(r.GetOrdinal("cache_key"));
|
||||||
|
var model_key = r.GetString(r.GetOrdinal("model_key"));
|
||||||
|
var embedding = SearchdomainHelper.FloatArrayFromBytes(buffer);
|
||||||
|
var index = r.GetInt32(r.GetOrdinal("idx"));
|
||||||
|
if (cache_key is null || model_key is null || embedding is null)
|
||||||
|
{
|
||||||
|
throw new Exception("Unable to get the embedding store due to a returned element being null");
|
||||||
|
}
|
||||||
|
if (!embeddingCache.TryGetValue(cache_key, out Dictionary<string, float[]>? keyElement) || keyElement is null)
|
||||||
|
{
|
||||||
|
keyElement = [];
|
||||||
|
embeddingCache[cache_key] = keyElement;
|
||||||
|
}
|
||||||
|
keyElement[model_key] = embedding;
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
embeddingCache.Capacity = (int)options.Cache.CacheTopN;
|
||||||
|
return embeddingCache;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task UpdateEmbeddingStore(EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache, EmbeddingSearchOptions options)
|
||||||
|
{
|
||||||
|
if (options.Cache.StoreTopN is not null)
|
||||||
|
{
|
||||||
|
embeddingCache.Capacity = (int)options.Cache.StoreTopN;
|
||||||
|
}
|
||||||
|
SQLiteHelper helper = new(options);
|
||||||
|
EnumerableLruCache<string, Dictionary<string, float[]>> embeddingStore = GetEmbeddingStore(options);
|
||||||
|
|
||||||
|
|
||||||
|
var embeddingCacheMappings = GetCacheMappings(embeddingCache);
|
||||||
|
var embeddingCacheIndexMap = embeddingCacheMappings.positionToEntry;
|
||||||
|
var embeddingCacheObjectMap = embeddingCacheMappings.entryToPosition;
|
||||||
|
|
||||||
|
var embeddingStoreMappings = GetCacheMappings(embeddingStore);
|
||||||
|
var embeddingStoreIndexMap = embeddingStoreMappings.positionToEntry;
|
||||||
|
var embeddingStoreObjectMap = embeddingStoreMappings.entryToPosition;
|
||||||
|
|
||||||
|
List<int> deletedEntries = [];
|
||||||
|
|
||||||
|
foreach (KeyValuePair<int, KeyValuePair<string, Dictionary<string, float[]>>> kv in embeddingStoreIndexMap)
|
||||||
|
{
|
||||||
|
int storeEntryIndex = kv.Key;
|
||||||
|
string storeEntryString = kv.Value.Key;
|
||||||
|
bool cacheEntryExists = embeddingCacheObjectMap.TryGetValue(storeEntryString, out int cacheEntryIndex);
|
||||||
|
|
||||||
|
if (!cacheEntryExists) // Deleted
|
||||||
|
{
|
||||||
|
deletedEntries.Add(storeEntryIndex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
await RemoveEntriesFromStore(helper, deletedEntries);
|
||||||
|
|
||||||
|
|
||||||
|
List<(int Index, KeyValuePair<string, Dictionary<string, float[]>> Entry)> createdEntries = [];
|
||||||
|
List<(int Index, int NewIndex)> changedEntries = [];
|
||||||
|
List<(int Index, string Model, string Key, float[] Embedding)> AddedModels = [];
|
||||||
|
List<(int Index, string Model)> RemovedModels = [];
|
||||||
|
foreach (KeyValuePair<int, KeyValuePair<string, Dictionary<string, float[]>>> kv in embeddingCacheIndexMap)
|
||||||
|
{
|
||||||
|
int cacheEntryIndex = kv.Key;
|
||||||
|
string cacheEntryString = kv.Value.Key;
|
||||||
|
|
||||||
|
bool storeEntryExists = embeddingStoreObjectMap.TryGetValue(cacheEntryString, out int storeEntryIndex);
|
||||||
|
|
||||||
|
if (!storeEntryExists) // Created
|
||||||
|
{
|
||||||
|
createdEntries.Add((
|
||||||
|
Index: cacheEntryIndex,
|
||||||
|
Entry: kv.Value
|
||||||
|
));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (cacheEntryIndex != storeEntryIndex) // Changed
|
||||||
|
{
|
||||||
|
changedEntries.Add((
|
||||||
|
Index: cacheEntryIndex,
|
||||||
|
NewIndex: storeEntryIndex
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for new/removed models
|
||||||
|
var storeModels = embeddingStoreIndexMap[storeEntryIndex].Value;
|
||||||
|
var cacheModels = kv.Value.Value;
|
||||||
|
// New models
|
||||||
|
foreach (var model in storeModels.Keys.Except(cacheModels.Keys))
|
||||||
|
{
|
||||||
|
RemovedModels.Add((
|
||||||
|
Index: cacheEntryIndex,
|
||||||
|
Model: model
|
||||||
|
));
|
||||||
|
}
|
||||||
|
// Removed models
|
||||||
|
foreach (var model in cacheModels.Keys.Except(storeModels.Keys))
|
||||||
|
{
|
||||||
|
AddedModels.Add((
|
||||||
|
Index: cacheEntryIndex,
|
||||||
|
Model: model,
|
||||||
|
Key: cacheEntryString,
|
||||||
|
Embedding: cacheModels[model]
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var taskSet = new List<Task>
|
||||||
|
{
|
||||||
|
CreateEntriesInStore(helper, createdEntries),
|
||||||
|
UpdateEntryIndicesInStore(helper, changedEntries),
|
||||||
|
AddModelsToIndices(helper, AddedModels),
|
||||||
|
RemoveModelsFromIndices(helper, RemovedModels)
|
||||||
|
};
|
||||||
|
|
||||||
|
await Task.WhenAll(taskSet);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static async Task CreateEntriesInStore(
|
||||||
|
SQLiteHelper helper,
|
||||||
|
List<(int Index, KeyValuePair<string, Dictionary<string, float[]>> Entry)> createdEntries)
|
||||||
|
{
|
||||||
|
helper.BulkExecuteNonQuery(
|
||||||
|
"INSERT INTO embedding_cache (cache_key, model_key, embedding, idx) VALUES (@cache_key, @model_key, @embedding, @index)",
|
||||||
|
createdEntries.SelectMany(element => {
|
||||||
|
return element.Entry.Value.Select(model => new object[]
|
||||||
|
{
|
||||||
|
new SqliteParameter("@cache_key", element.Entry.Key),
|
||||||
|
new SqliteParameter("@model_key", model.Key),
|
||||||
|
new SqliteParameter("@embedding", SearchdomainHelper.BytesFromFloatArray(model.Value)),
|
||||||
|
new SqliteParameter("@index", element.Index)
|
||||||
|
});
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static async Task UpdateEntryIndicesInStore(
|
||||||
|
SQLiteHelper helper,
|
||||||
|
List<(int Index, int NewIndex)> changedEntries)
|
||||||
|
{
|
||||||
|
helper.BulkExecuteNonQuery(
|
||||||
|
"UPDATE embedding_cache SET idx = @newIndex WHERE idx = @index",
|
||||||
|
changedEntries.Select(element => new object[]
|
||||||
|
{
|
||||||
|
new SqliteParameter("@index", element.Index),
|
||||||
|
new SqliteParameter("@newIndex", -element.NewIndex) // The "-" prevents in-place update collisions
|
||||||
|
})
|
||||||
|
);
|
||||||
|
helper.BulkExecuteNonQuery(
|
||||||
|
"UPDATE embedding_cache SET idx = @newIndex WHERE idx = @index",
|
||||||
|
changedEntries.Select(element => new object[]
|
||||||
|
{
|
||||||
|
new SqliteParameter("@index", -element.NewIndex),
|
||||||
|
new SqliteParameter("@newIndex", element.NewIndex) // Flip the negative prefix
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static async Task RemoveEntriesFromStore(
|
||||||
|
SQLiteHelper helper,
|
||||||
|
List<int> deletedEntries)
|
||||||
|
{
|
||||||
|
helper.BulkExecuteNonQuery(
|
||||||
|
"DELETE FROM embedding_cache WHERE idx = @index",
|
||||||
|
deletedEntries.Select(index => new object[]
|
||||||
|
{
|
||||||
|
new SqliteParameter("@index", index)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static async Task AddModelsToIndices(
|
||||||
|
SQLiteHelper helper,
|
||||||
|
List<(int Index, string Model, string Key, float[] Embedding)> addedModels)
|
||||||
|
{
|
||||||
|
helper.BulkExecuteNonQuery(
|
||||||
|
"INSERT INTO embedding_cache (cache_key, model_key, embedding, idx) VALUES (@cache_key, @model_key, @embedding, @index)",
|
||||||
|
addedModels.Select(element => new object[]
|
||||||
|
{
|
||||||
|
new SqliteParameter("@cache_key", element.Key),
|
||||||
|
new SqliteParameter("@model_key", element.Model),
|
||||||
|
new SqliteParameter("@embedding", SearchdomainHelper.BytesFromFloatArray(element.Embedding)),
|
||||||
|
new SqliteParameter("@index", element.Index)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static async Task RemoveModelsFromIndices(
|
||||||
|
SQLiteHelper helper,
|
||||||
|
List<(int Index, string Model)> removedModels)
|
||||||
|
{
|
||||||
|
helper.BulkExecuteNonQuery(
|
||||||
|
"DELETE FROM embedding_cache WHERE idx = @index AND model_key = @model",
|
||||||
|
removedModels.Select(element => new object[]
|
||||||
|
{
|
||||||
|
new SqliteParameter("@index", element.Index),
|
||||||
|
new SqliteParameter("@model", element.Model)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private static (Dictionary<int, KeyValuePair<string, Dictionary<string, float[]>>> positionToEntry,
|
||||||
|
Dictionary<string, int> entryToPosition)
|
||||||
|
GetCacheMappings(EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache)
|
||||||
|
{
|
||||||
|
var positionToEntry = new Dictionary<int, KeyValuePair<string, Dictionary<string, float[]>>>();
|
||||||
|
var entryToPosition = new Dictionary<string, int>();
|
||||||
|
|
||||||
|
int position = 0;
|
||||||
|
|
||||||
|
foreach (var entry in embeddingCache)
|
||||||
|
{
|
||||||
|
positionToEntry[position] = entry;
|
||||||
|
entryToPosition[entry.Key] = position;
|
||||||
|
position++;
|
||||||
|
}
|
||||||
|
|
||||||
|
return (positionToEntry, entryToPosition);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,6 +1,9 @@
|
|||||||
using System.Data.Common;
|
using System.Data.Common;
|
||||||
using System.Text;
|
using System.Text;
|
||||||
|
using System.Text.Json;
|
||||||
|
using MySql.Data.MySqlClient;
|
||||||
using Server.Exceptions;
|
using Server.Exceptions;
|
||||||
|
using Server.Models;
|
||||||
using Shared.Models;
|
using Shared.Models;
|
||||||
|
|
||||||
namespace Server.Helper;
|
namespace Server.Helper;
|
||||||
@@ -9,11 +12,21 @@ public class DatabaseHelper(ILogger<DatabaseHelper> logger)
|
|||||||
{
|
{
|
||||||
private readonly ILogger<DatabaseHelper> _logger = logger;
|
private readonly ILogger<DatabaseHelper> _logger = logger;
|
||||||
|
|
||||||
public static void DatabaseInsertEmbeddingBulk(SQLHelper helper, int id_datapoint, List<(string model, byte[] embedding)> data)
|
public static SQLHelper GetSQLHelper(EmbeddingSearchOptions embeddingSearchOptions)
|
||||||
|
{
|
||||||
|
string connectionString = embeddingSearchOptions.ConnectionStrings.SQL;
|
||||||
|
MySqlConnection connection = new(connectionString);
|
||||||
|
connection.Open();
|
||||||
|
return new SQLHelper(connection, connectionString);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task DatabaseInsertEmbeddingBulk(SQLHelper helper, int id_datapoint, List<(string model, byte[] embedding)> data, int id_entity, int id_searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, object> parameters = [];
|
Dictionary<string, object> parameters = [];
|
||||||
parameters["id_datapoint"] = id_datapoint;
|
parameters["id_datapoint"] = id_datapoint;
|
||||||
var query = new StringBuilder("INSERT INTO embedding (id_datapoint, model, embedding) VALUES ");
|
parameters["id_entity"] = id_entity;
|
||||||
|
parameters["id_searchdomain"] = id_searchdomain;
|
||||||
|
var query = new StringBuilder("INSERT INTO embedding (id_datapoint, model, embedding, id_embedding, id_searchdomain) VALUES ");
|
||||||
foreach (var (model, embedding) in data)
|
foreach (var (model, embedding) in data)
|
||||||
{
|
{
|
||||||
string modelParam = $"model_{Guid.NewGuid()}".Replace("-", "");
|
string modelParam = $"model_{Guid.NewGuid()}".Replace("-", "");
|
||||||
@@ -21,24 +34,39 @@ public class DatabaseHelper(ILogger<DatabaseHelper> logger)
|
|||||||
parameters[modelParam] = model;
|
parameters[modelParam] = model;
|
||||||
parameters[embeddingParam] = embedding;
|
parameters[embeddingParam] = embedding;
|
||||||
|
|
||||||
query.Append($"(@id_datapoint, @{modelParam}, @{embeddingParam}), ");
|
query.Append($"(@id_datapoint, @{modelParam}, @{embeddingParam}, @id_entity), ");
|
||||||
}
|
}
|
||||||
|
|
||||||
query.Length -= 2; // remove trailing comma
|
query.Length -= 2; // remove trailing comma
|
||||||
helper.ExecuteSQLNonQuery(query.ToString(), parameters);
|
await helper.ExecuteSQLNonQuery(query.ToString(), parameters);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int DatabaseInsertSearchdomain(SQLHelper helper, string name, SearchdomainSettings settings = new())
|
public static async Task<int> DatabaseInsertEmbeddingBulk(SQLHelper helper, List<(int id_datapoint, string model, byte[] embedding)> data, int id_entity, int id_searchdomain)
|
||||||
|
{
|
||||||
|
return await helper.BulkExecuteNonQuery(
|
||||||
|
"INSERT INTO embedding (id_datapoint, model, embedding, id_entity, id_searchdomain) VALUES (@id_datapoint, @model, @embedding, @id_entity, @id_searchdomain);",
|
||||||
|
data.Select(element => new object[] {
|
||||||
|
new MySqlParameter("@model", element.model),
|
||||||
|
new MySqlParameter("@embedding", element.embedding),
|
||||||
|
new MySqlParameter("@id_datapoint", element.id_datapoint),
|
||||||
|
new MySqlParameter("@id_entity", id_entity),
|
||||||
|
new MySqlParameter("@id_searchdomain", id_searchdomain)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public static async Task<int> DatabaseInsertSearchdomain(SQLHelper helper, string name, SearchdomainSettings settings = new())
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
{ "name", name },
|
{ "name", name },
|
||||||
{ "settings", settings}
|
{ "settings", settings}
|
||||||
};
|
};
|
||||||
return helper.ExecuteSQLCommandGetInsertedID("INSERT INTO searchdomain (name, settings) VALUES (@name, @settings)", parameters);
|
return await helper.ExecuteSQLCommandGetInsertedID("INSERT INTO searchdomain (name, settings) VALUES (@name, @settings)", parameters);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int DatabaseInsertEntity(SQLHelper helper, string name, ProbMethodEnum probmethod, int id_searchdomain)
|
public static async Task<int> DatabaseInsertEntity(SQLHelper helper, string name, ProbMethodEnum probmethod, int id_searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
@@ -46,21 +74,59 @@ public class DatabaseHelper(ILogger<DatabaseHelper> logger)
|
|||||||
{ "probmethod", probmethod.ToString() },
|
{ "probmethod", probmethod.ToString() },
|
||||||
{ "id_searchdomain", id_searchdomain }
|
{ "id_searchdomain", id_searchdomain }
|
||||||
};
|
};
|
||||||
return helper.ExecuteSQLCommandGetInsertedID("INSERT INTO entity (name, probmethod, id_searchdomain) VALUES (@name, @probmethod, @id_searchdomain)", parameters);
|
return await helper.ExecuteSQLCommandGetInsertedID("INSERT INTO entity (name, probmethod, id_searchdomain) VALUES (@name, @probmethod, @id_searchdomain);", parameters);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int DatabaseInsertAttribute(SQLHelper helper, string attribute, string value, int id_entity)
|
public static async Task<int> DatabaseInsertAttributes(SQLHelper helper, List<(string attribute, string value, int id_entity)> values) //string[] attribute, string value, int id_entity)
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
return await helper.BulkExecuteNonQuery(
|
||||||
{
|
"INSERT INTO attribute (attribute, value, id_entity) VALUES (@attribute, @value, @id_entity);",
|
||||||
{ "attribute", attribute },
|
values.Select(element => new object[] {
|
||||||
{ "value", value },
|
new MySqlParameter("@attribute", element.attribute),
|
||||||
{ "id_entity", id_entity }
|
new MySqlParameter("@value", element.value),
|
||||||
};
|
new MySqlParameter("@id_entity", element.id_entity)
|
||||||
return helper.ExecuteSQLCommandGetInsertedID("INSERT INTO attribute (attribute, value, id_entity) VALUES (@attribute, @value, @id_entity)", parameters);
|
})
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int DatabaseInsertDatapoint(SQLHelper helper, string name, ProbMethodEnum probmethod_embedding, SimilarityMethodEnum similarityMethod, string hash, int id_entity)
|
public static async Task<int> DatabaseUpdateAttributes(SQLHelper helper, List<(string attribute, string value, int id_entity)> values)
|
||||||
|
{
|
||||||
|
return await helper.BulkExecuteNonQuery(
|
||||||
|
"UPDATE attribute SET value=@value WHERE id_entity=@id_entity AND attribute=@attribute",
|
||||||
|
values.Select(element => new object[] {
|
||||||
|
new MySqlParameter("@attribute", element.attribute),
|
||||||
|
new MySqlParameter("@value", element.value),
|
||||||
|
new MySqlParameter("@id_entity", element.id_entity)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task<int> DatabaseDeleteAttributes(SQLHelper helper, List<(string attribute, int id_entity)> values)
|
||||||
|
{
|
||||||
|
return await helper.BulkExecuteNonQuery(
|
||||||
|
"DELETE FROM attribute WHERE id_entity=@id_entity AND attribute=@attribute",
|
||||||
|
values.Select(element => new object[] {
|
||||||
|
new MySqlParameter("@attribute", element.attribute),
|
||||||
|
new MySqlParameter("@id_entity", element.id_entity)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task<int> DatabaseInsertDatapoints(SQLHelper helper, List<(string name, ProbMethodEnum probmethod_embedding, SimilarityMethodEnum similarityMethod, string hash)> values, int id_entity)
|
||||||
|
{
|
||||||
|
return await helper.BulkExecuteNonQuery(
|
||||||
|
"INSERT INTO datapoint (name, probmethod_embedding, similaritymethod, hash, id_entity) VALUES (@name, @probmethod_embedding, @similaritymethod, @hash, @id_entity);",
|
||||||
|
values.Select(element => new object[] {
|
||||||
|
new MySqlParameter("@name", element.name),
|
||||||
|
new MySqlParameter("@probmethod_embedding", element.probmethod_embedding),
|
||||||
|
new MySqlParameter("@similaritymethod", element.similarityMethod),
|
||||||
|
new MySqlParameter("@hash", element.hash),
|
||||||
|
new MySqlParameter("@id_entity", id_entity)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task<int> DatabaseInsertDatapoint(SQLHelper helper, string name, ProbMethodEnum probmethod_embedding, SimilarityMethodEnum similarityMethod, string hash, int id_entity)
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
@@ -70,111 +136,155 @@ public class DatabaseHelper(ILogger<DatabaseHelper> logger)
|
|||||||
{ "hash", hash },
|
{ "hash", hash },
|
||||||
{ "id_entity", id_entity }
|
{ "id_entity", id_entity }
|
||||||
};
|
};
|
||||||
return helper.ExecuteSQLCommandGetInsertedID("INSERT INTO datapoint (name, probmethod_embedding, similaritymethod, hash, id_entity) VALUES (@name, @probmethod_embedding, @similaritymethod, @hash, @id_entity)", parameters);
|
return await helper.ExecuteSQLCommandGetInsertedID("INSERT INTO datapoint (name, probmethod_embedding, similaritymethod, hash, id_entity) VALUES (@name, @probmethod_embedding, @similaritymethod, @hash, @id_entity)", parameters);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int DatabaseInsertEmbedding(SQLHelper helper, int id_datapoint, string model, byte[] embedding)
|
public static async Task<(int datapoints, int embeddings)> DatabaseDeleteEmbeddingsAndDatapoints(SQLHelper helper, List<string> values, int id_entity)
|
||||||
|
{
|
||||||
|
int embeddings = await helper.BulkExecuteNonQuery(
|
||||||
|
"DELETE e FROM embedding e WHERE id_entity = @entityId",
|
||||||
|
values.Select(element => new object[] {
|
||||||
|
new MySqlParameter("@datapointName", element),
|
||||||
|
new MySqlParameter("@entityId", id_entity)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
int datapoints = await helper.BulkExecuteNonQuery(
|
||||||
|
"DELETE FROM datapoint WHERE name=@datapointName AND id_entity=@entityId",
|
||||||
|
values.Select(element => new object[] {
|
||||||
|
new MySqlParameter("@datapointName", element),
|
||||||
|
new MySqlParameter("@entityId", id_entity)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
return (datapoints: datapoints, embeddings: embeddings);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task<int> DatabaseUpdateDatapoint(SQLHelper helper, List<(string name, string probmethod_embedding, string similarityMethod)> values, int id_entity)
|
||||||
|
{
|
||||||
|
return await helper.BulkExecuteNonQuery(
|
||||||
|
"UPDATE datapoint SET probmethod_embedding=@probmethod, similaritymethod=@similaritymethod WHERE id_entity=@entityId AND name=@datapointName",
|
||||||
|
values.Select(element => new object[] {
|
||||||
|
new MySqlParameter("@probmethod", element.probmethod_embedding),
|
||||||
|
new MySqlParameter("@similaritymethod", element.similarityMethod),
|
||||||
|
new MySqlParameter("@entityId", id_entity),
|
||||||
|
new MySqlParameter("@datapointName", element.name)
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task<int> DatabaseInsertEmbedding(SQLHelper helper, int id_datapoint, string model, byte[] embedding, int id_entity, int id_searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
{ "id_datapoint", id_datapoint },
|
{ "id_datapoint", id_datapoint },
|
||||||
{ "model", model },
|
{ "model", model },
|
||||||
{ "embedding", embedding }
|
{ "embedding", embedding },
|
||||||
|
{ "id_entity", id_entity },
|
||||||
|
{ "id_searchdomain", id_searchdomain }
|
||||||
};
|
};
|
||||||
return helper.ExecuteSQLCommandGetInsertedID("INSERT INTO embedding (id_datapoint, model, embedding) VALUES (@id_datapoint, @model, @embedding)", parameters);
|
return await helper.ExecuteSQLCommandGetInsertedID("INSERT INTO embedding (id_datapoint, model, embedding, id_entity, id_searchdomain) VALUES (@id_datapoint, @model, @embedding, @id_entity, @id_searchdomain)", parameters);
|
||||||
}
|
}
|
||||||
|
|
||||||
public int GetSearchdomainID(SQLHelper helper, string searchdomain)
|
public async Task<int> GetSearchdomainID(SQLHelper helper, string searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, object?> parameters = new()
|
||||||
{
|
{
|
||||||
{ "searchdomain", searchdomain}
|
{ "searchdomain", searchdomain}
|
||||||
};
|
};
|
||||||
lock (helper.connection)
|
return (await helper.ExecuteQueryAsync("SELECT id FROM searchdomain WHERE name = @searchdomain", parameters, x => x.GetInt32(0))).First();
|
||||||
{
|
|
||||||
DbDataReader reader = helper.ExecuteSQLCommand("SELECT id FROM searchdomain WHERE name = @searchdomain", parameters);
|
|
||||||
bool success = reader.Read();
|
|
||||||
int result = success ? reader.GetInt32(0) : 0;
|
|
||||||
reader.Close();
|
|
||||||
if (success)
|
|
||||||
{
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
_logger.LogError("Unable to retrieve searchdomain ID for {searchdomain}", [searchdomain]);
|
|
||||||
throw new SearchdomainNotFoundException(searchdomain);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void RemoveEntity(List<Entity> entityCache, SQLHelper helper, string name, string searchdomain)
|
public async Task RemoveEntity(List<Entity> entityCache, SQLHelper helper, string name, string searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
{ "name", name },
|
{ "name", name },
|
||||||
{ "searchdomain", GetSearchdomainID(helper, searchdomain)}
|
{ "searchdomain", await GetSearchdomainID(helper, searchdomain)}
|
||||||
};
|
};
|
||||||
|
|
||||||
helper.ExecuteSQLNonQuery("DELETE embedding.* FROM embedding JOIN datapoint dp ON id_datapoint = dp.id JOIN entity ON id_entity = entity.id WHERE entity.name = @name AND entity.id_searchdomain = @searchdomain", parameters);
|
await helper.ExecuteSQLNonQuery("DELETE embedding.* FROM embedding JOIN entity ON id_entity = entity.id WHERE entity.name = @name AND entity.id_searchdomain = @searchdomain", parameters);
|
||||||
helper.ExecuteSQLNonQuery("DELETE datapoint.* FROM datapoint JOIN entity ON id_entity = entity.id WHERE entity.name = @name AND entity.id_searchdomain = @searchdomain", parameters);
|
await helper.ExecuteSQLNonQuery("DELETE datapoint.* FROM datapoint JOIN entity ON id_entity = entity.id WHERE entity.name = @name AND entity.id_searchdomain = @searchdomain", parameters);
|
||||||
helper.ExecuteSQLNonQuery("DELETE attribute.* FROM attribute JOIN entity ON id_entity = entity.id WHERE entity.name = @name AND entity.id_searchdomain = @searchdomain", parameters);
|
await helper.ExecuteSQLNonQuery("DELETE attribute.* FROM attribute JOIN entity ON id_entity = entity.id WHERE entity.name = @name AND entity.id_searchdomain = @searchdomain", parameters);
|
||||||
helper.ExecuteSQLNonQuery("DELETE FROM entity WHERE name = @name AND entity.id_searchdomain = @searchdomain", parameters);
|
await helper.ExecuteSQLNonQuery("DELETE FROM entity WHERE name = @name AND entity.id_searchdomain = @searchdomain", parameters);
|
||||||
entityCache.RemoveAll(entity => entity.name == name);
|
entityCache.RemoveAll(entity => entity.Name == name);
|
||||||
}
|
}
|
||||||
|
|
||||||
public int RemoveAllEntities(SQLHelper helper, string searchdomain)
|
public async Task<int> RemoveAllEntities(SQLHelper helper, string searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
{ "searchdomain", GetSearchdomainID(helper, searchdomain)}
|
{ "searchdomain", await GetSearchdomainID(helper, searchdomain)}
|
||||||
};
|
};
|
||||||
|
int count;
|
||||||
helper.ExecuteSQLNonQuery("DELETE embedding.* FROM embedding JOIN datapoint dp ON id_datapoint = dp.id JOIN entity ON id_entity = entity.id WHERE entity.id_searchdomain = @searchdomain", parameters);
|
do
|
||||||
helper.ExecuteSQLNonQuery("DELETE datapoint.* FROM datapoint JOIN entity ON id_entity = entity.id WHERE entity.id_searchdomain = @searchdomain", parameters);
|
{
|
||||||
helper.ExecuteSQLNonQuery("DELETE attribute.* FROM attribute JOIN entity ON id_entity = entity.id WHERE entity.id_searchdomain = @searchdomain", parameters);
|
count = await helper.ExecuteSQLNonQuery("DELETE FROM embedding WHERE id_searchdomain = @searchdomain LIMIT 10000", parameters);
|
||||||
return helper.ExecuteSQLNonQuery("DELETE FROM entity WHERE entity.id_searchdomain = @searchdomain", parameters);
|
} while (count == 10000);
|
||||||
|
do
|
||||||
|
{
|
||||||
|
count = await helper.ExecuteSQLNonQuery("DELETE FROM datapoint WHERE id_entity IN (SELECT id FROM entity WHERE id_searchdomain = @searchdomain) LIMIT 10000", parameters);
|
||||||
|
} while (count == 10000);
|
||||||
|
do
|
||||||
|
{
|
||||||
|
count = await helper.ExecuteSQLNonQuery("DELETE FROM attribute WHERE id_entity IN (SELECT id FROM entity WHERE id_searchdomain = @searchdomain) LIMIT 10000", parameters);
|
||||||
|
} while (count == 10000);
|
||||||
|
int total = 0;
|
||||||
|
do
|
||||||
|
{
|
||||||
|
count = await helper.ExecuteSQLNonQuery("DELETE FROM entity WHERE id_searchdomain = @searchdomain LIMIT 10000", parameters);
|
||||||
|
total += count;
|
||||||
|
} while (count == 10000);
|
||||||
|
return total;
|
||||||
}
|
}
|
||||||
|
|
||||||
public bool HasEntity(SQLHelper helper, string name, string searchdomain)
|
public async Task<bool> HasEntity(SQLHelper helper, string name, string searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
{ "name", name },
|
{ "name", name },
|
||||||
{ "searchdomain", GetSearchdomainID(helper, searchdomain)}
|
{ "searchdomain", await GetSearchdomainID(helper, searchdomain)}
|
||||||
};
|
};
|
||||||
lock (helper.connection)
|
lock (helper.Connection)
|
||||||
{
|
{
|
||||||
DbDataReader reader = helper.ExecuteSQLCommand("SELECT COUNT(*) FROM entity WHERE name = @name AND id_searchdomain = @searchdomain", parameters);
|
DbDataReader reader = helper.ExecuteSQLCommand("SELECT COUNT(*) FROM entity WHERE name = @name AND id_searchdomain = @searchdomain", parameters);
|
||||||
bool success = reader.Read();
|
try
|
||||||
bool result = success && reader.GetInt32(0) > 0;
|
|
||||||
reader.Close();
|
|
||||||
if (success)
|
|
||||||
{
|
{
|
||||||
return result;
|
bool success = reader.Read();
|
||||||
}
|
bool result = success && reader.GetInt32(0) > 0;
|
||||||
else
|
if (success)
|
||||||
|
{
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
_logger.LogError("Unable to determine whether an entity named {name} exists for {searchdomain}", [name, searchdomain]);
|
||||||
|
throw new Exception($"Unable to determine whether an entity named {name} exists for {searchdomain}");
|
||||||
|
}
|
||||||
|
} finally
|
||||||
{
|
{
|
||||||
_logger.LogError("Unable to determine whether an entity named {name} exists for {searchdomain}", [name, searchdomain]);
|
reader.Close();
|
||||||
throw new Exception($"Unable to determine whether an entity named {name} exists for {searchdomain}");
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public int? GetEntityID(SQLHelper helper, string name, string searchdomain)
|
public async Task<int?> GetEntityID(SQLHelper helper, string name, string searchdomain)
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, dynamic> parameters = new()
|
||||||
{
|
{
|
||||||
{ "name", name },
|
{ "name", name },
|
||||||
{ "searchdomain", GetSearchdomainID(helper, searchdomain)}
|
{ "searchdomain", await GetSearchdomainID(helper, searchdomain)}
|
||||||
};
|
};
|
||||||
lock (helper.connection)
|
lock (helper.Connection)
|
||||||
{
|
{
|
||||||
DbDataReader reader = helper.ExecuteSQLCommand("SELECT id FROM entity WHERE name = @name AND id_searchdomain = @searchdomain", parameters);
|
DbDataReader reader = helper.ExecuteSQLCommand("SELECT id FROM entity WHERE name = @name AND id_searchdomain = @searchdomain", parameters);
|
||||||
bool success = reader.Read();
|
try
|
||||||
int? result = success ? reader.GetInt32(0) : 0;
|
{
|
||||||
reader.Close();
|
bool success = reader.Read();
|
||||||
return result;
|
int? result = success ? reader.GetInt32(0) : 0;
|
||||||
|
return result;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
reader.Close();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -185,30 +295,122 @@ public class DatabaseHelper(ILogger<DatabaseHelper> logger)
|
|||||||
{ "searchdomain", searchdomain}
|
{ "searchdomain", searchdomain}
|
||||||
};
|
};
|
||||||
DbDataReader searchdomainSumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(id) + LENGTH(name) + LENGTH(settings)) AS total_bytes FROM embeddingsearch.searchdomain WHERE name=@searchdomain", parameters);
|
DbDataReader searchdomainSumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(id) + LENGTH(name) + LENGTH(settings)) AS total_bytes FROM embeddingsearch.searchdomain WHERE name=@searchdomain", parameters);
|
||||||
bool success = searchdomainSumReader.Read();
|
bool success;
|
||||||
long result = success && !searchdomainSumReader.IsDBNull(0) ? searchdomainSumReader.GetInt64(0) : 0;
|
long result;
|
||||||
searchdomainSumReader.Close();
|
try
|
||||||
|
{
|
||||||
|
success = searchdomainSumReader.Read();
|
||||||
|
result = success && !searchdomainSumReader.IsDBNull(0) ? searchdomainSumReader.GetInt64(0) : 0;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
searchdomainSumReader.Close();
|
||||||
|
}
|
||||||
|
|
||||||
DbDataReader entitySumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(e.id) + LENGTH(e.name) + LENGTH(e.probmethod) + LENGTH(e.id_searchdomain)) AS total_bytes FROM embeddingsearch.entity e JOIN embeddingsearch.searchdomain s ON e.id_searchdomain = s.id WHERE s.name=@searchdomain", parameters);
|
DbDataReader entitySumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(e.id) + LENGTH(e.name) + LENGTH(e.probmethod) + LENGTH(e.id_searchdomain)) AS total_bytes FROM embeddingsearch.entity e JOIN embeddingsearch.searchdomain s ON e.id_searchdomain = s.id WHERE s.name=@searchdomain", parameters);
|
||||||
success = entitySumReader.Read();
|
try
|
||||||
result += success && !entitySumReader.IsDBNull(0) ? entitySumReader.GetInt64(0) : 0;
|
{
|
||||||
entitySumReader.Close();
|
success = entitySumReader.Read();
|
||||||
|
result += success && !entitySumReader.IsDBNull(0) ? entitySumReader.GetInt64(0) : 0;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
entitySumReader.Close();
|
||||||
|
}
|
||||||
|
|
||||||
DbDataReader datapointSumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(d.id) + LENGTH(d.name) + LENGTH(d.probmethod_embedding) + LENGTH(d.similaritymethod) + LENGTH(d.id_entity) + LENGTH(d.hash)) AS total_bytes FROM embeddingsearch.datapoint d JOIN embeddingsearch.entity e ON d.id_entity = e.id JOIN embeddingsearch.searchdomain s ON e.id_searchdomain = s.id WHERE s.name=@searchdomain", parameters);
|
DbDataReader datapointSumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(d.id) + LENGTH(d.name) + LENGTH(d.probmethod_embedding) + LENGTH(d.similaritymethod) + LENGTH(d.id_entity) + LENGTH(d.hash)) AS total_bytes FROM embeddingsearch.datapoint d JOIN embeddingsearch.entity e ON d.id_entity = e.id JOIN embeddingsearch.searchdomain s ON e.id_searchdomain = s.id WHERE s.name=@searchdomain", parameters);
|
||||||
success = datapointSumReader.Read();
|
try
|
||||||
result += success && !datapointSumReader.IsDBNull(0) ? datapointSumReader.GetInt64(0) : 0;
|
{
|
||||||
datapointSumReader.Close();
|
success = datapointSumReader.Read();
|
||||||
|
result += success && !datapointSumReader.IsDBNull(0) ? datapointSumReader.GetInt64(0) : 0;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
datapointSumReader.Close();
|
||||||
|
}
|
||||||
|
|
||||||
DbDataReader embeddingSumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(em.id) + LENGTH(em.id_datapoint) + LENGTH(em.model) + LENGTH(em.embedding)) AS total_bytes FROM embeddingsearch.embedding em JOIN embeddingsearch.datapoint d ON em.id_datapoint = d.id JOIN embeddingsearch.entity e ON d.id_entity = e.id JOIN embeddingsearch.searchdomain s ON e.id_searchdomain = s.id WHERE s.name=@searchdomain", parameters);
|
DbDataReader embeddingSumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(em.id) + LENGTH(em.id_datapoint) + LENGTH(em.model) + LENGTH(em.embedding)) AS total_bytes FROM embeddingsearch.embedding em JOIN embeddingsearch.datapoint d ON em.id_datapoint = d.id JOIN embeddingsearch.entity e ON d.id_entity = e.id JOIN embeddingsearch.searchdomain s ON e.id_searchdomain = s.id WHERE s.name=@searchdomain", parameters);
|
||||||
success = embeddingSumReader.Read();
|
try
|
||||||
result += success && !embeddingSumReader.IsDBNull(0) ? embeddingSumReader.GetInt64(0) : 0;
|
{
|
||||||
embeddingSumReader.Close();
|
success = embeddingSumReader.Read();
|
||||||
|
result += success && !embeddingSumReader.IsDBNull(0) ? embeddingSumReader.GetInt64(0) : 0;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
embeddingSumReader.Close();
|
||||||
|
}
|
||||||
|
|
||||||
DbDataReader attributeSumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(a.id) + LENGTH(a.id_entity) + LENGTH(a.attribute) + LENGTH(a.value)) AS total_bytes FROM embeddingsearch.attribute a JOIN embeddingsearch.entity e ON a.id_entity = e.id JOIN embeddingsearch.searchdomain s ON e.id_searchdomain = s.id WHERE s.name=@searchdomain", parameters);
|
DbDataReader attributeSumReader = helper.ExecuteSQLCommand("SELECT SUM(LENGTH(a.id) + LENGTH(a.id_entity) + LENGTH(a.attribute) + LENGTH(a.value)) AS total_bytes FROM embeddingsearch.attribute a JOIN embeddingsearch.entity e ON a.id_entity = e.id JOIN embeddingsearch.searchdomain s ON e.id_searchdomain = s.id WHERE s.name=@searchdomain", parameters);
|
||||||
success = attributeSumReader.Read();
|
try
|
||||||
result += success && !attributeSumReader.IsDBNull(0) ? attributeSumReader.GetInt64(0) : 0;
|
{
|
||||||
attributeSumReader.Close();
|
success = attributeSumReader.Read();
|
||||||
|
result += success && !attributeSumReader.IsDBNull(0) ? attributeSumReader.GetInt64(0) : 0;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
attributeSumReader.Close();
|
||||||
|
}
|
||||||
|
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static long GetTotalDatabaseSize(SQLHelper helper)
|
||||||
|
{
|
||||||
|
Dictionary<string, dynamic> parameters = [];
|
||||||
|
DbDataReader searchdomainSumReader = helper.ExecuteSQLCommand("SELECT SUM(Data_length) FROM information_schema.tables", parameters);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
bool success = searchdomainSumReader.Read();
|
||||||
|
long result = success && !searchdomainSumReader.IsDBNull(0) ? searchdomainSumReader.GetInt64(0) : 0;
|
||||||
|
return result;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
searchdomainSumReader.Close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async Task<long> CountEntities(SQLHelper helper)
|
||||||
|
{
|
||||||
|
DbDataReader searchdomainSumReader = helper.ExecuteSQLCommand("SELECT COUNT(*) FROM entity;", []);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
bool success = searchdomainSumReader.Read();
|
||||||
|
long result = success && !searchdomainSumReader.IsDBNull(0) ? searchdomainSumReader.GetInt64(0) : 0;
|
||||||
|
return result;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
searchdomainSumReader.Close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static long CountEntitiesForSearchdomain(SQLHelper helper, string searchdomain)
|
||||||
|
{
|
||||||
|
Dictionary<string, dynamic> parameters = new()
|
||||||
|
{
|
||||||
|
{ "searchdomain", searchdomain}
|
||||||
|
};
|
||||||
|
DbDataReader searchdomainSumReader = helper.ExecuteSQLCommand("SELECT COUNT(*) FROM entity e JOIN searchdomain s on e.id_searchdomain = s.id WHERE e.id_searchdomain = s.id AND s.name = @searchdomain;", parameters);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
bool success = searchdomainSumReader.Read();
|
||||||
|
long result = success && !searchdomainSumReader.IsDBNull(0) ? searchdomainSumReader.GetInt64(0) : 0;
|
||||||
|
return result;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
searchdomainSumReader.Close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static SearchdomainSettings GetSearchdomainSettings(SQLHelper helper, string searchdomain)
|
||||||
|
{
|
||||||
|
Dictionary<string, dynamic> parameters = new()
|
||||||
|
{
|
||||||
|
["name"] = searchdomain
|
||||||
|
};
|
||||||
|
DbDataReader reader = helper.ExecuteSQLCommand("SELECT settings from searchdomain WHERE name = @name", parameters);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
reader.Read();
|
||||||
|
string settingsString = reader.GetString(0);
|
||||||
|
return JsonSerializer.Deserialize<SearchdomainSettings>(settingsString);
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
reader.Close();
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
@@ -1,3 +1,4 @@
|
|||||||
|
using System.Data;
|
||||||
using System.Data.Common;
|
using System.Data.Common;
|
||||||
using MySql.Data.MySqlClient;
|
using MySql.Data.MySqlClient;
|
||||||
|
|
||||||
@@ -5,46 +6,86 @@ namespace Server.Helper;
|
|||||||
|
|
||||||
public class SQLHelper:IDisposable
|
public class SQLHelper:IDisposable
|
||||||
{
|
{
|
||||||
public MySqlConnection connection;
|
public MySqlConnection Connection;
|
||||||
public string connectionString;
|
public DbDataReader? DbDataReader;
|
||||||
|
public MySqlConnectionPoolElement[] ConnectionPool;
|
||||||
|
public string ConnectionString;
|
||||||
public SQLHelper(MySqlConnection connection, string connectionString)
|
public SQLHelper(MySqlConnection connection, string connectionString)
|
||||||
{
|
{
|
||||||
this.connection = connection;
|
Connection = connection;
|
||||||
this.connectionString = connectionString;
|
ConnectionString = connectionString;
|
||||||
|
ConnectionPool = new MySqlConnectionPoolElement[50];
|
||||||
|
for (int i = 0; i < ConnectionPool.Length; i++)
|
||||||
|
{
|
||||||
|
ConnectionPool[i] = new MySqlConnectionPoolElement(new MySqlConnection(connectionString), new(1, 1));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public SQLHelper DuplicateConnection()
|
public SQLHelper DuplicateConnection() // TODO remove this
|
||||||
{
|
{
|
||||||
MySqlConnection newConnection = new(connectionString);
|
return this;
|
||||||
return new SQLHelper(newConnection, connectionString);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void Dispose()
|
public void Dispose()
|
||||||
{
|
{
|
||||||
connection.Close();
|
Connection.Close();
|
||||||
GC.SuppressFinalize(this);
|
GC.SuppressFinalize(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
public DbDataReader ExecuteSQLCommand(string query, Dictionary<string, dynamic> parameters)
|
public DbDataReader ExecuteSQLCommand(string query, Dictionary<string, dynamic> parameters)
|
||||||
{
|
{
|
||||||
lock (connection)
|
lock (Connection)
|
||||||
{
|
{
|
||||||
EnsureConnected();
|
EnsureConnected();
|
||||||
using MySqlCommand command = connection.CreateCommand();
|
EnsureDbReaderIsClosed();
|
||||||
|
using MySqlCommand command = Connection.CreateCommand();
|
||||||
command.CommandText = query;
|
command.CommandText = query;
|
||||||
foreach (KeyValuePair<string, dynamic> parameter in parameters)
|
foreach (KeyValuePair<string, dynamic> parameter in parameters)
|
||||||
{
|
{
|
||||||
command.Parameters.AddWithValue($"@{parameter.Key}", parameter.Value);
|
command.Parameters.AddWithValue($"@{parameter.Key}", parameter.Value);
|
||||||
}
|
}
|
||||||
return command.ExecuteReader();
|
DbDataReader = command.ExecuteReader();
|
||||||
|
return DbDataReader;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public int ExecuteSQLNonQuery(string query, Dictionary<string, dynamic> parameters)
|
public async Task<List<T>> ExecuteQueryAsync<T>(
|
||||||
|
string sql,
|
||||||
|
Dictionary<string, object?> parameters,
|
||||||
|
Func<DbDataReader, T> map)
|
||||||
{
|
{
|
||||||
lock (connection)
|
var poolElement = await GetMySqlConnectionPoolElement();
|
||||||
|
var connection = poolElement.Connection;
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await using var command = connection.CreateCommand();
|
||||||
|
command.CommandText = sql;
|
||||||
|
|
||||||
|
foreach (var p in parameters)
|
||||||
|
command.Parameters.AddWithValue($"@{p.Key}", p.Value);
|
||||||
|
|
||||||
|
await using var reader = await command.ExecuteReaderAsync();
|
||||||
|
|
||||||
|
var result = new List<T>();
|
||||||
|
while (await reader.ReadAsync())
|
||||||
|
{
|
||||||
|
result.Add(map(reader));
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
|
||||||
|
poolElement.Semaphore.Release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async Task<int> ExecuteSQLNonQuery(string query, Dictionary<string, dynamic> parameters)
|
||||||
|
{
|
||||||
|
var poolElement = await GetMySqlConnectionPoolElement();
|
||||||
|
var connection = poolElement.Connection;
|
||||||
|
try
|
||||||
{
|
{
|
||||||
EnsureConnected();
|
|
||||||
using MySqlCommand command = connection.CreateCommand();
|
using MySqlCommand command = connection.CreateCommand();
|
||||||
|
|
||||||
command.CommandText = query;
|
command.CommandText = query;
|
||||||
@@ -53,14 +94,18 @@ public class SQLHelper:IDisposable
|
|||||||
command.Parameters.AddWithValue($"@{parameter.Key}", parameter.Value);
|
command.Parameters.AddWithValue($"@{parameter.Key}", parameter.Value);
|
||||||
}
|
}
|
||||||
return command.ExecuteNonQuery();
|
return command.ExecuteNonQuery();
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
poolElement.Semaphore.Release();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public int ExecuteSQLCommandGetInsertedID(string query, Dictionary<string, dynamic> parameters)
|
public async Task<int> ExecuteSQLCommandGetInsertedID(string query, Dictionary<string, dynamic> parameters)
|
||||||
{
|
{
|
||||||
lock (connection)
|
var poolElement = await GetMySqlConnectionPoolElement();
|
||||||
|
var connection = poolElement.Connection;
|
||||||
|
try
|
||||||
{
|
{
|
||||||
EnsureConnected();
|
|
||||||
using MySqlCommand command = connection.CreateCommand();
|
using MySqlCommand command = connection.CreateCommand();
|
||||||
|
|
||||||
command.CommandText = query;
|
command.CommandText = query;
|
||||||
@@ -71,23 +116,173 @@ public class SQLHelper:IDisposable
|
|||||||
command.ExecuteNonQuery();
|
command.ExecuteNonQuery();
|
||||||
command.CommandText = "SELECT LAST_INSERT_ID();";
|
command.CommandText = "SELECT LAST_INSERT_ID();";
|
||||||
return Convert.ToInt32(command.ExecuteScalar());
|
return Convert.ToInt32(command.ExecuteScalar());
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
poolElement.Semaphore.Release();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public async Task<int> BulkExecuteNonQuery(string sql, IEnumerable<object[]> parameterSets)
|
||||||
|
{
|
||||||
|
var poolElement = await GetMySqlConnectionPoolElement();
|
||||||
|
var connection = poolElement.Connection;
|
||||||
|
try
|
||||||
|
{
|
||||||
|
int affectedRows = 0;
|
||||||
|
int retries = 0;
|
||||||
|
|
||||||
|
while (retries <= 3)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
using var transaction = connection.BeginTransaction();
|
||||||
|
using var command = connection.CreateCommand();
|
||||||
|
|
||||||
|
command.CommandText = sql;
|
||||||
|
command.Transaction = transaction;
|
||||||
|
|
||||||
|
foreach (var parameters in parameterSets)
|
||||||
|
{
|
||||||
|
command.Parameters.Clear();
|
||||||
|
command.Parameters.AddRange(parameters);
|
||||||
|
affectedRows += command.ExecuteNonQuery();
|
||||||
|
}
|
||||||
|
|
||||||
|
transaction.Commit();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
catch (Exception)
|
||||||
|
{
|
||||||
|
retries++;
|
||||||
|
if (retries > 3)
|
||||||
|
throw;
|
||||||
|
Thread.Sleep(10);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return affectedRows;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
poolElement.Semaphore.Release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async Task<MySqlConnectionPoolElement> GetMySqlConnectionPoolElement()
|
||||||
|
{
|
||||||
|
int counter = 0;
|
||||||
|
int sleepTime = 10;
|
||||||
|
do
|
||||||
|
{
|
||||||
|
foreach (var element in ConnectionPool)
|
||||||
|
{
|
||||||
|
if (element.Semaphore.Wait(0))
|
||||||
|
{
|
||||||
|
if (element.Connection.State == ConnectionState.Closed)
|
||||||
|
{
|
||||||
|
await element.Connection.CloseAsync();
|
||||||
|
await element.Connection.OpenAsync();
|
||||||
|
}
|
||||||
|
return element;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Thread.Sleep(sleepTime);
|
||||||
|
} while (++counter <= 50);
|
||||||
|
TimeoutException ex = new("Unable to get MySqlConnection");
|
||||||
|
ElmahCore.ElmahExtensions.RaiseError(ex);
|
||||||
|
throw ex;
|
||||||
|
}
|
||||||
|
|
||||||
public bool EnsureConnected()
|
public bool EnsureConnected()
|
||||||
{
|
{
|
||||||
if (connection.State != System.Data.ConnectionState.Open)
|
if (Connection.State != System.Data.ConnectionState.Open)
|
||||||
{
|
{
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
connection.Close();
|
Connection.Close();
|
||||||
connection.Open();
|
Connection.Open();
|
||||||
}
|
}
|
||||||
catch (Exception)
|
catch (Exception ex)
|
||||||
{
|
{
|
||||||
throw; // TODO add logging here
|
ElmahCore.ElmahExtensions.RaiseError(ex);
|
||||||
|
throw;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void EnsureDbReaderIsClosed()
|
||||||
|
{
|
||||||
|
int counter = 0;
|
||||||
|
int sleepTime = 10;
|
||||||
|
int timeout = 5000;
|
||||||
|
while (!(DbDataReader?.IsClosed ?? true))
|
||||||
|
{
|
||||||
|
if (counter > timeout / sleepTime)
|
||||||
|
{
|
||||||
|
TimeoutException ex = new("Unable to ensure dbDataReader is closed");
|
||||||
|
ElmahCore.ElmahExtensions.RaiseError(ex);
|
||||||
|
throw ex;
|
||||||
|
}
|
||||||
|
Thread.Sleep(sleepTime);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
public async Task ExecuteInTransactionAsync(Func<MySqlConnection, DbTransaction, Task> operation)
|
||||||
|
{
|
||||||
|
var poolElement = await GetMySqlConnectionPoolElement();
|
||||||
|
var connection = poolElement.Connection;
|
||||||
|
try
|
||||||
|
{
|
||||||
|
using var transaction = connection.BeginTransaction();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
await operation(connection, transaction);
|
||||||
|
await transaction.CommitAsync();
|
||||||
|
}
|
||||||
|
catch
|
||||||
|
{
|
||||||
|
await transaction.RollbackAsync();
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
poolElement.Semaphore.Release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void ExecuteInTransaction(Action<MySqlConnection, MySqlTransaction> operation)
|
||||||
|
{
|
||||||
|
var poolElement = GetMySqlConnectionPoolElement().Result;
|
||||||
|
var connection = poolElement.Connection;
|
||||||
|
try
|
||||||
|
{
|
||||||
|
using var transaction = connection.BeginTransaction();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
operation(connection, transaction);
|
||||||
|
transaction.Commit();
|
||||||
|
}
|
||||||
|
catch
|
||||||
|
{
|
||||||
|
transaction.Rollback();
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
poolElement.Semaphore.Release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public struct MySqlConnectionPoolElement
|
||||||
|
{
|
||||||
|
public MySqlConnection Connection;
|
||||||
|
public SemaphoreSlim Semaphore;
|
||||||
|
|
||||||
|
public MySqlConnectionPoolElement(MySqlConnection connection, SemaphoreSlim semaphore)
|
||||||
|
{
|
||||||
|
Connection = connection;
|
||||||
|
Semaphore = semaphore;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
76
src/Server/Helper/SQLiteHelper.cs
Normal file
76
src/Server/Helper/SQLiteHelper.cs
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
using System.Data;
|
||||||
|
using System.Data.Common;
|
||||||
|
using Microsoft.Data.Sqlite;
|
||||||
|
using Server.Models;
|
||||||
|
using MySql.Data.MySqlClient;
|
||||||
|
using System.Configuration;
|
||||||
|
|
||||||
|
namespace Server.Helper;
|
||||||
|
|
||||||
|
public class SQLiteHelper : SqlHelper, IDisposable
|
||||||
|
{
|
||||||
|
public SQLiteHelper(DbConnection connection, string connectionString) : base(connection, connectionString)
|
||||||
|
{
|
||||||
|
Connection = connection;
|
||||||
|
ConnectionString = connectionString;
|
||||||
|
}
|
||||||
|
|
||||||
|
public SQLiteHelper(EmbeddingSearchOptions options) : base(new SqliteConnection(options.ConnectionStrings.Cache), options.ConnectionStrings.Cache ?? "")
|
||||||
|
{
|
||||||
|
if (options.ConnectionStrings.Cache is null)
|
||||||
|
{
|
||||||
|
throw new ConfigurationErrorsException("Cache options must not be null when instantiating SQLiteHelper");
|
||||||
|
}
|
||||||
|
ConnectionString = options.ConnectionStrings.Cache;
|
||||||
|
Connection = new SqliteConnection(ConnectionString);
|
||||||
|
}
|
||||||
|
|
||||||
|
public override SQLiteHelper DuplicateConnection()
|
||||||
|
{
|
||||||
|
SqliteConnection newConnection = new(ConnectionString);
|
||||||
|
return new SQLiteHelper(newConnection, ConnectionString);
|
||||||
|
}
|
||||||
|
|
||||||
|
public override int ExecuteSQLCommandGetInsertedID(string query, object[] parameters)
|
||||||
|
{
|
||||||
|
lock (Connection)
|
||||||
|
{
|
||||||
|
EnsureConnected();
|
||||||
|
EnsureDbReaderIsClosed();
|
||||||
|
using DbCommand command = Connection.CreateCommand();
|
||||||
|
|
||||||
|
command.CommandText = query;
|
||||||
|
command.Parameters.AddRange(parameters);
|
||||||
|
command.ExecuteNonQuery();
|
||||||
|
command.CommandText = "SELECT last_insert_rowid();";
|
||||||
|
return Convert.ToInt32(command.ExecuteScalar());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public int BulkExecuteNonQuery(string sql, IEnumerable<object[]> parameterSets)
|
||||||
|
{
|
||||||
|
lock (Connection)
|
||||||
|
{
|
||||||
|
EnsureConnected();
|
||||||
|
EnsureDbReaderIsClosed();
|
||||||
|
|
||||||
|
using var transaction = Connection.BeginTransaction();
|
||||||
|
using var command = Connection.CreateCommand();
|
||||||
|
|
||||||
|
command.CommandText = sql;
|
||||||
|
command.Transaction = transaction;
|
||||||
|
|
||||||
|
int affectedRows = 0;
|
||||||
|
|
||||||
|
foreach (var parameters in parameterSets)
|
||||||
|
{
|
||||||
|
command.Parameters.Clear();
|
||||||
|
command.Parameters.AddRange(parameters);
|
||||||
|
affectedRows += command.ExecuteNonQuery();
|
||||||
|
}
|
||||||
|
|
||||||
|
transaction.Commit();
|
||||||
|
return affectedRows;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,9 +1,11 @@
|
|||||||
using System.Collections.Concurrent;
|
using System.Collections.Concurrent;
|
||||||
|
using System.Diagnostics;
|
||||||
using System.Security.Cryptography;
|
using System.Security.Cryptography;
|
||||||
using System.Text;
|
using System.Text;
|
||||||
using System.Text.Json;
|
using System.Text.Json;
|
||||||
using AdaptiveExpressions;
|
using AdaptiveExpressions;
|
||||||
using Server.Exceptions;
|
using Server.Exceptions;
|
||||||
|
using Shared;
|
||||||
using Shared.Models;
|
using Shared.Models;
|
||||||
|
|
||||||
namespace Server.Helper;
|
namespace Server.Helper;
|
||||||
@@ -15,7 +17,7 @@ public class SearchdomainHelper(ILogger<SearchdomainHelper> logger, DatabaseHelp
|
|||||||
|
|
||||||
public static byte[] BytesFromFloatArray(float[] floats)
|
public static byte[] BytesFromFloatArray(float[] floats)
|
||||||
{
|
{
|
||||||
var byteArray = new byte[floats.Length * 4];
|
var byteArray = new byte[floats.Length * sizeof(float)];
|
||||||
var floatArray = floats.ToArray();
|
var floatArray = floats.ToArray();
|
||||||
Buffer.BlockCopy(floatArray, 0, byteArray, 0, byteArray.Length);
|
Buffer.BlockCopy(floatArray, 0, byteArray, 0, byteArray.Length);
|
||||||
return byteArray;
|
return byteArray;
|
||||||
@@ -23,21 +25,21 @@ public class SearchdomainHelper(ILogger<SearchdomainHelper> logger, DatabaseHelp
|
|||||||
|
|
||||||
public static float[] FloatArrayFromBytes(byte[] bytes)
|
public static float[] FloatArrayFromBytes(byte[] bytes)
|
||||||
{
|
{
|
||||||
var floatArray = new float[bytes.Length / 4];
|
var floatArray = new float[bytes.Length / sizeof(float)];
|
||||||
Buffer.BlockCopy(bytes, 0, floatArray, 0, bytes.Length);
|
Buffer.BlockCopy(bytes, 0, floatArray, 0, bytes.Length);
|
||||||
return floatArray;
|
return floatArray;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static bool CacheHasEntity(List<Entity> entityCache, string name)
|
public static bool CacheHasEntity(ConcurrentDictionary<string, Entity> entityCache, string name)
|
||||||
{
|
{
|
||||||
return CacheGetEntity(entityCache, name) is not null;
|
return CacheGetEntity(entityCache, name) is not null;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static Entity? CacheGetEntity(List<Entity> entityCache, string name)
|
public static Entity? CacheGetEntity(ConcurrentDictionary<string, Entity> entityCache, string name)
|
||||||
{
|
{
|
||||||
foreach (Entity entity in entityCache)
|
foreach ((string _, Entity entity) in entityCache)
|
||||||
{
|
{
|
||||||
if (entity.name == name)
|
if (entity.Name == name)
|
||||||
{
|
{
|
||||||
return entity;
|
return entity;
|
||||||
}
|
}
|
||||||
@@ -45,11 +47,11 @@ public class SearchdomainHelper(ILogger<SearchdomainHelper> logger, DatabaseHelp
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<Entity>? EntitiesFromJSON(SearchdomainManager searchdomainManager, ILogger logger, string json)
|
public async Task<List<Entity>?> EntitiesFromJSON(SearchdomainManager searchdomainManager, ILogger logger, string json)
|
||||||
{
|
{
|
||||||
LRUCache<string, Dictionary<string, float[]>> embeddingCache = searchdomainManager.embeddingCache;
|
EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache = searchdomainManager.EmbeddingCache;
|
||||||
AIProvider aIProvider = searchdomainManager.aIProvider;
|
AIProvider aIProvider = searchdomainManager.AiProvider;
|
||||||
SQLHelper helper = searchdomainManager.helper;
|
SQLHelper helper = searchdomainManager.Helper;
|
||||||
|
|
||||||
List<JSONEntity>? jsonEntities = JsonSerializer.Deserialize<List<JSONEntity>>(json);
|
List<JSONEntity>? jsonEntities = JsonSerializer.Deserialize<List<JSONEntity>>(json);
|
||||||
if (jsonEntities is null)
|
if (jsonEntities is null)
|
||||||
@@ -57,230 +59,418 @@ public class SearchdomainHelper(ILogger<SearchdomainHelper> logger, DatabaseHelp
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// toBeCached: model -> [datapoint.text * n]
|
// Prefetch embeddings
|
||||||
Dictionary<string, List<string>> toBeCached = [];
|
Dictionary<string, List<string>> toBeCached = [];
|
||||||
|
Dictionary<string, List<string>> toBeCachedParallel = [];
|
||||||
foreach (JSONEntity jSONEntity in jsonEntities)
|
foreach (JSONEntity jSONEntity in jsonEntities)
|
||||||
{
|
{
|
||||||
|
Dictionary<string, List<string>> targetDictionary = toBeCached;
|
||||||
|
if (searchdomainManager.GetSearchdomain(jSONEntity.Searchdomain).Settings.ParallelEmbeddingsPrefetch)
|
||||||
|
{
|
||||||
|
targetDictionary = toBeCachedParallel;
|
||||||
|
}
|
||||||
foreach (JSONDatapoint datapoint in jSONEntity.Datapoints)
|
foreach (JSONDatapoint datapoint in jSONEntity.Datapoints)
|
||||||
{
|
{
|
||||||
foreach (string model in datapoint.Model)
|
foreach (string model in datapoint.Model)
|
||||||
{
|
{
|
||||||
if (!toBeCached.ContainsKey(model))
|
if (!targetDictionary.ContainsKey(model))
|
||||||
{
|
{
|
||||||
toBeCached[model] = [];
|
targetDictionary[model] = [];
|
||||||
}
|
}
|
||||||
toBeCached[model].Add(datapoint.Text);
|
targetDictionary[model].Add(datapoint.Text);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
foreach (var toBeCachedKV in toBeCached)
|
||||||
|
{
|
||||||
|
string model = toBeCachedKV.Key;
|
||||||
|
List<string> uniqueStrings = [.. toBeCachedKV.Value.Distinct()];
|
||||||
|
Datapoint.GetEmbeddings([.. uniqueStrings], [model], aIProvider, embeddingCache);
|
||||||
|
}
|
||||||
|
Parallel.ForEach(toBeCachedParallel, toBeCachedParallelKV =>
|
||||||
|
{
|
||||||
|
string model = toBeCachedParallelKV.Key;
|
||||||
|
List<string> uniqueStrings = [.. toBeCachedParallelKV.Value.Distinct()];
|
||||||
|
Datapoint.GetEmbeddings([.. uniqueStrings], [model], aIProvider, embeddingCache);
|
||||||
|
});
|
||||||
|
// Index/parse the entities
|
||||||
ConcurrentQueue<Entity> retVal = [];
|
ConcurrentQueue<Entity> retVal = [];
|
||||||
ParallelOptions parallelOptions = new() { MaxDegreeOfParallelism = 16 }; // <-- This is needed! Otherwise if we try to index 100+ entities at once, it spawns 100 threads, exploding the SQL pool
|
ParallelOptions parallelOptions = new() { MaxDegreeOfParallelism = 16 }; // <-- This is needed! Otherwise if we try to index 100+ entities at once, it spawns 100 threads, exploding the SQL pool
|
||||||
Parallel.ForEach(jsonEntities, parallelOptions, jSONEntity =>
|
|
||||||
|
List<Task> entityTasks = [];
|
||||||
|
foreach (JSONEntity jSONEntity in jsonEntities)
|
||||||
{
|
{
|
||||||
var entity = EntityFromJSON(searchdomainManager, logger, jSONEntity);
|
entityTasks.Add(Task.Run(async () =>
|
||||||
if (entity is not null)
|
|
||||||
{
|
{
|
||||||
retVal.Enqueue(entity);
|
var entity = await EntityFromJSON(searchdomainManager, logger, jSONEntity);
|
||||||
|
if (entity is not null)
|
||||||
|
{
|
||||||
|
retVal.Enqueue(entity);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
|
||||||
|
if (entityTasks.Count >= parallelOptions.MaxDegreeOfParallelism)
|
||||||
|
{
|
||||||
|
await Task.WhenAny(entityTasks);
|
||||||
|
entityTasks.RemoveAll(t => t.IsCompleted);
|
||||||
}
|
}
|
||||||
});
|
}
|
||||||
|
|
||||||
|
await Task.WhenAll(entityTasks);
|
||||||
|
|
||||||
return [.. retVal];
|
return [.. retVal];
|
||||||
}
|
}
|
||||||
|
|
||||||
public Entity? EntityFromJSON(SearchdomainManager searchdomainManager, ILogger logger, JSONEntity jsonEntity) //string json)
|
public async Task<Entity?> EntityFromJSON(SearchdomainManager searchdomainManager, ILogger logger, JSONEntity jsonEntity)
|
||||||
{
|
{
|
||||||
SQLHelper helper = searchdomainManager.helper.DuplicateConnection();
|
var stopwatch = Stopwatch.StartNew();
|
||||||
|
|
||||||
|
SQLHelper helper = searchdomainManager.Helper;
|
||||||
Searchdomain searchdomain = searchdomainManager.GetSearchdomain(jsonEntity.Searchdomain);
|
Searchdomain searchdomain = searchdomainManager.GetSearchdomain(jsonEntity.Searchdomain);
|
||||||
List<Entity> entityCache = searchdomain.entityCache;
|
int id_searchdomain = searchdomain.Id;
|
||||||
AIProvider aIProvider = searchdomain.aIProvider;
|
ConcurrentDictionary<string, Entity> entityCache = searchdomain.EntityCache;
|
||||||
LRUCache<string, Dictionary<string, float[]>> embeddingCache = searchdomain.embeddingCache;
|
AIProvider aIProvider = searchdomain.AiProvider;
|
||||||
Entity? preexistingEntity = entityCache.FirstOrDefault(entity => entity.name == jsonEntity.Name);
|
EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache = searchdomain.EmbeddingCache;
|
||||||
bool invalidateSearchCache = false;
|
bool invalidateSearchCache = false;
|
||||||
|
|
||||||
if (preexistingEntity is not null)
|
|
||||||
|
bool hasEntity = entityCache.TryGetValue(jsonEntity.Name, out Entity? preexistingEntity);
|
||||||
|
|
||||||
|
if (hasEntity && preexistingEntity is not null)
|
||||||
{
|
{
|
||||||
int? preexistingEntityID = _databaseHelper.GetEntityID(helper, jsonEntity.Name, jsonEntity.Searchdomain);
|
|
||||||
if (preexistingEntityID is null)
|
int preexistingEntityID = preexistingEntity.Id;
|
||||||
{
|
|
||||||
_logger.LogCritical("Unable to index entity {jsonEntity.Name} because it already exists in the searchdomain but not in the database.", [jsonEntity.Name]);
|
|
||||||
throw new Exception($"Unable to index entity {jsonEntity.Name} because it already exists in the searchdomain but not in the database.");
|
|
||||||
}
|
|
||||||
Dictionary<string, string> attributes = jsonEntity.Attributes;
|
Dictionary<string, string> attributes = jsonEntity.Attributes;
|
||||||
|
|
||||||
// Attribute
|
// Attribute - get changes
|
||||||
foreach (KeyValuePair<string, string> attributesKV in preexistingEntity.attributes.ToList())
|
List<(string attribute, string newValue, int entityId)> updatedAttributes = new(preexistingEntity.Attributes.Count);
|
||||||
|
List<(string attribute, int entityId)> deletedAttributes = new(preexistingEntity.Attributes.Count);
|
||||||
|
List<(string attributeKey, string attribute, int entityId)> addedAttributes = new(jsonEntity.Attributes.Count);
|
||||||
|
foreach (KeyValuePair<string, string> attributesKV in preexistingEntity.Attributes) //.ToList())
|
||||||
{
|
{
|
||||||
string oldAttributeKey = attributesKV.Key;
|
string oldAttributeKey = attributesKV.Key;
|
||||||
string oldAttribute = attributesKV.Value;
|
string oldAttribute = attributesKV.Value;
|
||||||
bool newHasAttribute = jsonEntity.Attributes.TryGetValue(oldAttributeKey, out string? newAttribute);
|
bool newHasAttribute = jsonEntity.Attributes.TryGetValue(oldAttributeKey, out string? newAttribute);
|
||||||
if (newHasAttribute && newAttribute is not null && newAttribute != oldAttribute)
|
if (newHasAttribute && newAttribute is not null && newAttribute != oldAttribute)
|
||||||
{
|
{
|
||||||
// Attribute - Updated
|
updatedAttributes.Add((attribute: oldAttributeKey, newValue: newAttribute, entityId: (int)preexistingEntityID));
|
||||||
Dictionary<string, dynamic> parameters = new()
|
|
||||||
{
|
|
||||||
{ "newValue", newAttribute },
|
|
||||||
{ "entityId", preexistingEntityID },
|
|
||||||
{ "attribute", oldAttributeKey}
|
|
||||||
};
|
|
||||||
helper.ExecuteSQLNonQuery("UPDATE attribute SET value=@newValue WHERE id_entity=@entityId AND attribute=@attribute", parameters);
|
|
||||||
preexistingEntity.attributes[oldAttributeKey] = newAttribute;
|
|
||||||
} else if (!newHasAttribute)
|
} else if (!newHasAttribute)
|
||||||
{
|
{
|
||||||
// Attribute - Deleted
|
deletedAttributes.Add((attribute: oldAttributeKey, entityId: (int)preexistingEntityID));
|
||||||
Dictionary<string, dynamic> parameters = new()
|
|
||||||
{
|
|
||||||
{ "entityId", preexistingEntityID },
|
|
||||||
{ "attribute", oldAttributeKey}
|
|
||||||
};
|
|
||||||
helper.ExecuteSQLNonQuery("DELETE FROM attribute WHERE id_entity=@entityId AND attribute=@attribute", parameters);
|
|
||||||
preexistingEntity.attributes.Remove(oldAttributeKey);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
foreach (var attributesKV in jsonEntity.Attributes)
|
foreach (var attributesKV in jsonEntity.Attributes)
|
||||||
{
|
{
|
||||||
string newAttributeKey = attributesKV.Key;
|
string newAttributeKey = attributesKV.Key;
|
||||||
string newAttribute = attributesKV.Value;
|
string newAttribute = attributesKV.Value;
|
||||||
bool preexistingHasAttribute = preexistingEntity.attributes.TryGetValue(newAttributeKey, out string? preexistingAttribute);
|
bool preexistingHasAttribute = preexistingEntity.Attributes.TryGetValue(newAttributeKey, out string? preexistingAttribute);
|
||||||
if (!preexistingHasAttribute)
|
if (!preexistingHasAttribute)
|
||||||
{
|
{
|
||||||
// Attribute - New
|
// Attribute - New
|
||||||
DatabaseHelper.DatabaseInsertAttribute(helper, newAttributeKey, newAttribute, (int)preexistingEntityID);
|
addedAttributes.Add((attributeKey: newAttributeKey, attribute: newAttribute, entityId: (int)preexistingEntityID));
|
||||||
preexistingEntity.attributes.Add(newAttributeKey, newAttribute);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Datapoint
|
if (updatedAttributes.Count != 0 || deletedAttributes.Count != 0 || addedAttributes.Count != 0)
|
||||||
foreach (Datapoint datapoint_ in preexistingEntity.datapoints.ToList())
|
_logger.LogDebug("EntityFromJSON - Updating existing entity. name: {name}, updatedAttributes: {updatedAttributes}, deletedAttributes: {deletedAttributes}, addedAttributes: {addedAttributes}", [preexistingEntity.Name, updatedAttributes.Count, deletedAttributes.Count, addedAttributes.Count]);
|
||||||
|
// Attribute - apply changes
|
||||||
|
if (updatedAttributes.Count != 0)
|
||||||
|
{
|
||||||
|
// Update
|
||||||
|
await DatabaseHelper.DatabaseUpdateAttributes(helper, updatedAttributes);
|
||||||
|
lock (preexistingEntity.Attributes)
|
||||||
|
{
|
||||||
|
updatedAttributes.ForEach(attribute => preexistingEntity.Attributes[attribute.attribute] = attribute.newValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (deletedAttributes.Count != 0)
|
||||||
|
{
|
||||||
|
// Delete
|
||||||
|
await DatabaseHelper.DatabaseDeleteAttributes(helper, deletedAttributes);
|
||||||
|
lock (preexistingEntity.Attributes)
|
||||||
|
{
|
||||||
|
deletedAttributes.ForEach(attribute => preexistingEntity.Attributes.Remove(attribute.attribute));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (addedAttributes.Count != 0)
|
||||||
|
{
|
||||||
|
// Insert
|
||||||
|
await DatabaseHelper.DatabaseInsertAttributes(helper, addedAttributes);
|
||||||
|
lock (preexistingEntity.Attributes)
|
||||||
|
{
|
||||||
|
addedAttributes.ForEach(attribute => preexistingEntity.Attributes.Add(attribute.attributeKey, attribute.attribute));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Datapoint - get changes
|
||||||
|
List<Datapoint> deletedDatapointInstances = new(preexistingEntity.Datapoints.Count);
|
||||||
|
List<string> deletedDatapoints = new(preexistingEntity.Datapoints.Count);
|
||||||
|
List<(string datapointName, int entityId, JSONDatapoint jsonDatapoint, string hash)> updatedDatapointsText = new(preexistingEntity.Datapoints.Count);
|
||||||
|
List<(string datapointName, string probMethod, string similarityMethod, int entityId, JSONDatapoint jsonDatapoint)> updatedDatapointsNonText = new(preexistingEntity.Datapoints.Count);
|
||||||
|
List<Datapoint> createdDatapointInstances = [];
|
||||||
|
List<(string name, ProbMethodEnum probmethod_embedding, SimilarityMethodEnum similarityMethod, string hash, Dictionary<string, float[]> embeddings, JSONDatapoint datapoint)> createdDatapoints = new(jsonEntity.Datapoints.Length);
|
||||||
|
|
||||||
|
foreach (Datapoint datapoint_ in preexistingEntity.Datapoints.ToList())
|
||||||
{
|
{
|
||||||
Datapoint datapoint = datapoint_; // To enable replacing the datapoint reference as foreach iterators cannot be overwritten
|
Datapoint datapoint = datapoint_; // To enable replacing the datapoint reference as foreach iterators cannot be overwritten
|
||||||
bool newEntityHasDatapoint = jsonEntity.Datapoints.Any(x => x.Name == datapoint.name);
|
JSONDatapoint? newEntityDatapoint = jsonEntity.Datapoints.FirstOrDefault(x => x.Name == datapoint.Name);
|
||||||
|
bool newEntityHasDatapoint = newEntityDatapoint is not null;
|
||||||
if (!newEntityHasDatapoint)
|
if (!newEntityHasDatapoint)
|
||||||
{
|
{
|
||||||
// Datapoint - Deleted
|
// Datapoint - Deleted
|
||||||
Dictionary<string, dynamic> parameters = new()
|
deletedDatapointInstances.Add(datapoint);
|
||||||
{
|
deletedDatapoints.Add(datapoint.Name);
|
||||||
{ "datapointName", datapoint.name },
|
|
||||||
{ "entityId", preexistingEntityID}
|
|
||||||
};
|
|
||||||
helper.ExecuteSQLNonQuery("DELETE e FROM embedding e JOIN datapoint d ON e.id_datapoint=d.id WHERE d.name=@datapointName AND d.id_entity=@entityId", parameters);
|
|
||||||
helper.ExecuteSQLNonQuery("DELETE FROM datapoint WHERE id_entity=@entityId AND name=@datapointName", parameters);
|
|
||||||
preexistingEntity.datapoints.Remove(datapoint);
|
|
||||||
invalidateSearchCache = true;
|
invalidateSearchCache = true;
|
||||||
} else
|
} else
|
||||||
{
|
{
|
||||||
JSONDatapoint? newEntityDatapoint = jsonEntity.Datapoints.FirstOrDefault(x => x.Name == datapoint.name);
|
string? hash = newEntityDatapoint?.Text is not null ? GetHash(newEntityDatapoint) : null;
|
||||||
if (newEntityDatapoint is not null && newEntityDatapoint.Text is not null)
|
if (
|
||||||
|
newEntityDatapoint is not null
|
||||||
|
&& newEntityDatapoint.Text is not null
|
||||||
|
&& hash is not null
|
||||||
|
&& hash != datapoint.Hash)
|
||||||
{
|
{
|
||||||
// Datapoint - Updated (text)
|
// Datapoint - Updated (text)
|
||||||
Dictionary<string, dynamic> parameters = new()
|
updatedDatapointsText.Add(new()
|
||||||
{
|
{
|
||||||
{ "datapointName", datapoint.name },
|
datapointName = newEntityDatapoint.Name,
|
||||||
{ "entityId", preexistingEntityID}
|
entityId = (int)preexistingEntityID,
|
||||||
};
|
jsonDatapoint = newEntityDatapoint,
|
||||||
helper.ExecuteSQLNonQuery("DELETE e FROM embedding e JOIN datapoint d ON e.id_datapoint=d.id WHERE d.name=@datapointName AND d.id_entity=@entityId", parameters);
|
hash = hash
|
||||||
helper.ExecuteSQLNonQuery("DELETE FROM datapoint WHERE id_entity=@entityId AND name=@datapointName", parameters);
|
});
|
||||||
preexistingEntity.datapoints.Remove(datapoint);
|
|
||||||
Datapoint newDatapoint = DatabaseInsertDatapointWithEmbeddings(helper, searchdomain, newEntityDatapoint, (int)preexistingEntityID);
|
|
||||||
preexistingEntity.datapoints.Add(newDatapoint);
|
|
||||||
datapoint = newDatapoint;
|
|
||||||
invalidateSearchCache = true;
|
invalidateSearchCache = true;
|
||||||
}
|
}
|
||||||
if (newEntityDatapoint is not null && (newEntityDatapoint.Probmethod_embedding != datapoint.probMethod.probMethodEnum || newEntityDatapoint.SimilarityMethod != datapoint.similarityMethod.similarityMethodEnum))
|
if (
|
||||||
|
newEntityDatapoint is not null
|
||||||
|
&& (newEntityDatapoint.Probmethod_embedding != datapoint.ProbMethod.ProbMethodEnum
|
||||||
|
|| newEntityDatapoint.SimilarityMethod != datapoint.SimilarityMethod.SimilarityMethodEnum))
|
||||||
{
|
{
|
||||||
// Datapoint - Updated (probmethod or similaritymethod)
|
// Datapoint - Updated (probmethod or similaritymethod)
|
||||||
Dictionary<string, dynamic> parameters = new()
|
updatedDatapointsNonText.Add(new()
|
||||||
{
|
{
|
||||||
{ "probmethod", newEntityDatapoint.Probmethod_embedding.ToString() },
|
datapointName = newEntityDatapoint.Name,
|
||||||
{ "similaritymethod", newEntityDatapoint.SimilarityMethod.ToString() },
|
entityId = (int)preexistingEntityID,
|
||||||
{ "datapointName", datapoint.name },
|
probMethod = newEntityDatapoint.Probmethod_embedding.ToString(),
|
||||||
{ "entityId", preexistingEntityID}
|
similarityMethod = newEntityDatapoint.SimilarityMethod.ToString(),
|
||||||
};
|
jsonDatapoint = newEntityDatapoint
|
||||||
helper.ExecuteSQLNonQuery("UPDATE datapoint SET probmethod_embedding=@probmethod, similaritymethod=@similaritymethod WHERE id_entity=@entityId AND name=@datapointName", parameters);
|
});
|
||||||
Datapoint preexistingDatapoint = preexistingEntity.datapoints.First(x => x == datapoint); // The for loop is a copy. This retrieves the original such that it can be updated.
|
|
||||||
preexistingDatapoint.probMethod = new(newEntityDatapoint.Probmethod_embedding, _logger);
|
|
||||||
preexistingDatapoint.similarityMethod = new(newEntityDatapoint.SimilarityMethod, _logger);
|
|
||||||
invalidateSearchCache = true;
|
invalidateSearchCache = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
foreach (JSONDatapoint jsonDatapoint in jsonEntity.Datapoints)
|
foreach (JSONDatapoint jsonDatapoint in jsonEntity.Datapoints)
|
||||||
{
|
{
|
||||||
bool oldEntityHasDatapoint = preexistingEntity.datapoints.Any(x => x.name == jsonDatapoint.Name);
|
bool oldEntityHasDatapoint = preexistingEntity.Datapoints.Any(x => x.Name == jsonDatapoint.Name);
|
||||||
if (!oldEntityHasDatapoint)
|
if (!oldEntityHasDatapoint)
|
||||||
{
|
{
|
||||||
// Datapoint - New
|
// Datapoint - New
|
||||||
Datapoint datapoint = DatabaseInsertDatapointWithEmbeddings(helper, searchdomain, jsonDatapoint, (int)preexistingEntityID);
|
createdDatapoints.Add(new()
|
||||||
preexistingEntity.datapoints.Add(datapoint);
|
{
|
||||||
|
name = jsonDatapoint.Name,
|
||||||
|
probmethod_embedding = jsonDatapoint.Probmethod_embedding,
|
||||||
|
similarityMethod = jsonDatapoint.SimilarityMethod,
|
||||||
|
hash = GetHash(jsonDatapoint),
|
||||||
|
embeddings = Datapoint.GetEmbeddings(
|
||||||
|
jsonDatapoint.Text ?? throw new Exception("jsonDatapoint.Text must not be null when retrieving embeddings"),
|
||||||
|
[.. jsonDatapoint.Model],
|
||||||
|
aIProvider,
|
||||||
|
embeddingCache
|
||||||
|
),
|
||||||
|
datapoint = jsonDatapoint
|
||||||
|
});
|
||||||
invalidateSearchCache = true;
|
invalidateSearchCache = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
if (deletedDatapointInstances.Count != 0 || createdDatapoints.Count != 0 || addedAttributes.Count != 0 || updatedDatapointsNonText.Count != 0)
|
||||||
|
_logger.LogDebug(
|
||||||
|
"EntityFromJSON - Updating existing entity. name: {name}, deletedDatapointInstances: {deletedDatapointInstances}, createdDatapoints: {createdDatapoints}, addedAttributes: {addedAttributes}, updatedDatapointsNonText: {updatedDatapointsNonText}",
|
||||||
|
[preexistingEntity.Name, deletedDatapointInstances.Count, createdDatapoints.Count, addedAttributes.Count, updatedDatapointsNonText.Count]);
|
||||||
|
// Datapoint - apply changes
|
||||||
|
// Deleted
|
||||||
|
if (deletedDatapointInstances.Count != 0)
|
||||||
|
{
|
||||||
|
await DatabaseHelper.DatabaseDeleteEmbeddingsAndDatapoints(helper, deletedDatapoints, (int)preexistingEntityID);
|
||||||
|
preexistingEntity.Datapoints = [.. preexistingEntity.Datapoints
|
||||||
|
.Where(x =>
|
||||||
|
!deletedDatapointInstances.Contains(x)
|
||||||
|
)
|
||||||
|
];
|
||||||
|
}
|
||||||
|
// Created
|
||||||
|
if (createdDatapoints.Count != 0)
|
||||||
|
{
|
||||||
|
List<Datapoint> datapoint = await DatabaseInsertDatapointsWithEmbeddings(helper, searchdomain, [.. createdDatapoints.Select(element => (element.datapoint, element.hash))], (int)preexistingEntityID, id_searchdomain);
|
||||||
|
datapoint.ForEach(x => preexistingEntity.Datapoints.Add(x));
|
||||||
|
}
|
||||||
|
// Datapoint - Updated (text)
|
||||||
|
if (updatedDatapointsText.Count != 0)
|
||||||
|
{
|
||||||
|
await DatabaseHelper.DatabaseDeleteEmbeddingsAndDatapoints(helper, [.. updatedDatapointsText.Select(datapoint => datapoint.datapointName)], (int)preexistingEntityID);
|
||||||
|
// Remove from datapoints
|
||||||
|
var namesToRemove = updatedDatapointsText
|
||||||
|
.Select(d => d.datapointName)
|
||||||
|
.ToHashSet();
|
||||||
|
var newBag = new ConcurrentBag<Datapoint>(
|
||||||
|
preexistingEntity.Datapoints
|
||||||
|
.Where(x => !namesToRemove.Contains(x.Name))
|
||||||
|
);
|
||||||
|
preexistingEntity.Datapoints = newBag;
|
||||||
|
// Insert into database
|
||||||
|
List<Datapoint> datapoints = await DatabaseInsertDatapointsWithEmbeddings(helper, searchdomain, [.. updatedDatapointsText.Select(element => (datapoint: element.jsonDatapoint, hash: element.hash))], (int)preexistingEntityID, id_searchdomain);
|
||||||
|
// Insert into datapoints
|
||||||
|
datapoints.ForEach(datapoint => preexistingEntity.Datapoints.Add(datapoint));
|
||||||
|
}
|
||||||
|
// Datapoint - Updated (probmethod or similaritymethod)
|
||||||
|
if (updatedDatapointsNonText.Count != 0)
|
||||||
|
{
|
||||||
|
await DatabaseHelper.DatabaseUpdateDatapoint(
|
||||||
|
helper,
|
||||||
|
[.. updatedDatapointsNonText.Select(element => (element.datapointName, element.probMethod, element.similarityMethod))],
|
||||||
|
(int)preexistingEntityID
|
||||||
|
);
|
||||||
|
updatedDatapointsNonText.ForEach(element =>
|
||||||
|
{
|
||||||
|
Datapoint preexistingDatapoint = preexistingEntity.Datapoints.First(x => x.Name == element.datapointName);
|
||||||
|
preexistingDatapoint.ProbMethod = new(element.jsonDatapoint.Probmethod_embedding);
|
||||||
|
preexistingDatapoint.SimilarityMethod = new(element.jsonDatapoint.SimilarityMethod);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
if (invalidateSearchCache)
|
if (invalidateSearchCache)
|
||||||
{
|
{
|
||||||
|
|
||||||
searchdomain.ReconciliateOrInvalidateCacheForNewOrUpdatedEntity(preexistingEntity);
|
searchdomain.ReconciliateOrInvalidateCacheForNewOrUpdatedEntity(preexistingEntity);
|
||||||
|
searchdomain.UpdateModelsInUse();
|
||||||
}
|
}
|
||||||
searchdomain.UpdateModelsInUse();
|
|
||||||
return preexistingEntity;
|
return preexistingEntity;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
int id_entity = DatabaseHelper.DatabaseInsertEntity(helper, jsonEntity.Name, jsonEntity.Probmethod, _databaseHelper.GetSearchdomainID(helper, jsonEntity.Searchdomain));
|
int id_entity = await DatabaseHelper.DatabaseInsertEntity(helper, jsonEntity.Name, jsonEntity.Probmethod, id_searchdomain);
|
||||||
|
List<(string attribute, string value, int id_entity)> toBeInsertedAttributes = [];
|
||||||
foreach (KeyValuePair<string, string> attribute in jsonEntity.Attributes)
|
foreach (KeyValuePair<string, string> attribute in jsonEntity.Attributes)
|
||||||
{
|
{
|
||||||
DatabaseHelper.DatabaseInsertAttribute(helper, attribute.Key, attribute.Value, id_entity); // TODO implement bulk insert to reduce number of queries
|
toBeInsertedAttributes.Add(new() {
|
||||||
|
attribute = attribute.Key,
|
||||||
|
value = attribute.Value,
|
||||||
|
id_entity = id_entity
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
List<Datapoint> datapoints = [];
|
var insertAttributesTask = DatabaseHelper.DatabaseInsertAttributes(helper, toBeInsertedAttributes);
|
||||||
|
|
||||||
|
List<(JSONDatapoint datapoint, string hash)> toBeInsertedDatapoints = [];
|
||||||
|
ConcurrentBag<string> usedModels = searchdomain.ModelsInUse;
|
||||||
foreach (JSONDatapoint jsonDatapoint in jsonEntity.Datapoints)
|
foreach (JSONDatapoint jsonDatapoint in jsonEntity.Datapoints)
|
||||||
{
|
{
|
||||||
string hash = Convert.ToBase64String(SHA256.HashData(Encoding.UTF8.GetBytes(jsonDatapoint.Text)));
|
string hash = Convert.ToBase64String(SHA256.HashData(Encoding.UTF8.GetBytes(jsonDatapoint.Text)));
|
||||||
Datapoint datapoint = DatabaseInsertDatapointWithEmbeddings(helper, searchdomain, jsonDatapoint, id_entity, hash);
|
toBeInsertedDatapoints.Add(new()
|
||||||
datapoints.Add(datapoint);
|
{
|
||||||
|
datapoint = jsonDatapoint,
|
||||||
|
hash = hash
|
||||||
|
});
|
||||||
|
foreach (string model in jsonDatapoint.Model)
|
||||||
|
{
|
||||||
|
if (!usedModels.Contains(model))
|
||||||
|
{
|
||||||
|
usedModels.Add(model);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
List<Datapoint> datapoints = await DatabaseInsertDatapointsWithEmbeddings(helper, searchdomain, toBeInsertedDatapoints, id_entity, id_searchdomain);
|
||||||
|
|
||||||
var probMethod = Probmethods.GetMethod(jsonEntity.Probmethod) ?? throw new ProbMethodNotFoundException(jsonEntity.Probmethod);
|
var probMethod = Probmethods.GetMethod(jsonEntity.Probmethod) ?? throw new ProbMethodNotFoundException(jsonEntity.Probmethod);
|
||||||
Entity entity = new(jsonEntity.Attributes, probMethod, jsonEntity.Probmethod.ToString(), datapoints, jsonEntity.Name)
|
Entity entity = new(jsonEntity.Attributes, probMethod, jsonEntity.Probmethod.ToString(), [.. datapoints], jsonEntity.Name, jsonEntity.Searchdomain)
|
||||||
{
|
{
|
||||||
id = id_entity
|
Id = id_entity
|
||||||
};
|
};
|
||||||
entityCache.Add(entity);
|
entityCache[jsonEntity.Name] = entity;
|
||||||
|
|
||||||
searchdomain.ReconciliateOrInvalidateCacheForNewOrUpdatedEntity(entity);
|
searchdomain.ReconciliateOrInvalidateCacheForNewOrUpdatedEntity(entity);
|
||||||
searchdomain.UpdateModelsInUse();
|
await insertAttributesTask;
|
||||||
return entity;
|
return entity;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public Datapoint DatabaseInsertDatapointWithEmbeddings(SQLHelper helper, Searchdomain searchdomain, JSONDatapoint jsonDatapoint, int id_entity, string? hash = null)
|
public async Task<List<Datapoint>> DatabaseInsertDatapointsWithEmbeddings(SQLHelper helper, Searchdomain searchdomain, List<(JSONDatapoint datapoint, string hash)> values, int id_entity, int id_searchdomain)
|
||||||
|
{
|
||||||
|
List<Datapoint> result = [];
|
||||||
|
List<(string name, ProbMethodEnum probmethod_embedding, SimilarityMethodEnum similarityMethod, string hash)> toBeInsertedDatapoints = [];
|
||||||
|
List<(int id_datapoint, string model, byte[] embedding)> toBeInsertedEmbeddings = [];
|
||||||
|
foreach ((JSONDatapoint datapoint, string hash) value in values)
|
||||||
|
{
|
||||||
|
Datapoint datapoint = await BuildDatapointFromJsonDatapoint(value.datapoint, id_entity, searchdomain, value.hash);
|
||||||
|
|
||||||
|
toBeInsertedDatapoints.Add(new()
|
||||||
|
{
|
||||||
|
name = datapoint.Name,
|
||||||
|
probmethod_embedding = datapoint.ProbMethod.ProbMethodEnum,
|
||||||
|
similarityMethod = datapoint.SimilarityMethod.SimilarityMethodEnum,
|
||||||
|
hash = value.hash
|
||||||
|
});
|
||||||
|
foreach ((string, float[]) embedding in datapoint.Embeddings)
|
||||||
|
{
|
||||||
|
toBeInsertedEmbeddings.Add(new()
|
||||||
|
{
|
||||||
|
id_datapoint = datapoint.Id,
|
||||||
|
model = embedding.Item1,
|
||||||
|
embedding = BytesFromFloatArray(embedding.Item2)
|
||||||
|
});
|
||||||
|
}
|
||||||
|
result.Add(datapoint);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
await DatabaseHelper.DatabaseInsertEmbeddingBulk(helper, toBeInsertedEmbeddings, id_entity, id_searchdomain);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async Task<Datapoint> DatabaseInsertDatapointWithEmbeddings(SQLHelper helper, Searchdomain searchdomain, JSONDatapoint jsonDatapoint, int id_entity, int id_searchdomain, string? hash = null)
|
||||||
{
|
{
|
||||||
if (jsonDatapoint.Text is null)
|
if (jsonDatapoint.Text is null)
|
||||||
{
|
{
|
||||||
throw new Exception("jsonDatapoint.Text must not be null at this point");
|
throw new Exception("jsonDatapoint.Text must not be null at this point");
|
||||||
}
|
}
|
||||||
hash ??= Convert.ToBase64String(SHA256.HashData(Encoding.UTF8.GetBytes(jsonDatapoint.Text)));
|
hash ??= GetHash(jsonDatapoint);
|
||||||
Datapoint datapoint = BuildDatapointFromJsonDatapoint(jsonDatapoint, id_entity, searchdomain, hash);
|
Datapoint datapoint = await BuildDatapointFromJsonDatapoint(jsonDatapoint, id_entity, searchdomain, hash);
|
||||||
int id_datapoint = DatabaseHelper.DatabaseInsertDatapoint(helper, jsonDatapoint.Name, jsonDatapoint.Probmethod_embedding, jsonDatapoint.SimilarityMethod, hash, id_entity); // TODO make this a bulk add action to reduce number of queries
|
int id_datapoint = await DatabaseHelper.DatabaseInsertDatapoint(helper, jsonDatapoint.Name, jsonDatapoint.Probmethod_embedding, jsonDatapoint.SimilarityMethod, hash, id_entity); // TODO make this a bulk add action to reduce number of queries
|
||||||
List<(string model, byte[] embedding)> data = [];
|
List<(string model, byte[] embedding)> data = [];
|
||||||
foreach ((string, float[]) embedding in datapoint.embeddings)
|
foreach ((string, float[]) embedding in datapoint.Embeddings)
|
||||||
{
|
{
|
||||||
data.Add((embedding.Item1, BytesFromFloatArray(embedding.Item2)));
|
data.Add((embedding.Item1, BytesFromFloatArray(embedding.Item2)));
|
||||||
}
|
}
|
||||||
DatabaseHelper.DatabaseInsertEmbeddingBulk(helper, id_datapoint, data);
|
await DatabaseHelper.DatabaseInsertEmbeddingBulk(helper, id_datapoint, data, id_entity, id_searchdomain);
|
||||||
return datapoint;
|
return datapoint;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Datapoint BuildDatapointFromJsonDatapoint(JSONDatapoint jsonDatapoint, int entityId, Searchdomain searchdomain, string? hash = null)
|
public string GetHash(JSONDatapoint jsonDatapoint)
|
||||||
|
{
|
||||||
|
return Convert.ToBase64String(SHA256.HashData(Encoding.UTF8.GetBytes(jsonDatapoint.Text ?? throw new Exception("jsonDatapoint.Text must not be null to compute hash"))));
|
||||||
|
}
|
||||||
|
|
||||||
|
public async Task<Datapoint> BuildDatapointFromJsonDatapoint(JSONDatapoint jsonDatapoint, int entityId, Searchdomain searchdomain, string? hash = null)
|
||||||
{
|
{
|
||||||
if (jsonDatapoint.Text is null)
|
if (jsonDatapoint.Text is null)
|
||||||
{
|
{
|
||||||
throw new Exception("jsonDatapoint.Text must not be null at this point");
|
throw new Exception("jsonDatapoint.Text must not be null at this point");
|
||||||
}
|
}
|
||||||
using SQLHelper helper = searchdomain.helper.DuplicateConnection();
|
SQLHelper helper = searchdomain.Helper;
|
||||||
LRUCache<string, Dictionary<string, float[]>> embeddingCache = searchdomain.embeddingCache;
|
EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache = searchdomain.EmbeddingCache;
|
||||||
hash ??= Convert.ToBase64String(SHA256.HashData(Encoding.UTF8.GetBytes(jsonDatapoint.Text)));
|
hash ??= Convert.ToBase64String(SHA256.HashData(Encoding.UTF8.GetBytes(jsonDatapoint.Text)));
|
||||||
DatabaseHelper.DatabaseInsertDatapoint(helper, jsonDatapoint.Name, jsonDatapoint.Probmethod_embedding, jsonDatapoint.SimilarityMethod, hash, entityId);
|
int id = await DatabaseHelper.DatabaseInsertDatapoint(helper, jsonDatapoint.Name, jsonDatapoint.Probmethod_embedding, jsonDatapoint.SimilarityMethod, hash, entityId);
|
||||||
Dictionary<string, float[]> embeddings = Datapoint.GenerateEmbeddings(jsonDatapoint.Text, [.. jsonDatapoint.Model], searchdomain.aIProvider, embeddingCache);
|
Dictionary<string, float[]> embeddings = Datapoint.GetEmbeddings(jsonDatapoint.Text, [.. jsonDatapoint.Model], searchdomain.AiProvider, embeddingCache);
|
||||||
var probMethod_embedding = new ProbMethod(jsonDatapoint.Probmethod_embedding, logger) ?? throw new ProbMethodNotFoundException(jsonDatapoint.Probmethod_embedding);
|
var probMethod_embedding = new ProbMethod(jsonDatapoint.Probmethod_embedding) ?? throw new ProbMethodNotFoundException(jsonDatapoint.Probmethod_embedding);
|
||||||
var similarityMethod = new SimilarityMethod(jsonDatapoint.SimilarityMethod, logger) ?? throw new SimilarityMethodNotFoundException(jsonDatapoint.SimilarityMethod);
|
var similarityMethod = new SimilarityMethod(jsonDatapoint.SimilarityMethod) ?? throw new SimilarityMethodNotFoundException(jsonDatapoint.SimilarityMethod);
|
||||||
return new Datapoint(jsonDatapoint.Name, probMethod_embedding, similarityMethod, hash, [.. embeddings.Select(kv => (kv.Key, kv.Value))]);
|
return new Datapoint(jsonDatapoint.Name, probMethod_embedding, similarityMethod, hash, [.. embeddings.Select(kv => (kv.Key, kv.Value))], id);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static (Searchdomain?, int?, string?) TryGetSearchdomain(SearchdomainManager searchdomainManager, string searchdomain, ILogger logger)
|
public static (Searchdomain?, int?, string?) TryGetSearchdomain(SearchdomainManager searchdomainManager, string searchdomain, ILogger logger)
|
||||||
@@ -299,4 +489,9 @@ public class SearchdomainHelper(ILogger<SearchdomainHelper> logger, DatabaseHelp
|
|||||||
return (null, 404, $"Unable to update searchdomain {searchdomain}");
|
return (null, 404, $"Unable to update searchdomain {searchdomain}");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static bool IsSearchdomainLoaded(SearchdomainManager searchdomainManager, string name)
|
||||||
|
{
|
||||||
|
return searchdomainManager.IsSearchdomainLoaded(name);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
@@ -12,6 +12,11 @@ public static class DatabaseMigrations
|
|||||||
int initialDatabaseVersion = DatabaseGetVersion(helper);
|
int initialDatabaseVersion = DatabaseGetVersion(helper);
|
||||||
int databaseVersion = initialDatabaseVersion;
|
int databaseVersion = initialDatabaseVersion;
|
||||||
|
|
||||||
|
if (databaseVersion == 0)
|
||||||
|
{
|
||||||
|
databaseVersion = Create(helper);
|
||||||
|
}
|
||||||
|
|
||||||
var updateMethods = typeof(DatabaseMigrations)
|
var updateMethods = typeof(DatabaseMigrations)
|
||||||
.GetMethods(BindingFlags.Public | BindingFlags.Static)
|
.GetMethods(BindingFlags.Public | BindingFlags.Static)
|
||||||
.Where(m => m.Name.StartsWith("UpdateFrom") && m.ReturnType == typeof(int))
|
.Where(m => m.Name.StartsWith("UpdateFrom") && m.ReturnType == typeof(int))
|
||||||
@@ -24,71 +29,130 @@ public static class DatabaseMigrations
|
|||||||
if (version >= databaseVersion)
|
if (version >= databaseVersion)
|
||||||
{
|
{
|
||||||
databaseVersion = (int)method.Invoke(null, new object[] { helper });
|
databaseVersion = (int)method.Invoke(null, new object[] { helper });
|
||||||
|
var _ = helper.ExecuteSQLNonQuery("UPDATE settings SET value = @databaseVersion", new() { ["databaseVersion"] = databaseVersion.ToString() }).Result;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (databaseVersion != initialDatabaseVersion)
|
|
||||||
{
|
|
||||||
helper.ExecuteSQLNonQuery("UPDATE settings SET value = @databaseVersion", new() { ["databaseVersion"] = databaseVersion.ToString() });
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int DatabaseGetVersion(SQLHelper helper)
|
public static int DatabaseGetVersion(SQLHelper helper)
|
||||||
{
|
{
|
||||||
DbDataReader reader = helper.ExecuteSQLCommand("show tables", []);
|
DbDataReader reader = helper.ExecuteSQLCommand("show tables", []);
|
||||||
bool hasTables = reader.Read();
|
try
|
||||||
reader.Close();
|
|
||||||
if (!hasTables)
|
|
||||||
{
|
{
|
||||||
return 0;
|
bool hasTables = reader.Read();
|
||||||
|
if (!hasTables)
|
||||||
|
{
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
reader.Close();
|
||||||
}
|
}
|
||||||
|
|
||||||
reader = helper.ExecuteSQLCommand("show tables like '%settings%'", []);
|
reader = helper.ExecuteSQLCommand("show tables like '%settings%'", []);
|
||||||
bool hasSystemTable = reader.Read();
|
try
|
||||||
reader.Close();
|
|
||||||
if (!hasSystemTable)
|
|
||||||
{
|
{
|
||||||
return 1;
|
bool hasSystemTable = reader.Read();
|
||||||
|
if (!hasSystemTable)
|
||||||
|
{
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
reader.Close();
|
||||||
}
|
}
|
||||||
reader = helper.ExecuteSQLCommand("SELECT value FROM settings WHERE name=\"DatabaseVersion\"", []);
|
reader = helper.ExecuteSQLCommand("SELECT value FROM settings WHERE name=\"DatabaseVersion\"", []);
|
||||||
reader.Read();
|
try
|
||||||
string rawVersion = reader.GetString(0);
|
|
||||||
reader.Close();
|
|
||||||
bool success = int.TryParse(rawVersion, out int version);
|
|
||||||
if (!success)
|
|
||||||
{
|
{
|
||||||
throw new DatabaseVersionException();
|
reader.Read();
|
||||||
|
string rawVersion = reader.GetString(0);
|
||||||
|
bool success = int.TryParse(rawVersion, out int version);
|
||||||
|
if (!success)
|
||||||
|
{
|
||||||
|
throw new DatabaseVersionException();
|
||||||
|
}
|
||||||
|
return version;
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
reader.Close();
|
||||||
}
|
}
|
||||||
return version;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int Create(SQLHelper helper)
|
public static int Create(SQLHelper helper)
|
||||||
{
|
{
|
||||||
helper.ExecuteSQLNonQuery("CREATE TABLE searchdomain (id int PRIMARY KEY auto_increment, name varchar(512), settings JSON);", []);
|
var _ = helper.ExecuteSQLNonQuery("CREATE TABLE searchdomain (id int PRIMARY KEY auto_increment, name varchar(512), settings JSON);", []).Result;
|
||||||
helper.ExecuteSQLNonQuery("CREATE TABLE entity (id int PRIMARY KEY auto_increment, name varchar(512), probmethod varchar(128), id_searchdomain int, FOREIGN KEY (id_searchdomain) REFERENCES searchdomain(id));", []);
|
_ = helper.ExecuteSQLNonQuery("CREATE TABLE entity (id int PRIMARY KEY auto_increment, name varchar(512), probmethod varchar(128), id_searchdomain int, FOREIGN KEY (id_searchdomain) REFERENCES searchdomain(id));", []).Result;
|
||||||
helper.ExecuteSQLNonQuery("CREATE TABLE attribute (id int PRIMARY KEY auto_increment, id_entity int, attribute varchar(512), value longtext, FOREIGN KEY (id_entity) REFERENCES entity(id));", []);
|
_ = helper.ExecuteSQLNonQuery("CREATE TABLE attribute (id int PRIMARY KEY auto_increment, id_entity int, attribute varchar(512), value longtext, FOREIGN KEY (id_entity) REFERENCES entity(id));", []).Result;
|
||||||
helper.ExecuteSQLNonQuery("CREATE TABLE datapoint (id int PRIMARY KEY auto_increment, name varchar(512), probmethod_embedding varchar(512), id_entity int, FOREIGN KEY (id_entity) REFERENCES entity(id));", []);
|
_ = helper.ExecuteSQLNonQuery("CREATE TABLE datapoint (id int PRIMARY KEY auto_increment, name varchar(512), probmethod_embedding varchar(512), id_entity int, FOREIGN KEY (id_entity) REFERENCES entity(id));", []).Result;
|
||||||
helper.ExecuteSQLNonQuery("CREATE TABLE embedding (id int PRIMARY KEY auto_increment, id_datapoint int, model varchar(512), embedding blob, FOREIGN KEY (id_datapoint) REFERENCES datapoint(id));", []);
|
_ = helper.ExecuteSQLNonQuery("CREATE TABLE embedding (id int PRIMARY KEY auto_increment, id_datapoint int, model varchar(512), embedding blob, FOREIGN KEY (id_datapoint) REFERENCES datapoint(id));", []).Result;
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int UpdateFrom1(SQLHelper helper)
|
public static int UpdateFrom1(SQLHelper helper)
|
||||||
{
|
{
|
||||||
helper.ExecuteSQLNonQuery("CREATE TABLE settings (name varchar(512), value varchar(8192));", []);
|
var _ = helper.ExecuteSQLNonQuery("CREATE TABLE settings (name varchar(512), value varchar(8192));", []).Result;
|
||||||
helper.ExecuteSQLNonQuery("INSERT INTO settings (name, value) VALUES (\"DatabaseVersion\", \"2\");", []);
|
_ = helper.ExecuteSQLNonQuery("INSERT INTO settings (name, value) VALUES (\"DatabaseVersion\", \"2\");", []).Result;
|
||||||
return 2;
|
return 2;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int UpdateFrom2(SQLHelper helper)
|
public static int UpdateFrom2(SQLHelper helper)
|
||||||
{
|
{
|
||||||
helper.ExecuteSQLNonQuery("ALTER TABLE datapoint ADD hash VARCHAR(44);", []);
|
var _ = helper.ExecuteSQLNonQuery("ALTER TABLE datapoint ADD hash VARCHAR(44);", []).Result;
|
||||||
helper.ExecuteSQLNonQuery("UPDATE datapoint SET hash='';", []);
|
_ = helper.ExecuteSQLNonQuery("UPDATE datapoint SET hash='';", []).Result;
|
||||||
return 3;
|
return 3;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static int UpdateFrom3(SQLHelper helper)
|
public static int UpdateFrom3(SQLHelper helper)
|
||||||
{
|
{
|
||||||
helper.ExecuteSQLNonQuery("ALTER TABLE datapoint ADD COLUMN similaritymethod VARCHAR(512) NULL DEFAULT 'Cosine' AFTER probmethod_embedding", []);
|
var _ = helper.ExecuteSQLNonQuery("ALTER TABLE datapoint ADD COLUMN similaritymethod VARCHAR(512) NULL DEFAULT 'Cosine' AFTER probmethod_embedding", []).Result;
|
||||||
return 4;
|
return 4;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static int UpdateFrom4(SQLHelper helper)
|
||||||
|
{
|
||||||
|
var _ = helper.ExecuteSQLNonQuery("UPDATE searchdomain SET settings = JSON_SET(settings, '$.QueryCacheSize', 1000000) WHERE JSON_EXTRACT(settings, '$.QueryCacheSize') is NULL;", []).Result; // Set QueryCacheSize to a default of 1000000
|
||||||
|
return 5;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static int UpdateFrom5(SQLHelper helper)
|
||||||
|
{
|
||||||
|
// Add id_entity to embedding
|
||||||
|
var _ = helper.ExecuteSQLNonQuery("ALTER TABLE embedding ADD COLUMN id_entity INT NULL", []).Result;
|
||||||
|
return 6;
|
||||||
|
}
|
||||||
|
public static int UpdateFrom6(SQLHelper helper)
|
||||||
|
{
|
||||||
|
int count;
|
||||||
|
do
|
||||||
|
{
|
||||||
|
count = helper.ExecuteSQLNonQuery("UPDATE embedding e JOIN datapoint d ON d.id = e.id_datapoint JOIN (SELECT id FROM embedding WHERE id_entity IS NULL LIMIT 10000) x on x.id = e.id SET e.id_entity = d.id_entity;", []).Result;
|
||||||
|
} while (count == 10000);
|
||||||
|
return 7;
|
||||||
|
}
|
||||||
|
public static int UpdateFrom7(SQLHelper helper)
|
||||||
|
{
|
||||||
|
_ = helper.ExecuteSQLNonQuery("ALTER TABLE embedding MODIFY id_entity INT NOT NULL;", []).Result;
|
||||||
|
_ = helper.ExecuteSQLNonQuery("CREATE INDEX idx_embedding_entity_model ON embedding (id_entity, model)", []).Result;
|
||||||
|
|
||||||
|
// Add id_searchdomain to embedding
|
||||||
|
_ = helper.ExecuteSQLNonQuery("ALTER TABLE embedding ADD COLUMN id_searchdomain INT NULL", []).Result;
|
||||||
|
return 8;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static int UpdateFrom8(SQLHelper helper)
|
||||||
|
{
|
||||||
|
int count = 0;
|
||||||
|
do
|
||||||
|
{
|
||||||
|
count = helper.ExecuteSQLNonQuery("UPDATE embedding e JOIN entity en ON en.id = e.id_entity JOIN (SELECT id FROM embedding WHERE id_searchdomain IS NULL LIMIT 10000) x on x.id = e.id SET e.id_searchdomain = en.id_searchdomain;", []).Result;
|
||||||
|
} while (count == 10000);
|
||||||
|
return 9;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static int UpdateFrom9(SQLHelper helper)
|
||||||
|
{
|
||||||
|
_ = helper.ExecuteSQLNonQuery("ALTER TABLE embedding MODIFY id_searchdomain INT NOT NULL;", []).Result;
|
||||||
|
_ = helper.ExecuteSQLNonQuery("CREATE INDEX idx_embedding_searchdomain_model ON embedding (id_searchdomain)", []).Result;
|
||||||
|
return 10;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
65
src/Server/Migrations/SQLiteMigrations.cs
Normal file
65
src/Server/Migrations/SQLiteMigrations.cs
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
using System.Data.Common;
|
||||||
|
|
||||||
|
public static class SQLiteMigrations
|
||||||
|
{
|
||||||
|
public static void Migrate(DbConnection conn)
|
||||||
|
{
|
||||||
|
EnableWal(conn);
|
||||||
|
|
||||||
|
using var cmd = conn.CreateCommand();
|
||||||
|
|
||||||
|
cmd.CommandText = "PRAGMA user_version;";
|
||||||
|
var version = Convert.ToInt32(cmd.ExecuteScalar());
|
||||||
|
|
||||||
|
if (version == 0)
|
||||||
|
{
|
||||||
|
CreateV1(conn);
|
||||||
|
SetVersion(conn, 1);
|
||||||
|
version = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (version == 1)
|
||||||
|
{
|
||||||
|
// future migration
|
||||||
|
// UpdateFrom1To2(conn);
|
||||||
|
// SetVersion(conn, 2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void EnableWal(DbConnection conn)
|
||||||
|
{
|
||||||
|
using var cmd = conn.CreateCommand();
|
||||||
|
cmd.CommandText = "PRAGMA journal_mode = WAL;";
|
||||||
|
cmd.ExecuteNonQuery();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private static void CreateV1(DbConnection conn)
|
||||||
|
{
|
||||||
|
using var tx = conn.BeginTransaction();
|
||||||
|
using var cmd = conn.CreateCommand();
|
||||||
|
|
||||||
|
cmd.CommandText = """
|
||||||
|
CREATE TABLE embedding_cache (
|
||||||
|
cache_key TEXT NOT NULL,
|
||||||
|
model_key TEXT NOT NULL,
|
||||||
|
embedding BLOB NOT NULL,
|
||||||
|
idx INTEGER NOT NULL,
|
||||||
|
PRIMARY KEY (cache_key, model_key)
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX idx_index
|
||||||
|
ON embedding_cache(idx);
|
||||||
|
""";
|
||||||
|
|
||||||
|
cmd.ExecuteNonQuery();
|
||||||
|
tx.Commit();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void SetVersion(DbConnection conn, int version)
|
||||||
|
{
|
||||||
|
using var cmd = conn.CreateCommand();
|
||||||
|
cmd.CommandText = $"PRAGMA user_version = {version};";
|
||||||
|
cmd.ExecuteNonQuery();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -6,12 +6,13 @@ namespace Server.Models;
|
|||||||
|
|
||||||
public class EmbeddingSearchOptions : ApiKeyOptions
|
public class EmbeddingSearchOptions : ApiKeyOptions
|
||||||
{
|
{
|
||||||
public required ConnectionStringsSection ConnectionStrings { get; set; }
|
public required ConnectionStringsOptions ConnectionStrings { get; set; }
|
||||||
public ElmahOptions? Elmah { get; set; }
|
public ElmahOptions? Elmah { get; set; }
|
||||||
public required long EmbeddingCacheMaxCount { get; set; }
|
|
||||||
public required Dictionary<string, AiProvider> AiProviders { get; set; }
|
public required Dictionary<string, AiProvider> AiProviders { get; set; }
|
||||||
public required SimpleAuthOptions SimpleAuth { get; set; }
|
public required SimpleAuthOptions SimpleAuth { get; set; }
|
||||||
|
public required CacheOptions Cache { get; set; }
|
||||||
public required bool UseHttpsRedirection { get; set; }
|
public required bool UseHttpsRedirection { get; set; }
|
||||||
|
public int? MaxRequestBodySize { get; set; }
|
||||||
}
|
}
|
||||||
|
|
||||||
public class AiProvider
|
public class AiProvider
|
||||||
@@ -34,3 +35,16 @@ public class SimpleUser
|
|||||||
public string Password { get; set; } = "";
|
public string Password { get; set; } = "";
|
||||||
public string[] Roles { get; set; } = [];
|
public string[] Roles { get; set; } = [];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public class ConnectionStringsOptions
|
||||||
|
{
|
||||||
|
public required string SQL { get; set; }
|
||||||
|
public string? Cache { get; set; }
|
||||||
|
}
|
||||||
|
|
||||||
|
public class CacheOptions
|
||||||
|
{
|
||||||
|
public required long CacheTopN { get; set; }
|
||||||
|
public bool StoreEmbeddingCache { get; set; } = false;
|
||||||
|
public int? StoreTopN { get; set; }
|
||||||
|
}
|
||||||
109
src/Server/Models/SQLHelper.cs
Normal file
109
src/Server/Models/SQLHelper.cs
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
namespace Server.Models;
|
||||||
|
using System.Data.Common;
|
||||||
|
|
||||||
|
public abstract partial class SqlHelper : IDisposable
|
||||||
|
{
|
||||||
|
public DbConnection Connection { get; set; }
|
||||||
|
public DbDataReader? DbDataReader { get; set; }
|
||||||
|
public string ConnectionString { get; set; }
|
||||||
|
public SqlHelper(DbConnection connection, string connectionString)
|
||||||
|
{
|
||||||
|
Connection = connection;
|
||||||
|
ConnectionString = connectionString;
|
||||||
|
}
|
||||||
|
|
||||||
|
public abstract SqlHelper DuplicateConnection();
|
||||||
|
|
||||||
|
public void Dispose()
|
||||||
|
{
|
||||||
|
Connection.Close();
|
||||||
|
GC.SuppressFinalize(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
public DbDataReader ExecuteSQLCommand(string query, object[] parameters)
|
||||||
|
{
|
||||||
|
lock (Connection)
|
||||||
|
{
|
||||||
|
EnsureConnected();
|
||||||
|
EnsureDbReaderIsClosed();
|
||||||
|
using DbCommand command = Connection.CreateCommand();
|
||||||
|
command.CommandText = query;
|
||||||
|
command.Parameters.AddRange(parameters);
|
||||||
|
DbDataReader = command.ExecuteReader();
|
||||||
|
return DbDataReader;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void ExecuteQuery<T>(string query, object[] parameters, Func<DbDataReader, T> map)
|
||||||
|
{
|
||||||
|
lock (Connection)
|
||||||
|
{
|
||||||
|
EnsureConnected();
|
||||||
|
EnsureDbReaderIsClosed();
|
||||||
|
|
||||||
|
using var command = Connection.CreateCommand();
|
||||||
|
command.CommandText = query;
|
||||||
|
command.Parameters.AddRange(parameters);
|
||||||
|
|
||||||
|
using var reader = command.ExecuteReader();
|
||||||
|
|
||||||
|
while (reader.Read())
|
||||||
|
{
|
||||||
|
map(reader);
|
||||||
|
}
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public int ExecuteSQLNonQuery(string query, object[] parameters)
|
||||||
|
{
|
||||||
|
lock (Connection)
|
||||||
|
{
|
||||||
|
EnsureConnected();
|
||||||
|
EnsureDbReaderIsClosed();
|
||||||
|
using DbCommand command = Connection.CreateCommand();
|
||||||
|
|
||||||
|
command.CommandText = query;
|
||||||
|
command.Parameters.AddRange(parameters);
|
||||||
|
return command.ExecuteNonQuery();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public abstract int ExecuteSQLCommandGetInsertedID(string query, object[] parameters);
|
||||||
|
|
||||||
|
public bool EnsureConnected()
|
||||||
|
{
|
||||||
|
if (Connection.State != System.Data.ConnectionState.Open)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
Connection.Close();
|
||||||
|
Connection.Open();
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
ElmahCore.ElmahExtensions.RaiseError(ex);
|
||||||
|
throw;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void EnsureDbReaderIsClosed()
|
||||||
|
{
|
||||||
|
int counter = 0;
|
||||||
|
int sleepTime = 10;
|
||||||
|
int timeout = 5000;
|
||||||
|
while (!(DbDataReader?.IsClosed ?? true))
|
||||||
|
{
|
||||||
|
if (counter > timeout / sleepTime)
|
||||||
|
{
|
||||||
|
TimeoutException ex = new("Unable to ensure dbDataReader is closed");
|
||||||
|
ElmahCore.ElmahExtensions.RaiseError(ex);
|
||||||
|
throw ex;
|
||||||
|
}
|
||||||
|
Thread.Sleep(sleepTime);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -6,34 +6,29 @@ namespace Server;
|
|||||||
|
|
||||||
public class ProbMethod
|
public class ProbMethod
|
||||||
{
|
{
|
||||||
public Probmethods.probMethodDelegate method;
|
public Probmethods.ProbMethodDelegate Method;
|
||||||
public ProbMethodEnum probMethodEnum;
|
public ProbMethodEnum ProbMethodEnum;
|
||||||
public string name;
|
public string Name;
|
||||||
|
|
||||||
public ProbMethod(ProbMethodEnum probMethodEnum, ILogger logger)
|
public ProbMethod(ProbMethodEnum probMethodEnum)
|
||||||
{
|
{
|
||||||
this.probMethodEnum = probMethodEnum;
|
this.ProbMethodEnum = probMethodEnum;
|
||||||
this.name = probMethodEnum.ToString();
|
this.Name = probMethodEnum.ToString();
|
||||||
Probmethods.probMethodDelegate? probMethod = Probmethods.GetMethod(name);
|
Probmethods.ProbMethodDelegate? probMethod = Probmethods.GetMethod(Name) ?? throw new ProbMethodNotFoundException(probMethodEnum);
|
||||||
if (probMethod is null)
|
Method = probMethod;
|
||||||
{
|
|
||||||
logger.LogError("Unable to retrieve probMethod {name}", [name]);
|
|
||||||
throw new ProbMethodNotFoundException(probMethodEnum);
|
|
||||||
}
|
|
||||||
method = probMethod;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public static class Probmethods
|
public static class Probmethods
|
||||||
{
|
{
|
||||||
public delegate float probMethodProtoDelegate(List<(string, float)> list, string parameters);
|
public delegate float ProbMethodProtoDelegate(List<(string, float)> list, string parameters);
|
||||||
public delegate float probMethodDelegate(List<(string, float)> list);
|
public delegate float ProbMethodDelegate(List<(string, float)> list);
|
||||||
public static readonly Dictionary<ProbMethodEnum, probMethodProtoDelegate> probMethods;
|
public static readonly Dictionary<ProbMethodEnum, ProbMethodProtoDelegate> ProbMethods;
|
||||||
|
|
||||||
static Probmethods()
|
static Probmethods()
|
||||||
{
|
{
|
||||||
probMethods = new Dictionary<ProbMethodEnum, probMethodProtoDelegate>
|
ProbMethods = new Dictionary<ProbMethodEnum, ProbMethodProtoDelegate>
|
||||||
{
|
{
|
||||||
[ProbMethodEnum.Mean] = Mean,
|
[ProbMethodEnum.Mean] = Mean,
|
||||||
[ProbMethodEnum.HarmonicMean] = HarmonicMean,
|
[ProbMethodEnum.HarmonicMean] = HarmonicMean,
|
||||||
@@ -46,12 +41,12 @@ public static class Probmethods
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
public static probMethodDelegate? GetMethod(ProbMethodEnum probMethodEnum)
|
public static ProbMethodDelegate? GetMethod(ProbMethodEnum probMethodEnum)
|
||||||
{
|
{
|
||||||
return GetMethod(probMethodEnum.ToString());
|
return GetMethod(probMethodEnum.ToString());
|
||||||
}
|
}
|
||||||
|
|
||||||
public static probMethodDelegate? GetMethod(string name)
|
public static ProbMethodDelegate? GetMethod(string name)
|
||||||
{
|
{
|
||||||
string methodName = name;
|
string methodName = name;
|
||||||
string? jsonArg = "";
|
string? jsonArg = "";
|
||||||
@@ -68,7 +63,7 @@ public static class Probmethods
|
|||||||
methodName
|
methodName
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!probMethods.TryGetValue(probMethodEnum, out probMethodProtoDelegate? method))
|
if (!ProbMethods.TryGetValue(probMethodEnum, out ProbMethodProtoDelegate? method))
|
||||||
{
|
{
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,11 +9,14 @@ using Server.Helper;
|
|||||||
using Server.Models;
|
using Server.Models;
|
||||||
using Server.Services;
|
using Server.Services;
|
||||||
using System.Text.Json.Serialization;
|
using System.Text.Json.Serialization;
|
||||||
using System.Reflection;
|
|
||||||
using System.Configuration;
|
using System.Configuration;
|
||||||
using Microsoft.OpenApi.Models;
|
using Microsoft.OpenApi;
|
||||||
using Shared.Models;
|
using Shared.Models;
|
||||||
using Microsoft.AspNetCore.ResponseCompression;
|
using Microsoft.AspNetCore.ResponseCompression;
|
||||||
|
using System.Net;
|
||||||
|
using System.Text;
|
||||||
|
using Server.Migrations;
|
||||||
|
using Microsoft.Data.Sqlite;
|
||||||
|
|
||||||
var builder = WebApplication.CreateBuilder(args);
|
var builder = WebApplication.CreateBuilder(args);
|
||||||
|
|
||||||
@@ -29,10 +32,28 @@ builder.Services.AddControllersWithViews()
|
|||||||
// Add Configuration
|
// Add Configuration
|
||||||
IConfigurationSection configurationSection = builder.Configuration.GetSection("Embeddingsearch");
|
IConfigurationSection configurationSection = builder.Configuration.GetSection("Embeddingsearch");
|
||||||
EmbeddingSearchOptions configuration = configurationSection.Get<EmbeddingSearchOptions>() ?? throw new ConfigurationErrorsException("Unable to start server due to an invalid configration");
|
EmbeddingSearchOptions configuration = configurationSection.Get<EmbeddingSearchOptions>() ?? throw new ConfigurationErrorsException("Unable to start server due to an invalid configration");
|
||||||
|
|
||||||
builder.Services.Configure<EmbeddingSearchOptions>(configurationSection);
|
builder.Services.Configure<EmbeddingSearchOptions>(configurationSection);
|
||||||
builder.Services.Configure<ApiKeyOptions>(configurationSection);
|
builder.Services.Configure<ApiKeyOptions>(configurationSection);
|
||||||
|
|
||||||
|
// Configure Kestrel
|
||||||
|
builder.WebHost.ConfigureKestrel(options =>
|
||||||
|
{
|
||||||
|
options.Limits.MaxRequestBodySize = configuration.MaxRequestBodySize ?? 50 * 1024 * 1024;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Migrate database
|
||||||
|
SQLHelper helper = new(new MySql.Data.MySqlClient.MySqlConnection(configuration.ConnectionStrings.SQL), configuration.ConnectionStrings.SQL);
|
||||||
|
DatabaseMigrations.Migrate(helper);
|
||||||
|
|
||||||
|
// Migrate SQLite cache
|
||||||
|
if (configuration.ConnectionStrings.Cache is not null)
|
||||||
|
{
|
||||||
|
|
||||||
|
var SqliteConnection = new SqliteConnection(configuration.ConnectionStrings.Cache);
|
||||||
|
SqliteConnection.Open();
|
||||||
|
SQLiteMigrations.Migrate(SqliteConnection);
|
||||||
|
}
|
||||||
|
|
||||||
// Add Localization
|
// Add Localization
|
||||||
builder.Services.AddLocalization(options => options.ResourcesPath = "Resources");
|
builder.Services.AddLocalization(options => options.ResourcesPath = "Resources");
|
||||||
builder.Services.Configure<RequestLocalizationOptions>(options =>
|
builder.Services.Configure<RequestLocalizationOptions>(options =>
|
||||||
@@ -48,36 +69,37 @@ builder.Services.AddScoped<LocalizationService>();
|
|||||||
|
|
||||||
// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
|
// Learn more about configuring Swagger/OpenAPI at https://aka.ms/aspnetcore/swashbuckle
|
||||||
builder.Services.AddEndpointsApiExplorer();
|
builder.Services.AddEndpointsApiExplorer();
|
||||||
builder.Services.AddSwaggerGen(c =>
|
builder.Services.AddOpenApi(options =>
|
||||||
{
|
{
|
||||||
var xmlFile = $"{Assembly.GetExecutingAssembly().GetName().Name}.xml";
|
options.AddDocumentTransformer((document, context, _) =>
|
||||||
var xmlPath = Path.Combine(AppContext.BaseDirectory, xmlFile);
|
|
||||||
c.IncludeXmlComments(xmlPath);
|
|
||||||
if (configuration.ApiKeys is not null)
|
|
||||||
{
|
{
|
||||||
c.AddSecurityDefinition("ApiKey", new OpenApiSecurityScheme
|
if (configuration.ApiKeys is null)
|
||||||
{
|
return Task.CompletedTask;
|
||||||
Description = "ApiKey must appear in header",
|
|
||||||
Type = SecuritySchemeType.ApiKey,
|
document.Components ??= new();
|
||||||
Name = "X-API-KEY",
|
document.Components.SecuritySchemes ??= new Dictionary<string, IOpenApiSecurityScheme>();
|
||||||
In = ParameterLocation.Header,
|
|
||||||
Scheme = "ApiKeyScheme"
|
document.Components.SecuritySchemes["ApiKey"] =
|
||||||
});
|
new OpenApiSecurityScheme
|
||||||
var key = new OpenApiSecurityScheme()
|
|
||||||
{
|
|
||||||
Reference = new OpenApiReference
|
|
||||||
{
|
{
|
||||||
Type = ReferenceType.SecurityScheme,
|
Type = SecuritySchemeType.ApiKey,
|
||||||
Id = "ApiKey"
|
Name = "X-API-KEY",
|
||||||
},
|
In = ParameterLocation.Header,
|
||||||
In = ParameterLocation.Header
|
Description = "ApiKey must appear in header"
|
||||||
};
|
};
|
||||||
var requirement = new OpenApiSecurityRequirement
|
|
||||||
{
|
document.Security ??= [];
|
||||||
{ key, []}
|
|
||||||
};
|
// Apply globally
|
||||||
c.AddSecurityRequirement(requirement);
|
document.Security?.Add(
|
||||||
}
|
new OpenApiSecurityRequirement
|
||||||
|
{
|
||||||
|
[new OpenApiSecuritySchemeReference("ApiKey", document)] = []
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
return Task.CompletedTask;
|
||||||
|
});
|
||||||
});
|
});
|
||||||
Log.Logger = new LoggerConfiguration()
|
Log.Logger = new LoggerConfiguration()
|
||||||
.ReadFrom.Configuration(builder.Configuration)
|
.ReadFrom.Configuration(builder.Configuration)
|
||||||
@@ -140,6 +162,57 @@ var app = builder.Build();
|
|||||||
app.UseAuthentication();
|
app.UseAuthentication();
|
||||||
app.UseAuthorization();
|
app.UseAuthorization();
|
||||||
|
|
||||||
|
// Configure Elmah
|
||||||
|
app.Use(async (context, next) =>
|
||||||
|
{
|
||||||
|
if (context.Request.Path.StartsWithSegments("/elmah"))
|
||||||
|
{
|
||||||
|
context.Response.OnStarting(() =>
|
||||||
|
{
|
||||||
|
context.Response.Headers.Append(
|
||||||
|
"Content-Security-Policy",
|
||||||
|
"default-src 'self' 'unsafe-inline' 'unsafe-eval'"
|
||||||
|
);
|
||||||
|
return Task.CompletedTask;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
await next();
|
||||||
|
});
|
||||||
|
app.Use(async (context, next) =>
|
||||||
|
{
|
||||||
|
if (!context.Request.Path.StartsWithSegments("/elmah"))
|
||||||
|
{
|
||||||
|
await next();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var originalBody = context.Response.Body;
|
||||||
|
using var memStream = new MemoryStream();
|
||||||
|
context.Response.Body = memStream;
|
||||||
|
|
||||||
|
await next();
|
||||||
|
|
||||||
|
memStream.Position = 0;
|
||||||
|
var html = await new StreamReader(memStream).ReadToEndAsync();
|
||||||
|
|
||||||
|
if (context.Response.ContentType?.Contains("text/html") == true)
|
||||||
|
{
|
||||||
|
html = html.Replace(
|
||||||
|
"</head>",
|
||||||
|
"""
|
||||||
|
<link rel="stylesheet" href="/elmah-ui/custom.css" />
|
||||||
|
<script src="/elmah-ui/custom.js"></script>
|
||||||
|
</head>
|
||||||
|
"""
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
var bytes = Encoding.UTF8.GetBytes(html);
|
||||||
|
context.Response.ContentLength = bytes.Length;
|
||||||
|
await originalBody.WriteAsync(bytes);
|
||||||
|
context.Response.Body = originalBody;
|
||||||
|
});
|
||||||
app.UseElmah();
|
app.UseElmah();
|
||||||
|
|
||||||
app.MapHealthChecks("/healthz");
|
app.MapHealthChecks("/healthz");
|
||||||
@@ -161,7 +234,7 @@ app.Use(async (context, next) =>
|
|||||||
{
|
{
|
||||||
if (!context.User.Identity?.IsAuthenticated ?? true)
|
if (!context.User.Identity?.IsAuthenticated ?? true)
|
||||||
{
|
{
|
||||||
context.Response.Redirect("/Account/Login");
|
context.Response.Redirect($"/Account/Login?ReturnUrl={WebUtility.UrlEncode("/swagger")}");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -175,11 +248,16 @@ app.Use(async (context, next) =>
|
|||||||
await next();
|
await next();
|
||||||
});
|
});
|
||||||
|
|
||||||
app.UseSwagger();
|
|
||||||
app.UseSwaggerUI(options =>
|
app.UseSwaggerUI(options =>
|
||||||
{
|
{
|
||||||
|
options.SwaggerEndpoint("/openapi/v1.json", "API v1");
|
||||||
|
options.RoutePrefix = "swagger";
|
||||||
options.EnablePersistAuthorization();
|
options.EnablePersistAuthorization();
|
||||||
|
options.InjectStylesheet("/swagger-ui/custom.css");
|
||||||
|
options.InjectJavascript("/swagger-ui/custom.js");
|
||||||
});
|
});
|
||||||
|
app.MapOpenApi("/openapi/v1.json");
|
||||||
|
|
||||||
//app.UseElmahExceptionPage(); // Messes with JSON response for API calls. Leaving this here so I don't accidentally put this in again later on.
|
//app.UseElmahExceptionPage(); // Messes with JSON response for API calls. Leaving this here so I don't accidentally put this in again later on.
|
||||||
|
|
||||||
if (configuration.ApiKeys is not null)
|
if (configuration.ApiKeys is not null)
|
||||||
|
|||||||
@@ -55,7 +55,7 @@
|
|||||||
<value>Such-Cache</value>
|
<value>Such-Cache</value>
|
||||||
</data>
|
</data>
|
||||||
<data name="Search cache utilization" xml:space="preserve">
|
<data name="Search cache utilization" xml:space="preserve">
|
||||||
<value>Such-Cache Speicherauslastung</value>
|
<value>Such-Cache-Speicherauslastung</value>
|
||||||
</data>
|
</data>
|
||||||
<data name="Clear" xml:space="preserve">
|
<data name="Clear" xml:space="preserve">
|
||||||
<value>Leeren</value>
|
<value>Leeren</value>
|
||||||
@@ -121,7 +121,7 @@
|
|||||||
<value>Searchdomain Name</value>
|
<value>Searchdomain Name</value>
|
||||||
</data>
|
</data>
|
||||||
<data name="Enable cache reconciliation" xml:space="preserve">
|
<data name="Enable cache reconciliation" xml:space="preserve">
|
||||||
<value>Cache Abgleich verwenden</value>
|
<value>Cache-Abgleich verwenden</value>
|
||||||
</data>
|
</data>
|
||||||
<data name="Create entity" xml:space="preserve">
|
<data name="Create entity" xml:space="preserve">
|
||||||
<value>Entity erstellen</value>
|
<value>Entity erstellen</value>
|
||||||
@@ -175,10 +175,10 @@
|
|||||||
<value>Searchdomain konnte nicht erstellt werden</value>
|
<value>Searchdomain konnte nicht erstellt werden</value>
|
||||||
</data>
|
</data>
|
||||||
<data name="Searchdomain cache was cleared successfully" xml:space="preserve">
|
<data name="Searchdomain cache was cleared successfully" xml:space="preserve">
|
||||||
<value>Searchdomain Cache wurde erfolgreich geleert</value>
|
<value>Searchdomain-Cache wurde erfolgreich geleert</value>
|
||||||
</data>
|
</data>
|
||||||
<data name="Failed to clear searchdomain cache" xml:space="preserve">
|
<data name="Failed to clear searchdomain cache" xml:space="preserve">
|
||||||
<value>Searchdomain Cache konnte nicht geleert werden</value>
|
<value>Searchdomain-Cache konnte nicht geleert werden</value>
|
||||||
</data>
|
</data>
|
||||||
<data name="Entity was deleted successfully" xml:space="preserve">
|
<data name="Entity was deleted successfully" xml:space="preserve">
|
||||||
<value>Entity wurde erfolgreich gelöscht</value>
|
<value>Entity wurde erfolgreich gelöscht</value>
|
||||||
@@ -229,7 +229,7 @@
|
|||||||
<value>Searchdomain Einstellungen konnten nicht abgerufen werden</value>
|
<value>Searchdomain Einstellungen konnten nicht abgerufen werden</value>
|
||||||
</data>
|
</data>
|
||||||
<data name="Unable to fetch searchdomain cache utilization" xml:space="preserve">
|
<data name="Unable to fetch searchdomain cache utilization" xml:space="preserve">
|
||||||
<value>Searchdomain Cache-Auslastung konnte nicht abgerufen werden</value>
|
<value>Searchdomain-Cache-Auslastung konnte nicht abgerufen werden</value>
|
||||||
</data>
|
</data>
|
||||||
<data name="Details" xml:space="preserve">
|
<data name="Details" xml:space="preserve">
|
||||||
<value>Details</value>
|
<value>Details</value>
|
||||||
@@ -243,4 +243,97 @@
|
|||||||
<data name="Close alert" xml:space="preserve">
|
<data name="Close alert" xml:space="preserve">
|
||||||
<value>Benachrichtigung schließen</value>
|
<value>Benachrichtigung schließen</value>
|
||||||
</data>
|
</data>
|
||||||
|
<data name="Recent queries" xml:space="preserve">
|
||||||
|
<value>Letzte Queries</value>
|
||||||
|
</data>
|
||||||
|
<data name="Home" xml:space="preserve">
|
||||||
|
<value>Dashboard</value>
|
||||||
|
</data>
|
||||||
|
<data name="Searchdomains" xml:space="preserve">
|
||||||
|
<value>Searchdomains</value>
|
||||||
|
</data>
|
||||||
|
<data name="Swagger" xml:space="preserve">
|
||||||
|
<value>Swagger</value>
|
||||||
|
</data>
|
||||||
|
<data name="Elmah" xml:space="preserve">
|
||||||
|
<value>Elmah</value>
|
||||||
|
</data>
|
||||||
|
<data name="Hi!" xml:space="preserve">
|
||||||
|
<value>Hallo!</value>
|
||||||
|
</data>
|
||||||
|
<data name="Hi, {0}!" xml:space="preserve">
|
||||||
|
<value>Hallo {0}!</value>
|
||||||
|
</data>
|
||||||
|
<data name="Embedding Cache" xml:space="preserve">
|
||||||
|
<value>Embedding-Cache</value>
|
||||||
|
</data>
|
||||||
|
<data name="Size" xml:space="preserve">
|
||||||
|
<value>Größe</value>
|
||||||
|
</data>
|
||||||
|
<data name="Strings" xml:space="preserve">
|
||||||
|
<value>Zeichenketten</value>
|
||||||
|
</data>
|
||||||
|
<data name="stringsCountInfo" xml:space="preserve">
|
||||||
|
<value>Die Anzahl der Zeichenketten, für die Embeddings vorliegen. D.h. wenn zwei Modelle verwendet werden, ist die Zahl der Embeddings zweimal so hoch.</value>
|
||||||
|
</data>
|
||||||
|
<data name="Embeddings" xml:space="preserve">
|
||||||
|
<value>Embeddings</value>
|
||||||
|
</data>
|
||||||
|
<data name="Health Checks" xml:space="preserve">
|
||||||
|
<value>Health Checks</value>
|
||||||
|
</data>
|
||||||
|
<data name="Server" xml:space="preserve">
|
||||||
|
<value>Server</value>
|
||||||
|
</data>
|
||||||
|
<data name="AI Providers" xml:space="preserve">
|
||||||
|
<value>AI Providers</value>
|
||||||
|
</data>
|
||||||
|
<data name="Count" xml:space="preserve">
|
||||||
|
<value>Anzahl</value>
|
||||||
|
</data>
|
||||||
|
<data name="Total Entities" xml:space="preserve">
|
||||||
|
<value>Entities insgesamt</value>
|
||||||
|
</data>
|
||||||
|
<data name="Total query cache utilization" xml:space="preserve">
|
||||||
|
<value>Query-Cache-Verwendung insgesamt</value>
|
||||||
|
</data>
|
||||||
|
<data name="Unable to fetch searchdomain database utilization" xml:space="preserve">
|
||||||
|
<value>Searchdomain Datenbank-Auslastung konnte nicht abgerufen werden</value>
|
||||||
|
</data>
|
||||||
|
<data name="Query cache entry count" xml:space="preserve">
|
||||||
|
<value>Query-Cache Einträge</value>
|
||||||
|
</data>
|
||||||
|
<data name="Query cache capacity (all)" xml:space="preserve">
|
||||||
|
<value>Query-Cache Kapazität (alle)</value>
|
||||||
|
</data>
|
||||||
|
<data name="queryCacheEntryCountAllInfo" xml:space="preserve">
|
||||||
|
<value>Anzahl der Einträge, die insgesamt in den Query-Cache passen. Ungeladene Searchdomains werden berücksichtigt.</value>
|
||||||
|
</data>
|
||||||
|
<data name="Query cache capacity (loaded)" xml:space="preserve">
|
||||||
|
<value>Query-Cache Kapazität (geladen)</value>
|
||||||
|
</data>
|
||||||
|
<data name="queryCacheEntryCountLoadedInfo" xml:space="preserve">
|
||||||
|
<value>Anzahl der Einträge, die insgesamt in den Query-Cache der geladenen Searchdomains passen.</value>
|
||||||
|
</data>
|
||||||
|
<data name="Query cache size" xml:space="preserve">
|
||||||
|
<value>Query Cache Größe</value>
|
||||||
|
</data>
|
||||||
|
<data name="Embeddings parallel prefetching" xml:space="preserve">
|
||||||
|
<value>Embeddings parallel prefetchen</value>
|
||||||
|
</data>
|
||||||
|
<data name="parallelEmbeddingsPrefetchInfo" xml:space="preserve">
|
||||||
|
<value>Wenn diese Einstellung aktiv ist, wird das Abrufen von Embeddings beim Indizieren von Entities parallelisiert. Deaktiviere diese Einstellung, falls Model-unloading ein Problem ist.</value>
|
||||||
|
</data>
|
||||||
|
<data name="Add result" xml:space="preserve">
|
||||||
|
<value>Ergebnis hinzufügen</value>
|
||||||
|
</data>
|
||||||
|
<data name="Search query was updated successfully" xml:space="preserve">
|
||||||
|
<value>Suchanfrage wurde erfolgreich angepasst</value>
|
||||||
|
</data>
|
||||||
|
<data name="Total RAM usage" xml:space="preserve">
|
||||||
|
<value>RAM Verwendung insgesamt</value>
|
||||||
|
</data>
|
||||||
|
<data name="Total Database size" xml:space="preserve">
|
||||||
|
<value>Datenbankgröße insgesamt</value>
|
||||||
|
</data>
|
||||||
</root>
|
</root>
|
||||||
@@ -243,4 +243,97 @@
|
|||||||
<data name="Close alert" xml:space="preserve">
|
<data name="Close alert" xml:space="preserve">
|
||||||
<value>Close alert</value>
|
<value>Close alert</value>
|
||||||
</data>
|
</data>
|
||||||
|
<data name="Recent queries" xml:space="preserve">
|
||||||
|
<value>Recent queries</value>
|
||||||
|
</data>
|
||||||
|
<data name="Home" xml:space="preserve">
|
||||||
|
<value>Dashboard</value>
|
||||||
|
</data>
|
||||||
|
<data name="Searchdomains" xml:space="preserve">
|
||||||
|
<value>Searchdomains</value>
|
||||||
|
</data>
|
||||||
|
<data name="Swagger" xml:space="preserve">
|
||||||
|
<value>Swagger</value>
|
||||||
|
</data>
|
||||||
|
<data name="Elmah" xml:space="preserve">
|
||||||
|
<value>Elmah</value>
|
||||||
|
</data>
|
||||||
|
<data name="Hi!" xml:space="preserve">
|
||||||
|
<value>Hi!</value>
|
||||||
|
</data>
|
||||||
|
<data name="Hi, {0}!" xml:space="preserve">
|
||||||
|
<value>Hi {0}!</value>
|
||||||
|
</data>
|
||||||
|
<data name="Embedding Cache" xml:space="preserve">
|
||||||
|
<value>Embedding Cache</value>
|
||||||
|
</data>
|
||||||
|
<data name="Size" xml:space="preserve">
|
||||||
|
<value>Size</value>
|
||||||
|
</data>
|
||||||
|
<data name="Strings" xml:space="preserve">
|
||||||
|
<value>Strings</value>
|
||||||
|
</data>
|
||||||
|
<data name="stringsCountInfo" xml:space="preserve">
|
||||||
|
<value>The number of strings for which there are embeddings. I.e. If you use two models, the amount of embeddings will be twice this number.</value>
|
||||||
|
</data>
|
||||||
|
<data name="Embeddings" xml:space="preserve">
|
||||||
|
<value>Embeddings</value>
|
||||||
|
</data>
|
||||||
|
<data name="Health Checks" xml:space="preserve">
|
||||||
|
<value>Health Checks</value>
|
||||||
|
</data>
|
||||||
|
<data name="Server" xml:space="preserve">
|
||||||
|
<value>Server</value>
|
||||||
|
</data>
|
||||||
|
<data name="AI Providers" xml:space="preserve">
|
||||||
|
<value>AI Providers</value>
|
||||||
|
</data>
|
||||||
|
<data name="Count" xml:space="preserve">
|
||||||
|
<value>Count</value>
|
||||||
|
</data>
|
||||||
|
<data name="Total Entities" xml:space="preserve">
|
||||||
|
<value>Total Entities</value>
|
||||||
|
</data>
|
||||||
|
<data name="Total query cache utilization" xml:space="preserve">
|
||||||
|
<value>Total query cache utilization</value>
|
||||||
|
</data>
|
||||||
|
<data name="Unable to fetch searchdomain database utilization" xml:space="preserve">
|
||||||
|
<value>Unable to fetch searchdomain database utilization</value>
|
||||||
|
</data>
|
||||||
|
<data name="Query cache entry count" xml:space="preserve">
|
||||||
|
<value>Query cache entry count</value>
|
||||||
|
</data>
|
||||||
|
<data name="Query cache capacity (all)" xml:space="preserve">
|
||||||
|
<value>Query cache capacity (all)</value>
|
||||||
|
</data>
|
||||||
|
<data name="queryCacheEntryCountAllInfo" xml:space="preserve">
|
||||||
|
<value>Number of query cache entries that can be stored in the query cache, including searchdomains that are currently not loaded.</value>
|
||||||
|
</data>
|
||||||
|
<data name="Query cache capacity (loaded)" xml:space="preserve">
|
||||||
|
<value>Query cache capacity (loaded)</value>
|
||||||
|
</data>
|
||||||
|
<data name="queryCacheEntryCountLoadedInfo" xml:space="preserve">
|
||||||
|
<value>Number of query cache entries that can be stored in the query cache of all loaded searchdomains.</value>
|
||||||
|
</data>
|
||||||
|
<data name="Query cache size" xml:space="preserve">
|
||||||
|
<value>Query Cache size</value>
|
||||||
|
</data>
|
||||||
|
<data name="Embeddings parallel prefetching" xml:space="preserve">
|
||||||
|
<value>Embeddings parallel prefetching</value>
|
||||||
|
</data>
|
||||||
|
<data name="parallelEmbeddingsPrefetchInfo" xml:space="preserve">
|
||||||
|
<value>With this setting activated the embeddings retrieval will be parallelized when indexing entities. Disable this setting if model unloading is an issue.</value>
|
||||||
|
</data>
|
||||||
|
<data name="Add result" xml:space="preserve">
|
||||||
|
<value>Add result</value>
|
||||||
|
</data>
|
||||||
|
<data name="Search query was updated successfully" xml:space="preserve">
|
||||||
|
<value>Search query was updated successfully</value>
|
||||||
|
</data>
|
||||||
|
<data name="Total RAM usage" xml:space="preserve">
|
||||||
|
<value>Total RAM usage</value>
|
||||||
|
</data>
|
||||||
|
<data name="Total Database size" xml:space="preserve">
|
||||||
|
<value>Total Database size</value>
|
||||||
|
</data>
|
||||||
</root>
|
</root>
|
||||||
@@ -4,8 +4,10 @@ using System.Text.Json;
|
|||||||
using ElmahCore.Mvc.Logger;
|
using ElmahCore.Mvc.Logger;
|
||||||
using MySql.Data.MySqlClient;
|
using MySql.Data.MySqlClient;
|
||||||
using Server.Helper;
|
using Server.Helper;
|
||||||
|
using Shared;
|
||||||
using Shared.Models;
|
using Shared.Models;
|
||||||
using AdaptiveExpressions;
|
using AdaptiveExpressions;
|
||||||
|
using System.Collections.Concurrent;
|
||||||
|
|
||||||
namespace Server;
|
namespace Server;
|
||||||
|
|
||||||
@@ -13,36 +15,33 @@ public class Searchdomain
|
|||||||
{
|
{
|
||||||
private readonly string _connectionString;
|
private readonly string _connectionString;
|
||||||
private readonly string _provider;
|
private readonly string _provider;
|
||||||
public AIProvider aIProvider;
|
public AIProvider AiProvider;
|
||||||
public string searchdomain;
|
public string SearchdomainName;
|
||||||
public int id;
|
public int Id;
|
||||||
public SearchdomainSettings settings;
|
public SearchdomainSettings Settings;
|
||||||
public Dictionary<string, DateTimedSearchResult> searchCache; // Key: query, Value: Search results for that query (with timestamp)
|
public EnumerableLruCache<string, DateTimedSearchResult> QueryCache; // Key: query, Value: Search results for that query (with timestamp)
|
||||||
public List<Entity> entityCache;
|
public ConcurrentDictionary<string, Entity> EntityCache;
|
||||||
public List<string> modelsInUse;
|
public ConcurrentBag<string> ModelsInUse;
|
||||||
public LRUCache<string, Dictionary<string, float[]>> embeddingCache;
|
public EnumerableLruCache<string, Dictionary<string, float[]>> EmbeddingCache;
|
||||||
private readonly MySqlConnection connection;
|
public SQLHelper Helper;
|
||||||
public SQLHelper helper;
|
|
||||||
private readonly ILogger _logger;
|
private readonly ILogger _logger;
|
||||||
|
|
||||||
public Searchdomain(string searchdomain, string connectionString, AIProvider aIProvider, LRUCache<string, Dictionary<string, float[]>> embeddingCache, ILogger logger, string provider = "sqlserver", bool runEmpty = false)
|
public Searchdomain(string searchdomain, string connectionString, SQLHelper sqlHelper, AIProvider aIProvider, EnumerableLruCache<string, Dictionary<string, float[]>> embeddingCache, ILogger logger, string provider = "sqlserver", bool runEmpty = false)
|
||||||
{
|
{
|
||||||
_connectionString = connectionString;
|
_connectionString = connectionString;
|
||||||
_provider = provider.ToLower();
|
_provider = provider.ToLower();
|
||||||
this.searchdomain = searchdomain;
|
this.SearchdomainName = searchdomain;
|
||||||
this.aIProvider = aIProvider;
|
this.AiProvider = aIProvider;
|
||||||
this.embeddingCache = embeddingCache;
|
this.EmbeddingCache = embeddingCache;
|
||||||
this._logger = logger;
|
this._logger = logger;
|
||||||
searchCache = [];
|
EntityCache = [];
|
||||||
entityCache = [];
|
Helper = sqlHelper;
|
||||||
connection = new MySqlConnection(connectionString);
|
Settings = GetSettings();
|
||||||
connection.Open();
|
QueryCache = new(Settings.QueryCacheSize);
|
||||||
helper = new SQLHelper(connection, connectionString);
|
ModelsInUse = []; // To make the compiler shut up - it is set in UpdateSearchDomain() don't worry // yeah, about that...
|
||||||
settings = GetSettings();
|
|
||||||
modelsInUse = []; // To make the compiler shut up - it is set in UpdateSearchDomain() don't worry // yeah, about that...
|
|
||||||
if (!runEmpty)
|
if (!runEmpty)
|
||||||
{
|
{
|
||||||
GetID();
|
Id = GetID().Result;
|
||||||
UpdateEntityCache();
|
UpdateEntityCache();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -52,118 +51,138 @@ public class Searchdomain
|
|||||||
InvalidateSearchCache();
|
InvalidateSearchCache();
|
||||||
Dictionary<string, dynamic> parametersIDSearchdomain = new()
|
Dictionary<string, dynamic> parametersIDSearchdomain = new()
|
||||||
{
|
{
|
||||||
["id"] = this.id
|
["id"] = this.Id
|
||||||
};
|
};
|
||||||
DbDataReader embeddingReader = helper.ExecuteSQLCommand("SELECT e.id, e.id_datapoint, e.model, e.embedding FROM embedding e JOIN datapoint d ON e.id_datapoint = d.id JOIN entity ent ON d.id_entity = ent.id JOIN searchdomain s ON ent.id_searchdomain = s.id WHERE s.id = @id", parametersIDSearchdomain);
|
DbDataReader embeddingReader = Helper.ExecuteSQLCommand("SELECT id, id_datapoint, model, embedding FROM embedding WHERE id_searchdomain = @id", parametersIDSearchdomain);
|
||||||
Dictionary<int, Dictionary<string, float[]>> embedding_unassigned = [];
|
Dictionary<int, Dictionary<string, float[]>> embedding_unassigned = [];
|
||||||
while (embeddingReader.Read())
|
try
|
||||||
{
|
{
|
||||||
int? id_datapoint_debug = null;
|
while (embeddingReader.Read())
|
||||||
try
|
|
||||||
{
|
{
|
||||||
int id_datapoint = embeddingReader.GetInt32(1);
|
int? id_datapoint_debug = null;
|
||||||
id_datapoint_debug = id_datapoint;
|
try
|
||||||
string model = embeddingReader.GetString(2);
|
|
||||||
long length = embeddingReader.GetBytes(3, 0, null, 0, 0);
|
|
||||||
byte[] embedding = new byte[length];
|
|
||||||
embeddingReader.GetBytes(3, 0, embedding, 0, (int) length);
|
|
||||||
if (embedding_unassigned.TryGetValue(id_datapoint, out Dictionary<string, float[]>? embedding_unassigned_id_datapoint))
|
|
||||||
{
|
{
|
||||||
embedding_unassigned[id_datapoint][model] = SearchdomainHelper.FloatArrayFromBytes(embedding);
|
int id_datapoint = embeddingReader.GetInt32(1);
|
||||||
}
|
id_datapoint_debug = id_datapoint;
|
||||||
else
|
string model = embeddingReader.GetString(2);
|
||||||
{
|
long length = embeddingReader.GetBytes(3, 0, null, 0, 0);
|
||||||
embedding_unassigned[id_datapoint] = new()
|
byte[] embedding = new byte[length];
|
||||||
|
embeddingReader.GetBytes(3, 0, embedding, 0, (int) length);
|
||||||
|
if (embedding_unassigned.TryGetValue(id_datapoint, out Dictionary<string, float[]>? embedding_unassigned_id_datapoint))
|
||||||
{
|
{
|
||||||
[model] = SearchdomainHelper.FloatArrayFromBytes(embedding)
|
embedding_unassigned[id_datapoint][model] = SearchdomainHelper.FloatArrayFromBytes(embedding);
|
||||||
};
|
}
|
||||||
}
|
else
|
||||||
} catch (Exception e)
|
{
|
||||||
{
|
embedding_unassigned[id_datapoint] = new()
|
||||||
_logger.LogError("Error reading embedding (id: {id_datapoint}) from database: {e.Message} - {e.StackTrace}", [id_datapoint_debug, e.Message, e.StackTrace]);
|
{
|
||||||
ElmahCore.ElmahExtensions.RaiseError(e);
|
[model] = SearchdomainHelper.FloatArrayFromBytes(embedding)
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
embeddingReader.Close();
|
} catch (Exception e)
|
||||||
|
|
||||||
DbDataReader datapointReader = helper.ExecuteSQLCommand("SELECT d.id, d.id_entity, d.name, d.probmethod_embedding, d.similaritymethod, d.hash FROM datapoint d JOIN entity ent ON d.id_entity = ent.id JOIN searchdomain s ON ent.id_searchdomain = s.id WHERE s.id = @id", parametersIDSearchdomain);
|
|
||||||
Dictionary<int, List<Datapoint>> datapoint_unassigned = [];
|
|
||||||
while (datapointReader.Read())
|
|
||||||
{
|
|
||||||
int id = datapointReader.GetInt32(0);
|
|
||||||
int id_entity = datapointReader.GetInt32(1);
|
|
||||||
string name = datapointReader.GetString(2);
|
|
||||||
string probmethodString = datapointReader.GetString(3);
|
|
||||||
string similarityMethodString = datapointReader.GetString(4);
|
|
||||||
string hash = datapointReader.GetString(5);
|
|
||||||
ProbMethodEnum probmethodEnum = (ProbMethodEnum)Enum.Parse(
|
|
||||||
typeof(ProbMethodEnum),
|
|
||||||
probmethodString
|
|
||||||
);
|
|
||||||
SimilarityMethodEnum similairtyMethodEnum = (SimilarityMethodEnum)Enum.Parse(
|
|
||||||
typeof(SimilarityMethodEnum),
|
|
||||||
similarityMethodString
|
|
||||||
);
|
|
||||||
ProbMethod probmethod = new(probmethodEnum, _logger);
|
|
||||||
SimilarityMethod similarityMethod = new(similairtyMethodEnum, _logger);
|
|
||||||
if (embedding_unassigned.TryGetValue(id, out Dictionary<string, float[]>? embeddings) && probmethod is not null)
|
|
||||||
{
|
|
||||||
embedding_unassigned.Remove(id);
|
|
||||||
if (!datapoint_unassigned.ContainsKey(id_entity))
|
|
||||||
{
|
{
|
||||||
datapoint_unassigned[id_entity] = [];
|
_logger.LogError("Error reading embedding (id: {id_datapoint}) from database: {e.Message} - {e.StackTrace}", [id_datapoint_debug, e.Message, e.StackTrace]);
|
||||||
|
ElmahCore.ElmahExtensions.RaiseError(e);
|
||||||
}
|
}
|
||||||
datapoint_unassigned[id_entity].Add(new Datapoint(name, probmethod, similarityMethod, hash, [.. embeddings.Select(kv => (kv.Key, kv.Value))]));
|
|
||||||
}
|
}
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
embeddingReader.Close();
|
||||||
}
|
}
|
||||||
datapointReader.Close();
|
|
||||||
|
|
||||||
DbDataReader attributeReader = helper.ExecuteSQLCommand("SELECT a.id, a.id_entity, a.attribute, a.value FROM attribute a JOIN entity ent ON a.id_entity = ent.id JOIN searchdomain s ON ent.id_searchdomain = s.id WHERE s.id = @id", parametersIDSearchdomain);
|
DbDataReader datapointReader = Helper.ExecuteSQLCommand("SELECT d.id, d.id_entity, d.name, d.probmethod_embedding, d.similaritymethod, d.hash FROM datapoint d JOIN entity ent ON d.id_entity = ent.id JOIN searchdomain s ON ent.id_searchdomain = s.id WHERE s.id = @id", parametersIDSearchdomain);
|
||||||
|
Dictionary<int, ConcurrentBag<Datapoint>> datapoint_unassigned = [];
|
||||||
|
try
|
||||||
|
{
|
||||||
|
while (datapointReader.Read())
|
||||||
|
{
|
||||||
|
int id = datapointReader.GetInt32(0);
|
||||||
|
int id_entity = datapointReader.GetInt32(1);
|
||||||
|
string name = datapointReader.GetString(2);
|
||||||
|
string probmethodString = datapointReader.GetString(3);
|
||||||
|
string similarityMethodString = datapointReader.GetString(4);
|
||||||
|
string hash = datapointReader.GetString(5);
|
||||||
|
ProbMethodEnum probmethodEnum = (ProbMethodEnum)Enum.Parse(
|
||||||
|
typeof(ProbMethodEnum),
|
||||||
|
probmethodString
|
||||||
|
);
|
||||||
|
SimilarityMethodEnum similairtyMethodEnum = (SimilarityMethodEnum)Enum.Parse(
|
||||||
|
typeof(SimilarityMethodEnum),
|
||||||
|
similarityMethodString
|
||||||
|
);
|
||||||
|
ProbMethod probmethod = new(probmethodEnum);
|
||||||
|
SimilarityMethod similarityMethod = new(similairtyMethodEnum);
|
||||||
|
if (embedding_unassigned.TryGetValue(id, out Dictionary<string, float[]>? embeddings) && probmethod is not null)
|
||||||
|
{
|
||||||
|
embedding_unassigned.Remove(id);
|
||||||
|
if (!datapoint_unassigned.ContainsKey(id_entity))
|
||||||
|
{
|
||||||
|
datapoint_unassigned[id_entity] = [];
|
||||||
|
}
|
||||||
|
datapoint_unassigned[id_entity].Add(new Datapoint(name, probmethod, similarityMethod, hash, [.. embeddings.Select(kv => (kv.Key, kv.Value))], id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
datapointReader.Close();
|
||||||
|
}
|
||||||
|
|
||||||
|
DbDataReader attributeReader = Helper.ExecuteSQLCommand("SELECT a.id, a.id_entity, a.attribute, a.value FROM attribute a JOIN entity ent ON a.id_entity = ent.id JOIN searchdomain s ON ent.id_searchdomain = s.id WHERE s.id = @id", parametersIDSearchdomain);
|
||||||
Dictionary<int, Dictionary<string, string>> attributes_unassigned = [];
|
Dictionary<int, Dictionary<string, string>> attributes_unassigned = [];
|
||||||
while (attributeReader.Read())
|
try
|
||||||
{
|
{
|
||||||
//"SELECT id, id_entity, attribute, value FROM attribute JOIN entity on attribute.id_entity as en JOIN searchdomain on en.id_searchdomain as sd WHERE sd=@id"
|
while (attributeReader.Read())
|
||||||
int id = attributeReader.GetInt32(0);
|
|
||||||
int id_entity = attributeReader.GetInt32(1);
|
|
||||||
string attribute = attributeReader.GetString(2);
|
|
||||||
string value = attributeReader.GetString(3);
|
|
||||||
if (!attributes_unassigned.ContainsKey(id_entity))
|
|
||||||
{
|
{
|
||||||
attributes_unassigned[id_entity] = [];
|
//"SELECT id, id_entity, attribute, value FROM attribute JOIN entity on attribute.id_entity as en JOIN searchdomain on en.id_searchdomain as sd WHERE sd=@id"
|
||||||
}
|
int id = attributeReader.GetInt32(0);
|
||||||
attributes_unassigned[id_entity].Add(attribute, value);
|
int id_entity = attributeReader.GetInt32(1);
|
||||||
}
|
string attribute = attributeReader.GetString(2);
|
||||||
attributeReader.Close();
|
string value = attributeReader.GetString(3);
|
||||||
|
if (!attributes_unassigned.ContainsKey(id_entity))
|
||||||
entityCache = [];
|
|
||||||
DbDataReader entityReader = helper.ExecuteSQLCommand("SELECT entity.id, name, probmethod FROM entity WHERE id_searchdomain=@id", parametersIDSearchdomain);
|
|
||||||
while (entityReader.Read())
|
|
||||||
{
|
|
||||||
//SELECT id, name, probmethod FROM entity WHERE id_searchdomain=@id
|
|
||||||
int id = entityReader.GetInt32(0);
|
|
||||||
string name = entityReader.GetString(1);
|
|
||||||
string probmethodString = entityReader.GetString(2);
|
|
||||||
if (!attributes_unassigned.TryGetValue(id, out Dictionary<string, string>? attributes))
|
|
||||||
{
|
|
||||||
attributes = [];
|
|
||||||
}
|
|
||||||
Probmethods.probMethodDelegate? probmethod = Probmethods.GetMethod(probmethodString);
|
|
||||||
if (datapoint_unassigned.TryGetValue(id, out List<Datapoint>? datapoints) && probmethod is not null)
|
|
||||||
{
|
|
||||||
Entity entity = new(attributes, probmethod, probmethodString, datapoints, name)
|
|
||||||
{
|
{
|
||||||
id = id
|
attributes_unassigned[id_entity] = [];
|
||||||
};
|
}
|
||||||
entityCache.Add(entity);
|
attributes_unassigned[id_entity].Add(attribute, value);
|
||||||
}
|
}
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
attributeReader.Close();
|
||||||
}
|
}
|
||||||
entityReader.Close();
|
|
||||||
modelsInUse = GetModels(entityCache);
|
EntityCache = [];
|
||||||
|
DbDataReader entityReader = Helper.ExecuteSQLCommand("SELECT entity.id, name, probmethod FROM entity WHERE id_searchdomain=@id", parametersIDSearchdomain);
|
||||||
|
try
|
||||||
|
{
|
||||||
|
while (entityReader.Read())
|
||||||
|
{
|
||||||
|
//SELECT id, name, probmethod FROM entity WHERE id_searchdomain=@id
|
||||||
|
int id = entityReader.GetInt32(0);
|
||||||
|
string name = entityReader.GetString(1);
|
||||||
|
string probmethodString = entityReader.GetString(2);
|
||||||
|
if (!attributes_unassigned.TryGetValue(id, out Dictionary<string, string>? attributes))
|
||||||
|
{
|
||||||
|
attributes = [];
|
||||||
|
}
|
||||||
|
Probmethods.ProbMethodDelegate? probmethod = Probmethods.GetMethod(probmethodString);
|
||||||
|
if (datapoint_unassigned.TryGetValue(id, out ConcurrentBag<Datapoint>? datapoints) && probmethod is not null)
|
||||||
|
{
|
||||||
|
Entity entity = new(attributes, probmethod, probmethodString, datapoints, name, SearchdomainName)
|
||||||
|
{
|
||||||
|
Id = id
|
||||||
|
};
|
||||||
|
EntityCache[name] = entity;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} finally
|
||||||
|
{
|
||||||
|
entityReader.Close();
|
||||||
|
}
|
||||||
|
ModelsInUse = GetModels(EntityCache);
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<(float, string)> Search(string query, int? topN = null)
|
public List<(float, string)> Search(string query, int? topN = null)
|
||||||
{
|
{
|
||||||
if (searchCache.TryGetValue(query, out DateTimedSearchResult cachedResult))
|
if (QueryCache.TryGetValue(query, out DateTimedSearchResult cachedResult))
|
||||||
{
|
{
|
||||||
cachedResult.AccessDateTimes.Add(DateTime.Now);
|
cachedResult.AccessDateTimes.Add(DateTime.Now);
|
||||||
return [.. cachedResult.Results.Select(r => (r.Score, r.Name))];
|
return [.. cachedResult.Results.Select(r => (r.Score, r.Name))];
|
||||||
@@ -172,10 +191,9 @@ public class Searchdomain
|
|||||||
Dictionary<string, float[]> queryEmbeddings = GetQueryEmbeddings(query);
|
Dictionary<string, float[]> queryEmbeddings = GetQueryEmbeddings(query);
|
||||||
|
|
||||||
List<(float, string)> result = [];
|
List<(float, string)> result = [];
|
||||||
|
foreach ((string name, Entity entity) in EntityCache)
|
||||||
foreach (Entity entity in entityCache)
|
|
||||||
{
|
{
|
||||||
result.Add((EvaluateEntityAgainstQueryEmbeddings(entity, queryEmbeddings), entity.name));
|
result.Add((EvaluateEntityAgainstQueryEmbeddings(entity, queryEmbeddings), entity.Name));
|
||||||
}
|
}
|
||||||
IEnumerable<(float, string)> sortedResults = result.OrderByDescending(s => s.Item1);
|
IEnumerable<(float, string)> sortedResults = result.OrderByDescending(s => s.Item1);
|
||||||
if (topN is not null)
|
if (topN is not null)
|
||||||
@@ -187,26 +205,26 @@ public class Searchdomain
|
|||||||
[.. sortedResults.Select(r =>
|
[.. sortedResults.Select(r =>
|
||||||
new ResultItem(r.Item1, r.Item2 ))]
|
new ResultItem(r.Item1, r.Item2 ))]
|
||||||
);
|
);
|
||||||
searchCache[query] = new DateTimedSearchResult(DateTime.Now, searchResult);
|
QueryCache.Set(query, new DateTimedSearchResult(DateTime.Now, searchResult));
|
||||||
return results;
|
return results;
|
||||||
}
|
}
|
||||||
|
|
||||||
public Dictionary<string, float[]> GetQueryEmbeddings(string query)
|
public Dictionary<string, float[]> GetQueryEmbeddings(string query)
|
||||||
{
|
{
|
||||||
bool hasQuery = embeddingCache.TryGet(query, out Dictionary<string, float[]> queryEmbeddings);
|
bool hasQuery = EmbeddingCache.TryGetValue(query, out Dictionary<string, float[]>? queryEmbeddings);
|
||||||
bool allModelsInQuery = queryEmbeddings is not null && modelsInUse.All(model => queryEmbeddings.ContainsKey(model));
|
bool allModelsInQuery = queryEmbeddings is not null && ModelsInUse.All(model => queryEmbeddings.ContainsKey(model));
|
||||||
if (!(hasQuery && allModelsInQuery) || queryEmbeddings is null)
|
if (!(hasQuery && allModelsInQuery) || queryEmbeddings is null)
|
||||||
{
|
{
|
||||||
queryEmbeddings = Datapoint.GenerateEmbeddings(query, modelsInUse, aIProvider, embeddingCache);
|
queryEmbeddings = Datapoint.GetEmbeddings(query, ModelsInUse, AiProvider, EmbeddingCache);
|
||||||
if (!embeddingCache.TryGet(query, out var embeddingCacheForCurrentQuery))
|
if (!EmbeddingCache.TryGetValue(query, out var embeddingCacheForCurrentQuery))
|
||||||
{
|
{
|
||||||
embeddingCache.Set(query, queryEmbeddings);
|
EmbeddingCache.Set(query, queryEmbeddings);
|
||||||
}
|
}
|
||||||
else // embeddingCache already has an entry for this query, so the missing model-embedding pairs have to be filled in
|
else // embeddingCache already has an entry for this query, so the missing model-embedding pairs have to be filled in
|
||||||
{
|
{
|
||||||
foreach (KeyValuePair<string, float[]> kvp in queryEmbeddings) // kvp.Key = model, kvp.Value = embedding
|
foreach (KeyValuePair<string, float[]> kvp in queryEmbeddings) // kvp.Key = model, kvp.Value = embedding
|
||||||
{
|
{
|
||||||
if (!embeddingCache.TryGet(kvp.Key, out var _))
|
if (!EmbeddingCache.TryGetValue(kvp.Key, out var _))
|
||||||
{
|
{
|
||||||
embeddingCacheForCurrentQuery[kvp.Key] = kvp.Value;
|
embeddingCacheForCurrentQuery[kvp.Key] = kvp.Value;
|
||||||
}
|
}
|
||||||
@@ -218,37 +236,38 @@ public class Searchdomain
|
|||||||
|
|
||||||
public void UpdateModelsInUse()
|
public void UpdateModelsInUse()
|
||||||
{
|
{
|
||||||
modelsInUse = GetModels([.. entityCache]);
|
ModelsInUse = GetModels(EntityCache);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static float EvaluateEntityAgainstQueryEmbeddings(Entity entity, Dictionary<string, float[]> queryEmbeddings)
|
private static float EvaluateEntityAgainstQueryEmbeddings(Entity entity, Dictionary<string, float[]> queryEmbeddings)
|
||||||
{
|
{
|
||||||
List<(string, float)> datapointProbs = [];
|
List<(string, float)> datapointProbs = [];
|
||||||
foreach (Datapoint datapoint in entity.datapoints)
|
foreach (Datapoint datapoint in entity.Datapoints)
|
||||||
{
|
{
|
||||||
SimilarityMethod similarityMethod = datapoint.similarityMethod;
|
SimilarityMethod similarityMethod = datapoint.SimilarityMethod;
|
||||||
List<(string, float)> list = [];
|
List<(string, float)> list = [];
|
||||||
foreach ((string, float[]) embedding in datapoint.embeddings)
|
foreach ((string, float[]) embedding in datapoint.Embeddings)
|
||||||
{
|
{
|
||||||
string key = embedding.Item1;
|
string key = embedding.Item1;
|
||||||
float value = similarityMethod.method(queryEmbeddings[embedding.Item1], embedding.Item2);
|
float value = similarityMethod.Method(queryEmbeddings[embedding.Item1], embedding.Item2);
|
||||||
list.Add((key, value));
|
list.Add((key, value));
|
||||||
}
|
}
|
||||||
datapointProbs.Add((datapoint.name, datapoint.probMethod.method(list)));
|
datapointProbs.Add((datapoint.Name, datapoint.ProbMethod.Method(list)));
|
||||||
}
|
}
|
||||||
return entity.probMethod(datapointProbs);
|
return entity.ProbMethod(datapointProbs);
|
||||||
}
|
}
|
||||||
|
|
||||||
public static List<string> GetModels(List<Entity> entities)
|
public static ConcurrentBag<string> GetModels(ConcurrentDictionary<string, Entity> entities)
|
||||||
{
|
{
|
||||||
List<string> result = [];
|
ConcurrentBag<string> result = [];
|
||||||
lock (entities)
|
foreach (KeyValuePair<string, Entity> element in entities)
|
||||||
{
|
{
|
||||||
foreach (Entity entity in entities)
|
Entity entity = element.Value;
|
||||||
|
lock (entity)
|
||||||
{
|
{
|
||||||
foreach (Datapoint datapoint in entity.datapoints)
|
foreach (Datapoint datapoint in entity.Datapoints)
|
||||||
{
|
{
|
||||||
foreach ((string, float[]) tuple in datapoint.embeddings)
|
foreach ((string, float[]) tuple in datapoint.Embeddings)
|
||||||
{
|
{
|
||||||
string model = tuple.Item1;
|
string model = tuple.Item1;
|
||||||
if (!result.Contains(model))
|
if (!result.Contains(model))
|
||||||
@@ -262,37 +281,25 @@ public class Searchdomain
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
public int GetID()
|
public async Task<int> GetID()
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
Dictionary<string, object?> parameters = new()
|
||||||
{
|
{
|
||||||
["name"] = this.searchdomain
|
{ "name", this.SearchdomainName }
|
||||||
};
|
};
|
||||||
DbDataReader reader = helper.ExecuteSQLCommand("SELECT id from searchdomain WHERE name = @name", parameters);
|
return (await Helper.ExecuteQueryAsync("SELECT id from searchdomain WHERE name = @name", parameters, x => x.GetInt32(0))).First();
|
||||||
reader.Read();
|
|
||||||
this.id = reader.GetInt32(0);
|
|
||||||
reader.Close();
|
|
||||||
return this.id;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public SearchdomainSettings GetSettings()
|
public SearchdomainSettings GetSettings()
|
||||||
{
|
{
|
||||||
Dictionary<string, dynamic> parameters = new()
|
return DatabaseHelper.GetSearchdomainSettings(Helper, SearchdomainName);
|
||||||
{
|
|
||||||
["name"] = searchdomain
|
|
||||||
};
|
|
||||||
DbDataReader reader = helper.ExecuteSQLCommand("SELECT settings from searchdomain WHERE name = @name", parameters);
|
|
||||||
reader.Read();
|
|
||||||
string settingsString = reader.GetString(0);
|
|
||||||
reader.Close();
|
|
||||||
return JsonSerializer.Deserialize<SearchdomainSettings>(settingsString);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void ReconciliateOrInvalidateCacheForNewOrUpdatedEntity(Entity entity)
|
public void ReconciliateOrInvalidateCacheForNewOrUpdatedEntity(Entity entity)
|
||||||
{
|
{
|
||||||
if (settings.CacheReconciliation)
|
if (Settings.CacheReconciliation)
|
||||||
{
|
{
|
||||||
foreach (KeyValuePair<string, DateTimedSearchResult> element in searchCache)
|
foreach (var element in QueryCache)
|
||||||
{
|
{
|
||||||
string query = element.Key;
|
string query = element.Key;
|
||||||
DateTimedSearchResult searchResult = element.Value;
|
DateTimedSearchResult searchResult = element.Value;
|
||||||
@@ -300,9 +307,9 @@ public class Searchdomain
|
|||||||
Dictionary<string, float[]> queryEmbeddings = GetQueryEmbeddings(query);
|
Dictionary<string, float[]> queryEmbeddings = GetQueryEmbeddings(query);
|
||||||
float evaluationResult = EvaluateEntityAgainstQueryEmbeddings(entity, queryEmbeddings);
|
float evaluationResult = EvaluateEntityAgainstQueryEmbeddings(entity, queryEmbeddings);
|
||||||
|
|
||||||
searchResult.Results.RemoveAll(x => x.Name == entity.name); // If entity already exists in that results list: remove it.
|
searchResult.Results.RemoveAll(x => x.Name == entity.Name); // If entity already exists in that results list: remove it.
|
||||||
|
|
||||||
ResultItem newItem = new(evaluationResult, entity.name);
|
ResultItem newItem = new(evaluationResult, entity.Name);
|
||||||
int index = searchResult.Results.BinarySearch(
|
int index = searchResult.Results.BinarySearch(
|
||||||
newItem,
|
newItem,
|
||||||
Comparer<ResultItem>.Create((a, b) => b.Score.CompareTo(a.Score)) // Invert searching order
|
Comparer<ResultItem>.Create((a, b) => b.Score.CompareTo(a.Score)) // Invert searching order
|
||||||
@@ -320,13 +327,13 @@ public class Searchdomain
|
|||||||
|
|
||||||
public void ReconciliateOrInvalidateCacheForDeletedEntity(Entity entity)
|
public void ReconciliateOrInvalidateCacheForDeletedEntity(Entity entity)
|
||||||
{
|
{
|
||||||
if (settings.CacheReconciliation)
|
if (Settings.CacheReconciliation)
|
||||||
{
|
{
|
||||||
foreach (KeyValuePair<string, DateTimedSearchResult> element in searchCache)
|
foreach (KeyValuePair<string, DateTimedSearchResult> element in QueryCache)
|
||||||
{
|
{
|
||||||
string query = element.Key;
|
string query = element.Key;
|
||||||
DateTimedSearchResult searchResult = element.Value;
|
DateTimedSearchResult searchResult = element.Value;
|
||||||
searchResult.Results.RemoveAll(x => x.Name == entity.name);
|
searchResult.Results.RemoveAll(x => x.Name == entity.Name);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@@ -337,6 +344,18 @@ public class Searchdomain
|
|||||||
|
|
||||||
public void InvalidateSearchCache()
|
public void InvalidateSearchCache()
|
||||||
{
|
{
|
||||||
searchCache = [];
|
QueryCache = new(Settings.QueryCacheSize);
|
||||||
|
}
|
||||||
|
|
||||||
|
public long GetSearchCacheSize()
|
||||||
|
{
|
||||||
|
long EmbeddingCacheUtilization = 0;
|
||||||
|
foreach (var entry in QueryCache)
|
||||||
|
{
|
||||||
|
EmbeddingCacheUtilization += sizeof(int); // string length prefix
|
||||||
|
EmbeddingCacheUtilization += entry.Key.Length * sizeof(char); // string characters
|
||||||
|
EmbeddingCacheUtilization += entry.Value.EstimateSize();
|
||||||
|
}
|
||||||
|
return EmbeddingCacheUtilization;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,54 +6,59 @@ using Server.Exceptions;
|
|||||||
using AdaptiveExpressions;
|
using AdaptiveExpressions;
|
||||||
using Shared.Models;
|
using Shared.Models;
|
||||||
using System.Text.Json;
|
using System.Text.Json;
|
||||||
|
using Microsoft.Extensions.Options;
|
||||||
|
using Server.Models;
|
||||||
|
using Shared;
|
||||||
|
using System.Diagnostics;
|
||||||
|
|
||||||
namespace Server;
|
namespace Server;
|
||||||
|
|
||||||
public class SearchdomainManager
|
public class SearchdomainManager : IDisposable
|
||||||
{
|
{
|
||||||
private Dictionary<string, Searchdomain> searchdomains = [];
|
private Dictionary<string, Searchdomain> _searchdomains = [];
|
||||||
private readonly ILogger<SearchdomainManager> _logger;
|
private readonly ILogger<SearchdomainManager> _logger;
|
||||||
private readonly IConfiguration _config;
|
private readonly EmbeddingSearchOptions _options;
|
||||||
public readonly AIProvider aIProvider;
|
public readonly AIProvider AiProvider;
|
||||||
private readonly DatabaseHelper _databaseHelper;
|
private readonly DatabaseHelper _databaseHelper;
|
||||||
private readonly string connectionString;
|
private readonly string connectionString;
|
||||||
private MySqlConnection connection;
|
private MySqlConnection _connection;
|
||||||
public SQLHelper helper;
|
public SQLHelper Helper;
|
||||||
public LRUCache<string, Dictionary<string, float[]>> embeddingCache;
|
public EnumerableLruCache<string, Dictionary<string, float[]>> EmbeddingCache;
|
||||||
public int EmbeddingCacheMaxCount;
|
public long EmbeddingCacheMaxCount;
|
||||||
|
private bool _disposed = false;
|
||||||
|
|
||||||
public SearchdomainManager(ILogger<SearchdomainManager> logger, IConfiguration config, AIProvider aIProvider, DatabaseHelper databaseHelper)
|
public SearchdomainManager(ILogger<SearchdomainManager> logger, IOptions<EmbeddingSearchOptions> options, AIProvider aIProvider, DatabaseHelper databaseHelper)
|
||||||
{
|
{
|
||||||
_logger = logger;
|
_logger = logger;
|
||||||
_config = config;
|
_options = options.Value;
|
||||||
this.aIProvider = aIProvider;
|
this.AiProvider = aIProvider;
|
||||||
_databaseHelper = databaseHelper;
|
_databaseHelper = databaseHelper;
|
||||||
EmbeddingCacheMaxCount = config.GetValue<int?>("Embeddingsearch:EmbeddingCacheMaxCount") ?? 1000000;
|
EmbeddingCacheMaxCount = _options.Cache.CacheTopN;
|
||||||
embeddingCache = new(EmbeddingCacheMaxCount);
|
if (options.Value.Cache.StoreEmbeddingCache)
|
||||||
connectionString = _config.GetSection("Embeddingsearch").GetConnectionString("SQL") ?? "";
|
|
||||||
connection = new MySqlConnection(connectionString);
|
|
||||||
connection.Open();
|
|
||||||
helper = new SQLHelper(connection, connectionString);
|
|
||||||
try
|
|
||||||
{
|
{
|
||||||
DatabaseMigrations.Migrate(helper);
|
var stopwatch = Stopwatch.StartNew();
|
||||||
}
|
EmbeddingCache = CacheHelper.GetEmbeddingStore(options.Value);
|
||||||
catch (Exception ex)
|
stopwatch.Stop();
|
||||||
|
_logger.LogInformation("GetEmbeddingStore completed in {ElapsedMilliseconds} ms", stopwatch.ElapsedMilliseconds);
|
||||||
|
} else
|
||||||
{
|
{
|
||||||
_logger.LogCritical("Unable to migrate the database due to the exception: {ex}", [ex.Message]);
|
EmbeddingCache = new((int)EmbeddingCacheMaxCount);
|
||||||
throw;
|
|
||||||
}
|
}
|
||||||
|
connectionString = _options.ConnectionStrings.SQL;
|
||||||
|
_connection = new MySqlConnection(connectionString);
|
||||||
|
_connection.Open();
|
||||||
|
Helper = new SQLHelper(_connection, connectionString);
|
||||||
}
|
}
|
||||||
|
|
||||||
public Searchdomain GetSearchdomain(string searchdomain)
|
public Searchdomain GetSearchdomain(string searchdomain)
|
||||||
{
|
{
|
||||||
if (searchdomains.TryGetValue(searchdomain, out Searchdomain? value))
|
if (_searchdomains.TryGetValue(searchdomain, out Searchdomain? value))
|
||||||
{
|
{
|
||||||
return value;
|
return value;
|
||||||
}
|
}
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
return SetSearchdomain(searchdomain, new Searchdomain(searchdomain, connectionString, aIProvider, embeddingCache, _logger));
|
return SetSearchdomain(searchdomain, new Searchdomain(searchdomain, connectionString, Helper, AiProvider, EmbeddingCache, _logger));
|
||||||
}
|
}
|
||||||
catch (MySqlException)
|
catch (MySqlException)
|
||||||
{
|
{
|
||||||
@@ -74,28 +79,19 @@ public class SearchdomainManager
|
|||||||
searchdomain.InvalidateSearchCache();
|
searchdomain.InvalidateSearchCache();
|
||||||
}
|
}
|
||||||
|
|
||||||
public List<string> ListSearchdomains()
|
public async Task<List<string>> ListSearchdomainsAsync()
|
||||||
{
|
{
|
||||||
lock (helper.connection)
|
return await Helper.ExecuteQueryAsync("SELECT name FROM searchdomain", [], x => x.GetString(0));
|
||||||
{
|
|
||||||
DbDataReader reader = helper.ExecuteSQLCommand("SELECT name FROM searchdomain", []);
|
|
||||||
List<string> results = [];
|
|
||||||
while (reader.Read())
|
|
||||||
{
|
|
||||||
results.Add(reader.GetString(0));
|
|
||||||
}
|
|
||||||
reader.Close();
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public int CreateSearchdomain(string searchdomain, SearchdomainSettings settings)
|
public async Task<int> CreateSearchdomain(string searchdomain, SearchdomainSettings settings)
|
||||||
{
|
{
|
||||||
return CreateSearchdomain(searchdomain, JsonSerializer.Serialize(settings));
|
return await CreateSearchdomain(searchdomain, JsonSerializer.Serialize(settings));
|
||||||
}
|
}
|
||||||
public int CreateSearchdomain(string searchdomain, string settings = "{}")
|
|
||||||
|
public async Task<int> CreateSearchdomain(string searchdomain, string settings = "{}")
|
||||||
{
|
{
|
||||||
if (searchdomains.TryGetValue(searchdomain, out Searchdomain? value))
|
if (_searchdomains.TryGetValue(searchdomain, out Searchdomain? value))
|
||||||
{
|
{
|
||||||
_logger.LogError("Searchdomain {searchdomain} could not be created, as it already exists", [searchdomain]);
|
_logger.LogError("Searchdomain {searchdomain} could not be created, as it already exists", [searchdomain]);
|
||||||
throw new SearchdomainAlreadyExistsException(searchdomain);
|
throw new SearchdomainAlreadyExistsException(searchdomain);
|
||||||
@@ -105,21 +101,64 @@ public class SearchdomainManager
|
|||||||
{ "name", searchdomain },
|
{ "name", searchdomain },
|
||||||
{ "settings", settings}
|
{ "settings", settings}
|
||||||
};
|
};
|
||||||
return helper.ExecuteSQLCommandGetInsertedID("INSERT INTO searchdomain (name, settings) VALUES (@name, @settings)", parameters);
|
int id = await Helper.ExecuteSQLCommandGetInsertedID("INSERT INTO searchdomain (name, settings) VALUES (@name, @settings)", parameters);
|
||||||
|
_searchdomains.Add(searchdomain, new(searchdomain, connectionString, Helper, AiProvider, EmbeddingCache, _logger));
|
||||||
|
return id;
|
||||||
}
|
}
|
||||||
|
|
||||||
public int DeleteSearchdomain(string searchdomain)
|
public async Task<int> DeleteSearchdomain(string searchdomain)
|
||||||
{
|
{
|
||||||
int counter = _databaseHelper.RemoveAllEntities(helper, searchdomain);
|
int counter = await _databaseHelper.RemoveAllEntities(Helper, searchdomain);
|
||||||
_logger.LogDebug($"Number of entities deleted as part of deleting the searchdomain \"{searchdomain}\": {counter}");
|
_logger.LogDebug($"Number of entities deleted as part of deleting the searchdomain \"{searchdomain}\": {counter}");
|
||||||
helper.ExecuteSQLNonQuery("DELETE FROM searchdomain WHERE name = @name", new() {{"name", searchdomain}});
|
await Helper.ExecuteSQLNonQuery("DELETE FROM searchdomain WHERE name = @name", new() {{"name", searchdomain}});
|
||||||
searchdomains.Remove(searchdomain);
|
_searchdomains.Remove(searchdomain);
|
||||||
_logger.LogDebug($"Searchdomain has been successfully removed");
|
_logger.LogDebug($"Searchdomain has been successfully removed");
|
||||||
return counter;
|
return counter;
|
||||||
}
|
}
|
||||||
|
|
||||||
private Searchdomain SetSearchdomain(string name, Searchdomain searchdomain)
|
private Searchdomain SetSearchdomain(string name, Searchdomain searchdomain)
|
||||||
{
|
{
|
||||||
searchdomains[name] = searchdomain;
|
_searchdomains[name] = searchdomain;
|
||||||
return searchdomain;
|
return searchdomain;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public bool IsSearchdomainLoaded(string name)
|
||||||
|
{
|
||||||
|
return _searchdomains.ContainsKey(name);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cleanup procedure
|
||||||
|
private async Task Cleanup()
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
if (_options.Cache.StoreEmbeddingCache)
|
||||||
|
{
|
||||||
|
var stopwatch = Stopwatch.StartNew();
|
||||||
|
await CacheHelper.UpdateEmbeddingStore(EmbeddingCache, _options);
|
||||||
|
stopwatch.Stop();
|
||||||
|
_logger.LogInformation("UpdateEmbeddingStore completed in {ElapsedMilliseconds} ms", stopwatch.ElapsedMilliseconds);
|
||||||
|
}
|
||||||
|
_logger.LogInformation("SearchdomainManager cleanup completed");
|
||||||
|
}
|
||||||
|
catch (Exception ex)
|
||||||
|
{
|
||||||
|
_logger.LogError(ex, "Error during SearchdomainManager cleanup");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void Dispose()
|
||||||
|
{
|
||||||
|
Dispose(true).Wait();
|
||||||
|
GC.SuppressFinalize(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
protected virtual async Task Dispose(bool disposing)
|
||||||
|
{
|
||||||
|
if (!_disposed && disposing)
|
||||||
|
{
|
||||||
|
await Cleanup();
|
||||||
|
_disposed = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
<Project Sdk="Microsoft.NET.Sdk.Web">
|
<Project Sdk="Microsoft.NET.Sdk.Web">
|
||||||
|
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
<TargetFramework>net8.0</TargetFramework>
|
<TargetFramework>net10.0</TargetFramework>
|
||||||
<Nullable>enable</Nullable>
|
<Nullable>enable</Nullable>
|
||||||
<ImplicitUsings>enable</ImplicitUsings>
|
<ImplicitUsings>enable</ImplicitUsings>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
@@ -12,21 +12,22 @@
|
|||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<PackageReference Include="AdaptiveExpressions" Version="4.23.0" />
|
<PackageReference Include="AdaptiveExpressions" Version="4.23.1" />
|
||||||
<PackageReference Include="ElmahCore" Version="2.1.2" />
|
<PackageReference Include="ElmahCore" Version="2.1.2" />
|
||||||
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
|
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.2" />
|
||||||
<PackageReference Include="Serilog.AspNetCore" Version="9.0.0" />
|
<PackageReference Include="Newtonsoft.Json" Version="13.0.4" />
|
||||||
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
|
<PackageReference Include="Serilog.AspNetCore" Version="10.0.0" />
|
||||||
<PackageReference Include="Swashbuckle.AspNetCore" Version="6.6.2" />
|
<PackageReference Include="Serilog.Sinks.File" Version="7.0.0" />
|
||||||
<PackageReference Include="Microsoft.Data.SqlClient" Version="6.0.1" />
|
<PackageReference Include="Swashbuckle.AspNetCore" Version="10.1.0" />
|
||||||
<PackageReference Include="Microsoft.Data.Sqlite" Version="9.0.3" />
|
<PackageReference Include="Microsoft.Data.SqlClient" Version="6.1.4" />
|
||||||
<PackageReference Include="MySql.Data" Version="9.2.0" />
|
<PackageReference Include="Microsoft.Data.Sqlite" Version="10.0.2" />
|
||||||
<PackageReference Include="Npgsql" Version="9.0.3" />
|
<PackageReference Include="MySql.Data" Version="9.6.0" />
|
||||||
|
<PackageReference Include="Npgsql" Version="10.0.1" />
|
||||||
<PackageReference Include="OllamaSharp" Version="5.2.2" />
|
<PackageReference Include="OllamaSharp" Version="5.2.2" />
|
||||||
<PackageReference Include="System.Configuration.ConfigurationManager" Version="9.0.3" />
|
<PackageReference Include="System.Configuration.ConfigurationManager" Version="10.0.2" />
|
||||||
<PackageReference Include="System.Data.SqlClient" Version="4.9.0" />
|
<PackageReference Include="System.Data.SqlClient" Version="4.9.0" />
|
||||||
<PackageReference Include="System.Data.Sqlite" Version="1.0.119" />
|
<PackageReference Include="System.Data.Sqlite" Version="2.0.2" />
|
||||||
<PackageReference Include="System.Numerics.Tensors" Version="9.0.3" />
|
<PackageReference Include="System.Numerics.Tensors" Version="10.0.2" />
|
||||||
</ItemGroup>
|
</ItemGroup>
|
||||||
|
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
|
|||||||
@@ -5,21 +5,16 @@ namespace Server;
|
|||||||
|
|
||||||
public class SimilarityMethod
|
public class SimilarityMethod
|
||||||
{
|
{
|
||||||
public SimilarityMethods.similarityMethodDelegate method;
|
public SimilarityMethods.similarityMethodDelegate Method;
|
||||||
public SimilarityMethodEnum similarityMethodEnum;
|
public SimilarityMethodEnum SimilarityMethodEnum;
|
||||||
public string name;
|
public string Name;
|
||||||
|
|
||||||
public SimilarityMethod(SimilarityMethodEnum similarityMethodEnum, ILogger logger)
|
public SimilarityMethod(SimilarityMethodEnum similarityMethodEnum)
|
||||||
{
|
{
|
||||||
this.similarityMethodEnum = similarityMethodEnum;
|
SimilarityMethodEnum = similarityMethodEnum;
|
||||||
this.name = similarityMethodEnum.ToString();
|
Name = similarityMethodEnum.ToString();
|
||||||
SimilarityMethods.similarityMethodDelegate? probMethod = SimilarityMethods.GetMethod(name);
|
SimilarityMethods.similarityMethodDelegate? probMethod = SimilarityMethods.GetMethod(Name) ?? throw new Exception($"Unable to retrieve similarityMethod {Name}");
|
||||||
if (probMethod is null)
|
Method = probMethod;
|
||||||
{
|
|
||||||
logger.LogError("Unable to retrieve similarityMethod {name}", [name]);
|
|
||||||
throw new Exception("Unable to retrieve similarityMethod");
|
|
||||||
}
|
|
||||||
method = probMethod;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -27,11 +22,11 @@ public static class SimilarityMethods
|
|||||||
{
|
{
|
||||||
public delegate float similarityMethodProtoDelegate(float[] vector1, float[] vector2);
|
public delegate float similarityMethodProtoDelegate(float[] vector1, float[] vector2);
|
||||||
public delegate float similarityMethodDelegate(float[] vector1, float[] vector2);
|
public delegate float similarityMethodDelegate(float[] vector1, float[] vector2);
|
||||||
public static readonly Dictionary<SimilarityMethodEnum, similarityMethodProtoDelegate> probMethods;
|
public static readonly Dictionary<SimilarityMethodEnum, similarityMethodProtoDelegate> ProbMethods;
|
||||||
|
|
||||||
static SimilarityMethods()
|
static SimilarityMethods()
|
||||||
{
|
{
|
||||||
probMethods = new Dictionary<SimilarityMethodEnum, similarityMethodProtoDelegate>
|
ProbMethods = new Dictionary<SimilarityMethodEnum, similarityMethodProtoDelegate>
|
||||||
{
|
{
|
||||||
[SimilarityMethodEnum.Cosine] = CosineSimilarity,
|
[SimilarityMethodEnum.Cosine] = CosineSimilarity,
|
||||||
[SimilarityMethodEnum.Euclidian] = EuclidianDistance,
|
[SimilarityMethodEnum.Euclidian] = EuclidianDistance,
|
||||||
@@ -49,7 +44,7 @@ public static class SimilarityMethods
|
|||||||
methodName
|
methodName
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!probMethods.TryGetValue(probMethodEnum, out similarityMethodProtoDelegate? method))
|
if (!ProbMethods.TryGetValue(probMethodEnum, out similarityMethodProtoDelegate? method))
|
||||||
{
|
{
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ const cookies = await page.cookies();
|
|||||||
await browser.close();
|
await browser.close();
|
||||||
|
|
||||||
async function generateCriticalCSSForViews() {
|
async function generateCriticalCSSForViews() {
|
||||||
const viewsDir = '../Views';
|
const viewsDir = '../../Views';
|
||||||
|
|
||||||
// Helper function to get all .cshtml files recursively
|
// Helper function to get all .cshtml files recursively
|
||||||
function getAllCshtmlFiles(dir) {
|
function getAllCshtmlFiles(dir) {
|
||||||
@@ -29,8 +29,6 @@ async function generateCriticalCSSForViews() {
|
|||||||
list.forEach(file => {
|
list.forEach(file => {
|
||||||
const filePath = path.join(dir, file);
|
const filePath = path.join(dir, file);
|
||||||
const stat = fs.statSync(filePath);
|
const stat = fs.statSync(filePath);
|
||||||
console.log("DEBUG@2");
|
|
||||||
console.log(filePath);
|
|
||||||
if (stat && stat.isDirectory()) {
|
if (stat && stat.isDirectory()) {
|
||||||
// Recursively get files from subdirectories
|
// Recursively get files from subdirectories
|
||||||
results = results.concat(getAllCshtmlFiles(filePath));
|
results = results.concat(getAllCshtmlFiles(filePath));
|
||||||
@@ -78,11 +76,11 @@ async function generateCriticalCSSForViews() {
|
|||||||
// Process each file
|
// Process each file
|
||||||
for (const file of cshtmlFiles) {
|
for (const file of cshtmlFiles) {
|
||||||
try {
|
try {
|
||||||
const urlPath = filePathToUrlPath(file).replace("../", "").replace("/Views", "");
|
const urlPath = filePathToUrlPath(file).replace("../", "").replace("../", "").replace("/Views", "");
|
||||||
|
|
||||||
// Generate critical CSS
|
// Generate critical CSS
|
||||||
await generate({
|
await generate({
|
||||||
src: `http://localhost:5146${urlPath}`,
|
src: `http://localhost:5146${urlPath}?noCriticalCSS`,
|
||||||
inline: false,
|
inline: false,
|
||||||
width: 1920,
|
width: 1920,
|
||||||
height: 1080,
|
height: 1080,
|
||||||
@@ -92,9 +90,9 @@ async function generateCriticalCSSForViews() {
|
|||||||
},
|
},
|
||||||
forceExclude: ['.btn'], // Otherwise buttons end up colorless and .btn overrides other classes like .btn-warning, etc. - so it has to be force-excluded here and re-added later
|
forceExclude: ['.btn'], // Otherwise buttons end up colorless and .btn overrides other classes like .btn-warning, etc. - so it has to be force-excluded here and re-added later
|
||||||
forceInclude: [
|
forceInclude: [
|
||||||
'[data-bs-theme=dark]',
|
'[data-bs-theme="dark"]', '[data-bs-theme="dark"] body', '[data-bs-theme="dark"] .navbar', '[data-bs-theme="dark"] .card', '[data-bs-theme="dark"] .btn',
|
||||||
'.navbar',
|
|
||||||
'.col-md-4',
|
'.col-md-4',
|
||||||
|
'.navbar', '.ms-auto', '.dropdown', '.dropdown-menu',
|
||||||
'.visually-hidden', // visually hidden headings
|
'.visually-hidden', // visually hidden headings
|
||||||
'.bi-info-circle-fill', '.text-info', // info icon
|
'.bi-info-circle-fill', '.text-info', // info icon
|
||||||
'.container', '.col-md-6', '.row', '.g-4', '.row>*',
|
'.container', '.col-md-6', '.row', '.g-4', '.row>*',
|
||||||
@@ -105,14 +103,14 @@ async function generateCriticalCSSForViews() {
|
|||||||
'.d-flex', '.justify-content-between', '.mt-2', // card - content
|
'.d-flex', '.justify-content-between', '.mt-2', // card - content
|
||||||
'.progress', '.mt-3', // card - progress bar
|
'.progress', '.mt-3', // card - progress bar
|
||||||
'.list-group', '.list-group-flush', '.list-group-item', '.list-group-flush>.list-group-item', '.list-group-flush>.list-group-item:last-child', '.badge', '.bg-warning', '.bg-success', '.h-100', // card - health check list
|
'.list-group', '.list-group-flush', '.list-group-item', '.list-group-flush>.list-group-item', '.list-group-flush>.list-group-item:last-child', '.badge', '.bg-warning', '.bg-success', '.h-100', // card - health check list
|
||||||
'.btn', '.btn-sm', '.btn-primary', '.btn-warning', '.btn-danger', // Searchdomains buttons
|
'.btn-primary', '.btn-warning', '.btn-danger', '.btn-info', // Searchdomains buttons
|
||||||
'.col-md-8', '.sidebar',
|
'.col-md-8', '.sidebar',
|
||||||
'.mb-0', '.mb-2', '.align-items-center',
|
'.mb-0', '.mb-2', '.align-items-center',
|
||||||
'h3', '.col-md-3', '.col-md-2', '.text-nowrap', '.overflow-auto'
|
'h3', '.col-md-3', '.col-md-2', '.text-nowrap', '.overflow-auto'
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
target: {
|
target: {
|
||||||
css: path.join(criticalCssDir, urlPath.replace(/\//g, '.').replace(/^\./, '').replace("...", "") + '.css')
|
css: path.join(criticalCssDir, "../../CriticalCSS/" + urlPath.replace(/\//g, '.').replace(/^\./, '').replace("...", "") + '.css')
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
# How to use CriticalCSS
|
# How to use CriticalCSS
|
||||||
1. Install it here
|
1. Install the dependencies from here
|
||||||
```bash
|
```bash
|
||||||
npm i -D critical
|
npm i -D critical
|
||||||
npm install puppeteer
|
npm install puppeteer
|
||||||
78
src/Server/Tools/LocalizationChecker/LocalizationChecker.py
Normal file
78
src/Server/Tools/LocalizationChecker/LocalizationChecker.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
|
def extract_translations_from_View(view_path):
|
||||||
|
"""Extract all translation strings from file A"""
|
||||||
|
translations = {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(view_path, 'r', encoding='utf-8') as file_a:
|
||||||
|
for line_num, line in enumerate(file_a, 1):
|
||||||
|
# Match T["..."] patterns
|
||||||
|
matches = re.findall(r'T\["([^"]*)"\]', line)
|
||||||
|
for match in matches:
|
||||||
|
translations[match] = line_num
|
||||||
|
except FileNotFoundError:
|
||||||
|
print(f"Error: File {view_path} not found")
|
||||||
|
sys.exit(1)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error reading file {view_path}: {e}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
return translations
|
||||||
|
|
||||||
|
def extract_localizations_from_resource_file(file_b_path):
|
||||||
|
"""Extract all translation strings from file B"""
|
||||||
|
translations = set()
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(file_b_path, 'r', encoding='utf-8') as file_b:
|
||||||
|
for line in file_b:
|
||||||
|
# Match the pattern in file B
|
||||||
|
match = re.search(r'<data name="([^"]*)"', line)
|
||||||
|
if match:
|
||||||
|
translations.add(match.group(1))
|
||||||
|
except FileNotFoundError:
|
||||||
|
print(f"Error: File {file_b_path} not found")
|
||||||
|
sys.exit(1)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"Error reading file {file_b_path}: {e}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
return translations
|
||||||
|
|
||||||
|
def find_missing_translations(view, resource):
|
||||||
|
"""Find translations in file A that don't exist in file B"""
|
||||||
|
# Extract translations from both files
|
||||||
|
file_a_translations = extract_translations_from_View(view)
|
||||||
|
file_b_translations = extract_localizations_from_resource_file(resource)
|
||||||
|
|
||||||
|
# Find missing translations
|
||||||
|
missing_translations = []
|
||||||
|
|
||||||
|
for translation_text, line_number in file_a_translations.items():
|
||||||
|
if translation_text not in file_b_translations:
|
||||||
|
missing_translations.append((translation_text, line_number))
|
||||||
|
|
||||||
|
return missing_translations
|
||||||
|
|
||||||
|
def main():
|
||||||
|
views = ["Shared/_Layout.cshtml", "Home/Index.cshtml", "Home/Searchdomains.cshtml"]
|
||||||
|
resources = ["SharedResources.en.resx", "SharedResources.de.resx"]
|
||||||
|
|
||||||
|
print("Checking for missing translations...")
|
||||||
|
print("=" * 50)
|
||||||
|
for view in views:
|
||||||
|
for resource in resources:
|
||||||
|
missing = find_missing_translations("../../Views/" + view, "../../Resources/" + resource)
|
||||||
|
|
||||||
|
if missing:
|
||||||
|
print(f"Found {len(missing)} missing translations in {view}:")
|
||||||
|
print("-" * 50)
|
||||||
|
for translation_text, line_number in missing:
|
||||||
|
print(f"Line {line_number}: T[\"{translation_text}\"]")
|
||||||
|
else:
|
||||||
|
print(f"All localizations in {view} have a matching resource in {resource}!")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -1,3 +1,4 @@
|
|||||||
|
@using Microsoft.Extensions.Primitives
|
||||||
@using Server.Services
|
@using Server.Services
|
||||||
@inject LocalizationService T
|
@inject LocalizationService T
|
||||||
@{
|
@{
|
||||||
@@ -9,6 +10,10 @@
|
|||||||
<h1>Login</h1>
|
<h1>Login</h1>
|
||||||
<form asp-action="Login" method="post" class="mt-4" style="max-width: 400px; margin: auto;">
|
<form asp-action="Login" method="post" class="mt-4" style="max-width: 400px; margin: auto;">
|
||||||
<div class="form-group mb-3">
|
<div class="form-group mb-3">
|
||||||
|
@if (Context.Request.Query.TryGetValue("ReturnUrl", out StringValues returnUrl))
|
||||||
|
{
|
||||||
|
<input type="hidden" name="ReturnUrl" value="@(returnUrl)" />
|
||||||
|
}
|
||||||
<label for="username" class="form-label">@T["Username"]</label>
|
<label for="username" class="form-label">@T["Username"]</label>
|
||||||
<input autofocus type="text" class="form-control" id="username" name="username" autocomplete="username" required>
|
<input autofocus type="text" class="form-control" id="username" name="username" autocomplete="username" required>
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -5,7 +5,6 @@
|
|||||||
@using Server
|
@using Server
|
||||||
|
|
||||||
@inject LocalizationService T
|
@inject LocalizationService T
|
||||||
@inject AIProvider AIProvider
|
|
||||||
@model HomeIndexViewModel
|
@model HomeIndexViewModel
|
||||||
@{
|
@{
|
||||||
ViewData["Title"] = "Home Page";
|
ViewData["Title"] = "Home Page";
|
||||||
@@ -25,6 +24,24 @@
|
|||||||
|
|
||||||
<div class="row g-4">
|
<div class="row g-4">
|
||||||
|
|
||||||
|
<!-- Server -->
|
||||||
|
<div class="col-md-6">
|
||||||
|
<div class="card shadow-sm h-100">
|
||||||
|
<div class="card-body">
|
||||||
|
<h2 class="card-title fs-5">@T["Server"]</h2>
|
||||||
|
|
||||||
|
<div class="d-flex justify-content-between mt-2">
|
||||||
|
<span>@T["Total RAM usage"]</span>
|
||||||
|
<strong id="serverMemorySize"></strong>
|
||||||
|
</div>
|
||||||
|
<div class="d-flex justify-content-between mt-2">
|
||||||
|
<span>@T["Total Database size"]</span>
|
||||||
|
<strong id="serverDatabaseSize"></strong>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
<!-- Embedding Cache -->
|
<!-- Embedding Cache -->
|
||||||
<div class="col-md-6">
|
<div class="col-md-6">
|
||||||
<div class="card shadow-sm h-100">
|
<div class="card shadow-sm h-100">
|
||||||
@@ -41,7 +58,7 @@
|
|||||||
@T["Strings"]
|
@T["Strings"]
|
||||||
<i class="bi bi-info-circle-fill text-info"
|
<i class="bi bi-info-circle-fill text-info"
|
||||||
data-bs-toggle="tooltip"
|
data-bs-toggle="tooltip"
|
||||||
title="The number of strings for which there are embeddings. I.e. If you use two models, the amount of embeddings will be twice this number."></i>
|
title="@T["stringsCountInfo"]"></i>
|
||||||
</span>
|
</span>
|
||||||
<strong id="embeddingcacheElementCount"></strong>
|
<strong id="embeddingcacheElementCount"></strong>
|
||||||
</div>
|
</div>
|
||||||
@@ -105,6 +122,43 @@
|
|||||||
<span>@T["Total query cache utilization"]</span>
|
<span>@T["Total query cache utilization"]</span>
|
||||||
<strong id="totalQuerycacheUtilization"></strong>
|
<strong id="totalQuerycacheUtilization"></strong>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Query cache -->
|
||||||
|
<div class="d-flex justify-content-between mt-2">
|
||||||
|
<span>@T["Query cache entry count"]</span>
|
||||||
|
<strong id="querycacheCount"></strong>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="d-flex justify-content-between mt-2">
|
||||||
|
<span>
|
||||||
|
@T["Query cache capacity (loaded)"]
|
||||||
|
<i class="bi bi-info-circle-fill text-info"
|
||||||
|
data-bs-toggle="tooltip"
|
||||||
|
title="@T["queryCacheEntryCountLoadedInfo"]"></i>
|
||||||
|
</span>
|
||||||
|
<strong id="querycacheLoadedMaxElementCount"></strong>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="progress mt-3" style="height: 8px;">
|
||||||
|
<div id="querycacheLoadedMaxElementCountProgressBar" class="progress-bar"
|
||||||
|
style="width: 0.00%"></div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
|
||||||
|
<div class="d-flex justify-content-between mt-2">
|
||||||
|
<span>
|
||||||
|
@T["Query cache capacity (all)"]
|
||||||
|
<i class="bi bi-info-circle-fill text-info"
|
||||||
|
data-bs-toggle="tooltip"
|
||||||
|
title="@T["queryCacheEntryCountAllInfo"]"></i>
|
||||||
|
</span>
|
||||||
|
<strong id="querycacheMaxElementCount"></strong>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="progress mt-3" style="height: 8px;">
|
||||||
|
<div id="querycacheMaxElementCountProgressBar" class="progress-bar"
|
||||||
|
style="width: 0.00%"></div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -116,13 +170,6 @@
|
|||||||
var searchdomains = null;
|
var searchdomains = null;
|
||||||
|
|
||||||
document.addEventListener('DOMContentLoaded', async () => {
|
document.addEventListener('DOMContentLoaded', async () => {
|
||||||
// Initialize all tooltips
|
|
||||||
var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]'))
|
|
||||||
var tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) {
|
|
||||||
return new bootstrap.Tooltip(tooltipTriggerEl)
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
let searchdomainCount = document.getElementById("searchdomainCount");
|
let searchdomainCount = document.getElementById("searchdomainCount");
|
||||||
showThrobber(searchdomainCount);
|
showThrobber(searchdomainCount);
|
||||||
let searchdomainEntityCount = document.getElementById("searchdomainEntityCount");
|
let searchdomainEntityCount = document.getElementById("searchdomainEntityCount");
|
||||||
@@ -136,6 +183,21 @@
|
|||||||
let embeddingcacheEmbeddingCount = document.getElementById("embeddingcacheEmbeddingCount");
|
let embeddingcacheEmbeddingCount = document.getElementById("embeddingcacheEmbeddingCount");
|
||||||
showThrobber(embeddingcacheEmbeddingCount);
|
showThrobber(embeddingcacheEmbeddingCount);
|
||||||
let embeddingcacheElementCountProgressBar = document.getElementById("embeddingcacheElementCountProgressBar");
|
let embeddingcacheElementCountProgressBar = document.getElementById("embeddingcacheElementCountProgressBar");
|
||||||
|
|
||||||
|
let querycacheCount = document.getElementById("querycacheCount");
|
||||||
|
showThrobber(querycacheCount);
|
||||||
|
let querycacheMaxElementCount = document.getElementById("querycacheMaxElementCount");
|
||||||
|
showThrobber(querycacheMaxElementCount);
|
||||||
|
let querycacheMaxElementCountProgressBar = document.getElementById("querycacheMaxElementCountProgressBar");
|
||||||
|
let querycacheLoadedMaxElementCount = document.getElementById("querycacheLoadedMaxElementCount");
|
||||||
|
showThrobber(querycacheLoadedMaxElementCount);
|
||||||
|
let querycacheLoadedElementCountProgressBar = document.getElementById("querycacheLoadedElementCountProgressBar");
|
||||||
|
|
||||||
|
let serverMemorySize = document.getElementById("serverMemorySize");
|
||||||
|
showThrobber(serverMemorySize);
|
||||||
|
let serverDatabaseSize = document.getElementById("serverDatabaseSize");
|
||||||
|
showThrobber(serverDatabaseSize);
|
||||||
|
|
||||||
let healthchecksServer = document.getElementById("healthchecksServer");
|
let healthchecksServer = document.getElementById("healthchecksServer");
|
||||||
let healthchecksAiProvider = document.getElementById("healthchecksAiProvider");
|
let healthchecksAiProvider = document.getElementById("healthchecksAiProvider");
|
||||||
|
|
||||||
@@ -144,46 +206,40 @@
|
|||||||
searchdomains = result.Searchdomains;
|
searchdomains = result.Searchdomains;
|
||||||
hideThrobber(searchdomainCount);
|
hideThrobber(searchdomainCount);
|
||||||
searchdomainCount.textContent = searchdomains.length;
|
searchdomainCount.textContent = searchdomains.length;
|
||||||
|
|
||||||
const perDomainPromises = searchdomains.map(async domain => {
|
|
||||||
const [entityListResult, querycacheUtilizationResult] = await Promise.all([
|
|
||||||
listEntities(domain),
|
|
||||||
getQuerycacheUtilization(domain)
|
|
||||||
]);
|
|
||||||
|
|
||||||
return {
|
|
||||||
entityCount: entityListResult.Results.length,
|
|
||||||
utilization: querycacheUtilizationResult.QueryCacheSizeBytes
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
const results = await Promise.all(perDomainPromises);
|
|
||||||
|
|
||||||
let entityCount = 0;
|
|
||||||
let totalUtilization = 0;
|
|
||||||
|
|
||||||
for (const r of results) {
|
|
||||||
entityCount += r.entityCount;
|
|
||||||
totalUtilization += r.utilization;
|
|
||||||
}
|
|
||||||
|
|
||||||
hideThrobber(searchdomainEntityCount);
|
|
||||||
hideThrobber(totalQuerycacheUtilization);
|
|
||||||
searchdomainEntityCount.textContent = entityCount;
|
|
||||||
totalQuerycacheUtilization.textContent = NumberOfBytesAsHumanReadable(totalUtilization);
|
|
||||||
});
|
});
|
||||||
getEmbeddingcacheUtilization().then(result => {
|
getServerStats().then(result => {
|
||||||
let utilization = result.SizeInBytes;
|
let utilization = result.EmbeddingCacheUtilization;
|
||||||
let maxElementCount = result.MaxElementCount;
|
let embeddingCacheMaxElementCount = result.EmbeddingCacheMaxElementCount;
|
||||||
let elementCount = result.ElementCount;
|
let embeddingCacheElementCount = result.ElementCount;
|
||||||
let embeddingCount = result.EmbeddingsCount;
|
let embeddingCount = result.EmbeddingsCount;
|
||||||
|
let entityCount = result.EntityCount;
|
||||||
|
let queryCacheUtilization = result.QueryCacheUtilization;
|
||||||
|
let queryCacheElementCount = result.QueryCacheElementCount;
|
||||||
|
let queryCacheMaxElementCountAll = result.QueryCacheMaxElementCountAll;
|
||||||
|
let queryCacheMaxElementCountLoadedSearchdomainsOnly = result.QueryCacheMaxElementCountLoadedSearchdomainsOnly;
|
||||||
hideThrobber(embeddingcacheSize);
|
hideThrobber(embeddingcacheSize);
|
||||||
embeddingcacheSize.textContent = NumberOfBytesAsHumanReadable(utilization);
|
embeddingcacheSize.textContent = NumberOfBytesAsHumanReadable(utilization);
|
||||||
hideThrobber(embeddingcacheElementCount);
|
hideThrobber(embeddingcacheElementCount);
|
||||||
embeddingcacheElementCount.textContent = `${elementCount.toLocaleString()} / ${maxElementCount.toLocaleString()}`;
|
embeddingcacheElementCount.textContent = `${embeddingCacheElementCount.toLocaleString()} / ${embeddingCacheMaxElementCount.toLocaleString()}`;
|
||||||
hideThrobber(embeddingcacheEmbeddingCount);
|
hideThrobber(embeddingcacheEmbeddingCount);
|
||||||
embeddingcacheEmbeddingCount.textContent = embeddingCount;
|
embeddingcacheEmbeddingCount.textContent = embeddingCount;
|
||||||
embeddingcacheElementCountProgressBar.style.width = `${elementCount / maxElementCount * 100}%`;
|
embeddingcacheElementCountProgressBar.style.width = `${embeddingCacheElementCount / embeddingCacheMaxElementCount * 100}%`;
|
||||||
|
hideThrobber(searchdomainEntityCount);
|
||||||
|
searchdomainEntityCount.textContent = entityCount;
|
||||||
|
hideThrobber(totalQuerycacheUtilization);
|
||||||
|
totalQuerycacheUtilization.textContent = NumberOfBytesAsHumanReadable(queryCacheUtilization);
|
||||||
|
hideThrobber(querycacheMaxElementCount);
|
||||||
|
querycacheCount.textContent = queryCacheElementCount;
|
||||||
|
hideThrobber(querycacheCount);
|
||||||
|
querycacheMaxElementCount.textContent = queryCacheMaxElementCountAll.toLocaleString();
|
||||||
|
querycacheMaxElementCountProgressBar.style.width = `${queryCacheElementCount / queryCacheMaxElementCountAll * 100}%`;
|
||||||
|
hideThrobber(querycacheLoadedMaxElementCount);
|
||||||
|
querycacheLoadedMaxElementCount.textContent = queryCacheMaxElementCountLoadedSearchdomainsOnly.toLocaleString();
|
||||||
|
querycacheLoadedMaxElementCountProgressBar.style.width = `${queryCacheElementCount / queryCacheMaxElementCountLoadedSearchdomainsOnly * 100}%`;
|
||||||
|
serverMemorySize.textContent = NumberOfBytesAsHumanReadable(result.RamTotalSize);
|
||||||
|
hideThrobber(serverMemorySize);
|
||||||
|
serverDatabaseSize.textContent = NumberOfBytesAsHumanReadable(result.DatabaseTotalSize);
|
||||||
|
hideThrobber(serverDatabaseSize);
|
||||||
});
|
});
|
||||||
getHealthCheckStatusAndApply(healthchecksServer, "/healthz/Database");
|
getHealthCheckStatusAndApply(healthchecksServer, "/healthz/Database");
|
||||||
getHealthCheckStatusAndApply(healthchecksAiProvider, "/healthz/AIProvider");
|
getHealthCheckStatusAndApply(healthchecksAiProvider, "/healthz/AIProvider");
|
||||||
@@ -206,8 +262,8 @@
|
|||||||
.then(r => r.json());
|
.then(r => r.json());
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getEmbeddingcacheUtilization() {
|
async function getServerStats() {
|
||||||
return await fetch(`/Server/EmbeddingCache/Size`)
|
return await fetch(`/Server/Stats`)
|
||||||
.then(r => r.json());
|
.then(r => r.json());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -62,11 +62,24 @@
|
|||||||
<!-- Settings -->
|
<!-- Settings -->
|
||||||
<div class="row align-items-center mb-3">
|
<div class="row align-items-center mb-3">
|
||||||
<h3>@T["Settings"]</h3>
|
<h3>@T["Settings"]</h3>
|
||||||
<div class="col-md-6">
|
<div class="col-md-3">
|
||||||
|
<label class="form-check-label" for="searchdomainConfigQueryCacheSize">@T["Query cache size"]:</label>
|
||||||
|
<input type="number" class="form-control" id="searchdomainConfigQueryCacheSize" />
|
||||||
|
</div>
|
||||||
|
<div class="col-md-6 mt-3">
|
||||||
<input type="checkbox" class="form-check-input" id="searchdomainConfigCacheReconciliation" />
|
<input type="checkbox" class="form-check-input" id="searchdomainConfigCacheReconciliation" />
|
||||||
<label class="form-check-label" for="searchdomainConfigCacheReconciliation">@T["Cache reconciliation"]</label>
|
<label class="form-check-label" for="searchdomainConfigCacheReconciliation">@T["Cache reconciliation"]</label>
|
||||||
</div>
|
</div>
|
||||||
<div class="col-md-2 mt-3 mt-md-0">
|
<div class="col-md-6 mt-3">
|
||||||
|
<input type="checkbox" class="form-check-input" id="searchdomainConfigParallelEmbeddingsPrefetch" />
|
||||||
|
<label class="form-check-label" for="searchdomainConfigParallelEmbeddingsPrefetch">@T["Embeddings parallel prefetching"]</label>
|
||||||
|
<i class="bi bi-info-circle-fill text-info"
|
||||||
|
data-bs-toggle="tooltip"
|
||||||
|
title="@T["parallelEmbeddingsPrefetchInfo"]"></i>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="row align-items-center mb-3">
|
||||||
|
<div class="col-md-2 mt-md-0">
|
||||||
<button class="btn btn-warning w-100" id="searchdomainConfigUpdate">@T["Update"]</button>
|
<button class="btn btn-warning w-100" id="searchdomainConfigUpdate">@T["Update"]</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -92,7 +105,7 @@
|
|||||||
<div class="card section-card mb-4">
|
<div class="card section-card mb-4">
|
||||||
<div class="card-body">
|
<div class="card-body">
|
||||||
<div class="d-flex justify-content-between align-items-center mb-2">
|
<div class="d-flex justify-content-between align-items-center mb-2">
|
||||||
<h3>Recent queries</h3>
|
<h3>@T["Recent queries"]</h3>
|
||||||
<input
|
<input
|
||||||
type="text"
|
type="text"
|
||||||
class="form-control form-control-sm w-25"
|
class="form-control form-control-sm w-25"
|
||||||
@@ -103,8 +116,8 @@
|
|||||||
<table id="queriesTable" class="table table-striped" style="max-height: 60vh; overflow-y: auto; display: block;">
|
<table id="queriesTable" class="table table-striped" style="max-height: 60vh; overflow-y: auto; display: block;">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th class="visually-hidden">Name</th>
|
<th class="visually-hidden">@T["Name"]</th>
|
||||||
<th class="visually-hidden">Action</th>
|
<th class="visually-hidden">@T["Action"]</th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
@@ -129,8 +142,8 @@
|
|||||||
<table id="entitiesTable" class="table table-striped" style="max-height: 60vh; overflow-y: auto; display: block;">
|
<table id="entitiesTable" class="table table-striped" style="max-height: 60vh; overflow-y: auto; display: block;">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th class="visually-hidden">Name</th>
|
<th class="visually-hidden">@T["Name"]</th>
|
||||||
<th class="visually-hidden">Action</th>
|
<th class="visually-hidden">@T["Action"]</th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
@@ -152,8 +165,8 @@
|
|||||||
<div class="modal-dialog modal-lg modal-dialog-scrollable">
|
<div class="modal-dialog modal-lg modal-dialog-scrollable">
|
||||||
<div class="modal-content">
|
<div class="modal-content">
|
||||||
<div class="modal-header bg-info">
|
<div class="modal-header bg-info">
|
||||||
<h2 class="modal-title" id="entityDetailsTitle">@T["Entity Details"]</h2>
|
<h2 class="modal-title text-dark" id="entityDetailsTitle">@T["Entity Details"]</h2>
|
||||||
<button type="button" class="btn-close" data-bs-dismiss="modal"></button>
|
<button type="button" class="btn-close" data-bs-dismiss="modal" style="filter: brightness(0);"></button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="modal-body">
|
<div class="modal-body">
|
||||||
@@ -200,8 +213,8 @@
|
|||||||
<div class="modal-content">
|
<div class="modal-content">
|
||||||
|
|
||||||
<div class="modal-header bg-info">
|
<div class="modal-header bg-info">
|
||||||
<h2 class="modal-title" id="queryDetailsTitle">@T["Query Details"] - <span id="queryDetailsQueryName"></span></h2>
|
<h2 class="modal-title text-dark" id="queryDetailsTitle">@T["Query Details"] - <span id="queryDetailsQueryName"></span></h2>
|
||||||
<button type="button" class="btn-close" data-bs-dismiss="modal"></button>
|
<button type="button" class="btn-close" data-bs-dismiss="modal" style="filter: brightness(0);"></button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="modal-body">
|
<div class="modal-body">
|
||||||
@@ -240,8 +253,8 @@
|
|||||||
<div class="modal-content">
|
<div class="modal-content">
|
||||||
|
|
||||||
<div class="modal-header bg-warning">
|
<div class="modal-header bg-warning">
|
||||||
<h2 class="modal-title" id="queryUpdateTitle">@T["Query Update"] - <span id="queryUpdateQueryName"></span></h2>
|
<h2 class="modal-title text-dark" id="queryUpdateTitle">@T["Query Update"] - <span id="queryUpdateQueryName"></span></h2>
|
||||||
<button type="button" class="btn-close" data-bs-dismiss="modal"></button>
|
<button type="button" class="btn-close" data-bs-dismiss="modal" style="filter: brightness(0);"></button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="modal-body">
|
<div class="modal-body">
|
||||||
@@ -252,12 +265,13 @@
|
|||||||
|
|
||||||
<!-- Results -->
|
<!-- Results -->
|
||||||
<h3>@T["Results"]</h3>
|
<h3>@T["Results"]</h3>
|
||||||
|
<button class="btn btn-primary btn-sm" onclick="queryUpdateAddResult('', '', null, true)">@T["Add result"]</button>
|
||||||
<table class="table table-sm table-striped">
|
<table class="table table-sm table-striped">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th style="width: 85px;">@T["Score"]</th>
|
<th style="width: 85px;">@T["Score"]</th>
|
||||||
<th>@T["Name"]</th>
|
<th>@T["Name"]</th>
|
||||||
<th>@T["Action"]</th>
|
<th class="text-center">@T["Action"]</th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody id="queryUpdateResultsBody"></tbody>
|
<tbody id="queryUpdateResultsBody"></tbody>
|
||||||
@@ -284,8 +298,8 @@
|
|||||||
<div class="modal-content">
|
<div class="modal-content">
|
||||||
|
|
||||||
<div class="modal-header bg-warning">
|
<div class="modal-header bg-warning">
|
||||||
<h2 class="modal-title" id="renameSearchdomainTitle">@T["Rename searchdomain"]</h2>
|
<h2 class="modal-title text-dark" id="renameSearchdomainTitle">@T["Rename searchdomain"]</h2>
|
||||||
<button type="button" class="btn-close" data-bs-dismiss="modal"></button>
|
<button type="button" class="btn-close" data-bs-dismiss="modal" style="filter: brightness(0);"></button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="modal-body">
|
<div class="modal-body">
|
||||||
@@ -298,10 +312,10 @@
|
|||||||
|
|
||||||
<div class="modal-footer">
|
<div class="modal-footer">
|
||||||
<button type="button" class="btn btn-warning" onclick="renameSearchdomain(getSelectedDomainKey(), document.getElementById('renameSearchdomainNewName').value)" data-bs-dismiss="modal">
|
<button type="button" class="btn btn-warning" onclick="renameSearchdomain(getSelectedDomainKey(), document.getElementById('renameSearchdomainNewName').value)" data-bs-dismiss="modal">
|
||||||
Rename
|
@T["Rename"]
|
||||||
</button>
|
</button>
|
||||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">
|
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">
|
||||||
Close
|
@T["Close"]
|
||||||
</button>
|
</button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
@@ -347,10 +361,24 @@
|
|||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="modal-body">
|
<div class="modal-body">
|
||||||
<label for="createSearchdomainName" class="form-label">@T["Searchdomain name"]</label>
|
<div class="row align-items-center mb-3">
|
||||||
<input type="text" class="form-control mb-3" id="createSearchdomainName" placeholder="@T["Searchdomain name"]" />
|
<div class="col-md-12">
|
||||||
<input type="checkbox" class="form-check-input" id="createSearchdomainWithCacheReconciliation" />
|
<label for="createSearchdomainName" class="form-label">@T["Searchdomain name"]</label>
|
||||||
<label class="form-check-label" for="createSearchdomainWithCacheReconciliation">@T["Enable cache reconciliation"]</label>
|
<input type="text" class="form-control mb-3" id="createSearchdomainName" placeholder="@T["Searchdomain name"]" />
|
||||||
|
</div>
|
||||||
|
<div class="col-md-5">
|
||||||
|
<label class="form-check-label mb-2" for="createSearchdomainQueryCacheSize">@T["Query cache size"]:</label>
|
||||||
|
<input type="number" class="form-control" id="createSearchdomainQueryCacheSize" />
|
||||||
|
</div>
|
||||||
|
<div class="col-md-7 mt-3">
|
||||||
|
<input type="checkbox" class="form-check-input" id="createSearchdomainWithCacheReconciliation" />
|
||||||
|
<label class="form-check-label" for="createSearchdomainWithCacheReconciliation">@T["Enable cache reconciliation"]</label>
|
||||||
|
</div>
|
||||||
|
<div class="col-md-6 mt-3">
|
||||||
|
<input type="checkbox" class="form-check-input" id="createSearchdomainConfigParallelEmbeddingsPrefetch" />
|
||||||
|
<label class="form-check-label" for="createSearchdomainConfigParallelEmbeddingsPrefetch">@T["Embeddings parallel prefetching"]</label>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="modal-footer">
|
<div class="modal-footer">
|
||||||
@@ -476,8 +504,8 @@
|
|||||||
<div class="modal-content">
|
<div class="modal-content">
|
||||||
|
|
||||||
<div class="modal-header bg-warning text">
|
<div class="modal-header bg-warning text">
|
||||||
<h2 class="modal-title" id="updateEntityTitle">@T["Update entity"]</h2>
|
<h2 class="modal-title text-dark" id="updateEntityTitle">@T["Update entity"]</h2>
|
||||||
<button type="button" class="btn-close" data-bs-dismiss="modal"></button>
|
<button type="button" class="btn-close" data-bs-dismiss="modal" style="filter: brightness(0);"></button>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="modal-body">
|
<div class="modal-body">
|
||||||
@@ -656,7 +684,10 @@
|
|||||||
queriesFilter.addEventListener('input', () => {
|
queriesFilter.addEventListener('input', () => {
|
||||||
populateQueriesTable(queriesFilter.value);
|
populateQueriesTable(queriesFilter.value);
|
||||||
});
|
});
|
||||||
selectDomain(0);
|
try
|
||||||
|
{
|
||||||
|
selectDomain(0);
|
||||||
|
} catch (error) {}
|
||||||
|
|
||||||
document
|
document
|
||||||
.getElementById('searchdomainRename')
|
.getElementById('searchdomainRename')
|
||||||
@@ -694,7 +725,9 @@
|
|||||||
.addEventListener('click', () => {
|
.addEventListener('click', () => {
|
||||||
const domainKey = getSelectedDomainKey();
|
const domainKey = getSelectedDomainKey();
|
||||||
const cacheReconciliation = document.getElementById('searchdomainConfigCacheReconciliation').checked;
|
const cacheReconciliation = document.getElementById('searchdomainConfigCacheReconciliation').checked;
|
||||||
updateSearchdomainConfig(domainKey, { CacheReconciliation: cacheReconciliation});
|
const queryCacheSize = document.getElementById('searchdomainConfigQueryCacheSize').value;
|
||||||
|
const parallelEmbeddingsPrefetch = document.getElementById('searchdomainConfigParallelEmbeddingsPrefetch').checked;
|
||||||
|
updateSearchdomainConfig(domainKey, { CacheReconciliation: cacheReconciliation, QueryCacheSize: queryCacheSize, ParallelEmbeddingsPrefetch: parallelEmbeddingsPrefetch});
|
||||||
});
|
});
|
||||||
|
|
||||||
document
|
document
|
||||||
@@ -745,7 +778,7 @@
|
|||||||
"datapoints": datapoints
|
"datapoints": datapoints
|
||||||
}];
|
}];
|
||||||
showToast("@T["Creating entity"]", "primary");
|
showToast("@T["Creating entity"]", "primary");
|
||||||
fetch(`/Entity`, {
|
fetch(`/Entities`, {
|
||||||
method: 'PUT',
|
method: 'PUT',
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'application/json'
|
'Content-Type': 'application/json'
|
||||||
@@ -775,9 +808,10 @@
|
|||||||
document.getElementById('createSearchdomainModal')
|
document.getElementById('createSearchdomainModal')
|
||||||
);
|
);
|
||||||
const name = document.getElementById('createSearchdomainName').value;
|
const name = document.getElementById('createSearchdomainName').value;
|
||||||
|
const queryCacheSize = document.getElementById('createSearchdomainQueryCacheSize').value;
|
||||||
const cacheReconciliation = document.getElementById('createSearchdomainWithCacheReconciliation').checked;
|
const cacheReconciliation = document.getElementById('createSearchdomainWithCacheReconciliation').checked;
|
||||||
const settings = { CacheReconciliation: cacheReconciliation };
|
const parallelEmbeddingsPrefetch = document.getElementById('createSearchdomainConfigParallelEmbeddingsPrefetch').checked;
|
||||||
// Implement create logic here
|
const settings = { CacheReconciliation: cacheReconciliation, QueryCacheSize: queryCacheSize, ParallelEmbeddingsPrefetch: parallelEmbeddingsPrefetch };
|
||||||
fetch(`/Searchdomain?searchdomain=${encodeURIComponent(name)}`, {
|
fetch(`/Searchdomain?searchdomain=${encodeURIComponent(name)}`, {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: {
|
||||||
@@ -869,12 +903,12 @@
|
|||||||
var data = [{
|
var data = [{
|
||||||
"name": name,
|
"name": name,
|
||||||
"probmethod": probMethod,
|
"probmethod": probMethod,
|
||||||
"searchdomain": encodeURIComponent(domains[getSelectedDomainKey()]),
|
"searchdomain": domains[getSelectedDomainKey()],
|
||||||
"attributes": attributes,
|
"attributes": attributes,
|
||||||
"datapoints": datapoints
|
"datapoints": datapoints
|
||||||
}];
|
}];
|
||||||
showToast("@T["Updating entity"]", "primary");
|
showToast("@T["Updating entity"]", "primary");
|
||||||
fetch(`/Entity`, {
|
fetch(`/Entities`, {
|
||||||
method: 'PUT',
|
method: 'PUT',
|
||||||
headers: {
|
headers: {
|
||||||
'Content-Type': 'application/json'
|
'Content-Type': 'application/json'
|
||||||
@@ -937,7 +971,7 @@
|
|||||||
}).then(async response => {
|
}).then(async response => {
|
||||||
result = await response.json();
|
result = await response.json();
|
||||||
if (response.ok && result.Success) {
|
if (response.ok && result.Success) {
|
||||||
showToast("@T["Searchdomain was created successfully"]", "success");
|
showToast("@T["Search query was updated successfully"]", "success");
|
||||||
console.log('Search query was updated successfully');
|
console.log('Search query was updated successfully');
|
||||||
selectDomain(getSelectedDomainKey());
|
selectDomain(getSelectedDomainKey());
|
||||||
} else {
|
} else {
|
||||||
@@ -1053,7 +1087,12 @@
|
|||||||
|
|
||||||
let searchdomainConfigPromise = getSearchdomainConfig(getSelectedDomainKey());
|
let searchdomainConfigPromise = getSearchdomainConfig(getSelectedDomainKey());
|
||||||
let configElementCachereconciliation = document.getElementById('searchdomainConfigCacheReconciliation');
|
let configElementCachereconciliation = document.getElementById('searchdomainConfigCacheReconciliation');
|
||||||
|
let configElementCacheSize = document.getElementById('searchdomainConfigQueryCacheSize');
|
||||||
|
let configElementParallelEmbeddingsPrefetch = document.getElementById('searchdomainConfigParallelEmbeddingsPrefetch');
|
||||||
|
|
||||||
|
showThrobber(document.querySelector('#searchdomainConfigQueryCacheSize'), true);
|
||||||
|
showThrobber(document.querySelector('#searchdomainConfigCacheReconciliation'), true);
|
||||||
|
showThrobber(document.querySelector('#searchdomainConfigParallelEmbeddingsPrefetch'), true);
|
||||||
let cacheUtilizationPromise = getSearchdomainCacheUtilization(getSelectedDomainKey());
|
let cacheUtilizationPromise = getSearchdomainCacheUtilization(getSelectedDomainKey());
|
||||||
let databaseUtilizationPromise = getSearchdomainDatabaseUtilization(getSelectedDomainKey());
|
let databaseUtilizationPromise = getSearchdomainDatabaseUtilization(getSelectedDomainKey());
|
||||||
|
|
||||||
@@ -1095,10 +1134,15 @@
|
|||||||
});
|
});
|
||||||
|
|
||||||
searchdomainConfigPromise.then(searchdomainConfig => {
|
searchdomainConfigPromise.then(searchdomainConfig => {
|
||||||
|
hideThrobber(document.querySelector('#searchdomainConfigCacheReconciliation'), true);
|
||||||
|
hideThrobber(document.querySelector('#searchdomainConfigParallelEmbeddingsPrefetch'), true);
|
||||||
|
|
||||||
if (searchdomainConfig != null && searchdomainConfig.Settings != null)
|
if (searchdomainConfig != null && searchdomainConfig.Settings != null)
|
||||||
{
|
{
|
||||||
|
configElementCacheSize.value = searchdomainConfig.Settings.QueryCacheSize;
|
||||||
configElementCachereconciliation.checked = searchdomainConfig.Settings.CacheReconciliation;
|
configElementCachereconciliation.checked = searchdomainConfig.Settings.CacheReconciliation;
|
||||||
configElementCachereconciliation.disabled = false;
|
configElementCachereconciliation.disabled = false;
|
||||||
|
configElementParallelEmbeddingsPrefetch.checked = searchdomainConfig.Settings.ParallelEmbeddingsPrefetch;
|
||||||
} else {
|
} else {
|
||||||
configElementCachereconciliation.disabled = true;
|
configElementCachereconciliation.disabled = true;
|
||||||
showToast("@T["Unable to fetch searchdomain config"]", "danger");
|
showToast("@T["Unable to fetch searchdomain config"]", "danger");
|
||||||
@@ -1106,10 +1150,11 @@
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
cacheUtilizationPromise.then(cacheUtilization => {
|
cacheUtilizationPromise.then(cacheUtilization => {
|
||||||
if (cacheUtilization != null && cacheUtilization.QueryCacheSizeBytes != null)
|
hideThrobber(document.querySelector('#searchdomainConfigQueryCacheSize'), true);
|
||||||
|
if (cacheUtilization != null && cacheUtilization.SizeBytes != null)
|
||||||
{
|
{
|
||||||
document.querySelector('#cacheUtilization').innerText =
|
document.querySelector('#cacheUtilization').innerText =
|
||||||
`${NumberOfBytesAsHumanReadable(cacheUtilization.QueryCacheSizeBytes)}`;
|
`${NumberOfBytesAsHumanReadable(cacheUtilization.SizeBytes)}`;
|
||||||
} else {
|
} else {
|
||||||
showToast("@T["Unable to fetch searchdomain cache utilization"]", "danger");
|
showToast("@T["Unable to fetch searchdomain cache utilization"]", "danger");
|
||||||
console.error('Failed to fetch searchdomain cache utilization');
|
console.error('Failed to fetch searchdomain cache utilization');
|
||||||
@@ -1289,14 +1334,30 @@
|
|||||||
domainItem.classList.add('list-group-item-danger');
|
domainItem.classList.add('list-group-item-danger');
|
||||||
}
|
}
|
||||||
|
|
||||||
function showThrobber(element = null) {
|
function showThrobber(element = null, direct = false) {
|
||||||
if (element == null) element = document;
|
if (element == null) element = document;
|
||||||
element.querySelector('.spinner').classList.remove('d-none');
|
if (direct) {
|
||||||
|
let spinner = document.createElement('div');
|
||||||
|
spinner.classList.add('spinner');
|
||||||
|
spinner.style.position = "absolute";
|
||||||
|
spinner.style.marginTop = "0.5rem";
|
||||||
|
spinner.style.marginLeft = "0.5rem";
|
||||||
|
element.style.opacity = "0.25";
|
||||||
|
element.parentElement.insertBefore(spinner, element);
|
||||||
|
} else {
|
||||||
|
element.querySelector('.spinner').classList.remove('d-none');
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function hideThrobber(element = null) {
|
function hideThrobber(element = null, direct = false) {
|
||||||
if (element == null) element = document;
|
if (element == null) element = document;
|
||||||
element.querySelector('.spinner').classList.add('d-none');
|
if (direct) {
|
||||||
|
element.previousElementSibling.remove()
|
||||||
|
element.style.opacity = "1";
|
||||||
|
} else {
|
||||||
|
element.querySelector('.spinner').classList.add('d-none');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function showEntityDetails(entity) {
|
function showEntityDetails(entity) {
|
||||||
@@ -1434,28 +1495,7 @@
|
|||||||
</tr>`;
|
</tr>`;
|
||||||
} else {
|
} else {
|
||||||
query.Results.forEach(r => {
|
query.Results.forEach(r => {
|
||||||
const row = document.createElement('tr');
|
queryUpdateAddResult(r.Score.toFixed(4), r.Name, resultsBody);
|
||||||
row.setAttribute("draggable", true);
|
|
||||||
const tdScore = document.createElement('td');
|
|
||||||
const scoreInput = document.createElement('input');
|
|
||||||
scoreInput.classList.add('form-control');
|
|
||||||
scoreInput.value = r.Score.toFixed(4);
|
|
||||||
tdScore.append(scoreInput);
|
|
||||||
const tdName = document.createElement('td');
|
|
||||||
tdName.classList.add("text-break");
|
|
||||||
tdName.innerText = r.Name;
|
|
||||||
const tdAction = document.createElement('td');
|
|
||||||
const deleteButton = document.createElement('button');
|
|
||||||
deleteButton.classList.add('btn', 'btn-danger', 'btn-sm');
|
|
||||||
deleteButton.innerText = '@Html.Raw(T["Delete"])';
|
|
||||||
deleteButton.onclick = function() {
|
|
||||||
row.remove();
|
|
||||||
};
|
|
||||||
tdAction.append(deleteButton);
|
|
||||||
row.append(tdScore);
|
|
||||||
row.append(tdName);
|
|
||||||
row.append(tdAction);
|
|
||||||
resultsBody.appendChild(row);
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1466,6 +1506,66 @@
|
|||||||
modal.show();
|
modal.show();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function queryUpdateAddResult(score, name, target=null, insertAtTop=false) {
|
||||||
|
target = target ?? document.getElementById('queryUpdateResultsBody');
|
||||||
|
|
||||||
|
const row = document.createElement('tr');
|
||||||
|
row.setAttribute("draggable", true);
|
||||||
|
const tdScore = document.createElement('td');
|
||||||
|
const scoreInput = document.createElement('input');
|
||||||
|
scoreInput.classList.add('form-control');
|
||||||
|
scoreInput.value = score;
|
||||||
|
scoreInput.ariaLabel = "@T["Score"]";
|
||||||
|
tdScore.append(scoreInput);
|
||||||
|
const tdName = document.createElement('td');
|
||||||
|
const tdNameInput = document.createElement('input');
|
||||||
|
tdNameInput.classList.add("form-control");
|
||||||
|
tdNameInput.value = name;
|
||||||
|
tdNameInput.ariaLabel = "@T["Name"]";
|
||||||
|
tdName.append(tdNameInput);
|
||||||
|
const tdAction = document.createElement('td');
|
||||||
|
tdAction.classList.add('text-center');
|
||||||
|
|
||||||
|
const upButton = document.createElement('button');
|
||||||
|
upButton.classList.add('btn', 'btn-primary', 'btn-sm');
|
||||||
|
upButton.innerText = '↑';
|
||||||
|
upButton.onclick = function() {
|
||||||
|
const currentRow = this.closest('tr');
|
||||||
|
const previousRow = currentRow.previousElementSibling;
|
||||||
|
if (previousRow) {
|
||||||
|
target.insertBefore(currentRow, previousRow);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
const downButton = document.createElement('button');
|
||||||
|
downButton.classList.add('btn', 'btn-primary', 'btn-sm', 'mx-1');
|
||||||
|
downButton.innerText = '↓';
|
||||||
|
downButton.onclick = function() {
|
||||||
|
const currentRow = this.closest('tr');
|
||||||
|
const nextRow = currentRow.nextElementSibling;
|
||||||
|
if (nextRow) {
|
||||||
|
target.insertBefore(nextRow, currentRow);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const deleteButton = document.createElement('button');
|
||||||
|
deleteButton.classList.add('btn', 'btn-danger', 'btn-sm', 'mx-2');
|
||||||
|
deleteButton.innerText = '@Html.Raw(T["Delete"])';
|
||||||
|
deleteButton.onclick = function() {
|
||||||
|
row.remove();
|
||||||
|
};
|
||||||
|
tdAction.append(upButton);
|
||||||
|
tdAction.append(downButton);
|
||||||
|
tdAction.append(deleteButton);
|
||||||
|
row.append(tdScore);
|
||||||
|
row.append(tdName);
|
||||||
|
row.append(tdAction);
|
||||||
|
if (!insertAtTop) {
|
||||||
|
target.appendChild(row);
|
||||||
|
} else {
|
||||||
|
target.insertBefore(row, target.firstChild);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
function NumberOfBytesAsHumanReadable(bytes, decimals = 2) {
|
function NumberOfBytesAsHumanReadable(bytes, decimals = 2) {
|
||||||
if (bytes === 0) return '0 B';
|
if (bytes === 0) return '0 B';
|
||||||
if (bytes > 1.20892581961*(10**27)) return "∞ B";
|
if (bytes > 1.20892581961*(10**27)) return "∞ B";
|
||||||
@@ -1672,7 +1772,7 @@
|
|||||||
|
|
||||||
// Get the text content from the second cell (index 1) which contains the path
|
// Get the text content from the second cell (index 1) which contains the path
|
||||||
const score = parseFloat(cells[0].firstChild.value);
|
const score = parseFloat(cells[0].firstChild.value);
|
||||||
const name = cells[1].textContent.trim();
|
const name = cells[1].firstChild.value;
|
||||||
|
|
||||||
result.push({
|
result.push({
|
||||||
"Score": score,
|
"Score": score,
|
||||||
|
|||||||
@@ -1,7 +1,10 @@
|
|||||||
@using System.Globalization
|
@using System.Globalization
|
||||||
@using Server.Services
|
@using Server.Services
|
||||||
|
@using System.Net
|
||||||
@inject LocalizationService T
|
@inject LocalizationService T
|
||||||
|
@{
|
||||||
|
var currentUrl = WebUtility.HtmlEncode(Context.Request.Path);
|
||||||
|
}
|
||||||
<!DOCTYPE html>
|
<!DOCTYPE html>
|
||||||
<html lang="@CultureInfo.CurrentUICulture.TwoLetterISOLanguageName">
|
<html lang="@CultureInfo.CurrentUICulture.TwoLetterISOLanguageName">
|
||||||
<head>
|
<head>
|
||||||
@@ -9,13 +12,19 @@
|
|||||||
<meta name="description" content="Embeddingsearch server" />
|
<meta name="description" content="Embeddingsearch server" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<title>@ViewData["Title"] - embeddingsearch</title>
|
<title>@ViewData["Title"] - embeddingsearch</title>
|
||||||
@if (!Context.Request.Query.ContainsKey("renderRaw"))
|
<link rel="preload" href="~/fonts/bootstrap-icons.woff2" as="font" type="font/woff2" crossorigin="anonymous"/>
|
||||||
|
@if (!Context.Request.Query.ContainsKey("renderRaw") && !Context.Request.Query.ContainsKey("noCriticalCSS"))
|
||||||
{
|
{
|
||||||
<link rel="preload" href="~/lib/bootstrap/dist/css/bootstrap.min.css" as="style"/>
|
<link rel="preload" href="~/lib/bootstrap/dist/css/bootstrap.min.css" as="style"/>
|
||||||
<link rel="stylesheet" fetchpriority="high"
|
<link rel="stylesheet" fetchpriority="high"
|
||||||
href="~/lib/bootstrap/dist/css/bootstrap.min.css"
|
href="~/lib/bootstrap/dist/css/bootstrap.min.css"
|
||||||
media="print"
|
media="print"
|
||||||
onload="this.media='all'">
|
onload="this.media='all'">
|
||||||
|
} else if (Context.Request.Query.ContainsKey("noCriticalCSS"))
|
||||||
|
{
|
||||||
|
<link rel="preload" href="~/lib/bootstrap/dist/css/bootstrap.min.css" as="style"/>
|
||||||
|
<link rel="stylesheet" fetchpriority="high"
|
||||||
|
href="~/lib/bootstrap/dist/css/bootstrap.min.css">
|
||||||
}
|
}
|
||||||
<style>
|
<style>
|
||||||
@Html.Raw(File.ReadAllText(System.IO.Path.Combine(Directory.GetCurrentDirectory(), "wwwroot", "css", "site.css")))
|
@Html.Raw(File.ReadAllText(System.IO.Path.Combine(Directory.GetCurrentDirectory(), "wwwroot", "css", "site.css")))
|
||||||
@@ -26,7 +35,6 @@
|
|||||||
@if (Context.Request.Path.Value is not null)
|
@if (Context.Request.Path.Value is not null)
|
||||||
{
|
{
|
||||||
string path = System.IO.Path.Combine("CriticalCSS", Context.Request.Path.Value.Trim('/').Replace("/", ".") + ".css");
|
string path = System.IO.Path.Combine("CriticalCSS", Context.Request.Path.Value.Trim('/').Replace("/", ".") + ".css");
|
||||||
Console.WriteLine(path);
|
|
||||||
if (File.Exists(path))
|
if (File.Exists(path))
|
||||||
{
|
{
|
||||||
@Html.Raw(File.ReadAllText(path));
|
@Html.Raw(File.ReadAllText(path));
|
||||||
@@ -40,11 +48,13 @@
|
|||||||
};
|
};
|
||||||
</script>
|
</script>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body data-bs-theme="dark">
|
||||||
<header>
|
<header>
|
||||||
<nav class="navbar navbar-expand-sm navbar-toggleable-sm navbar-light bg-white border-bottom box-shadow mb-3">
|
<nav class="navbar navbar-expand-sm navbar-toggleable-sm navbar-light border-bottom box-shadow mb-3">
|
||||||
<div class="container-fluid">
|
<div class="container-fluid">
|
||||||
<a class="navbar-brand" asp-area="" asp-controller="Home" asp-action="Index">embeddingsearch</a>
|
<a class="navbar-brand" asp-area="" asp-controller="Home" asp-action="Index">
|
||||||
|
<img fetchpriority="high" alt="Logo" src="/logo.png" width="40" height="40" style="width: 40px; height: 40px;">
|
||||||
|
</a>
|
||||||
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target=".navbar-collapse" aria-controls="navbarSupportedContent"
|
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target=".navbar-collapse" aria-controls="navbarSupportedContent"
|
||||||
aria-expanded="false" aria-label="Toggle navigation">
|
aria-expanded="false" aria-label="Toggle navigation">
|
||||||
<span class="navbar-toggler-icon"></span>
|
<span class="navbar-toggler-icon"></span>
|
||||||
@@ -54,19 +64,34 @@
|
|||||||
@if (User.Identity?.IsAuthenticated == true)
|
@if (User.Identity?.IsAuthenticated == true)
|
||||||
{
|
{
|
||||||
<li class="nav-item">
|
<li class="nav-item">
|
||||||
<a class="nav-link text-dark" asp-area="" asp-controller="Home" asp-action="Index">@T["Home"]</a>
|
<a class="nav-link text" asp-area="" asp-controller="Home" asp-action="Index">@T["Home"]</a>
|
||||||
</li>
|
</li>
|
||||||
<li class="nav-item">
|
<li class="nav-item">
|
||||||
<a class="nav-link text-dark" asp-area="" asp-controller="Home" asp-action="Searchdomains">@T["Searchdomains"]</a>
|
<a class="nav-link text" asp-area="" asp-controller="Home" asp-action="Searchdomains">@T["Searchdomains"]</a>
|
||||||
</li>
|
</li>
|
||||||
<li class="nav-item">
|
@if (User.IsInRole("Admin") || User.IsInRole("Swagger"))
|
||||||
<a class="nav-link text-dark" asp-area="" asp-controller="Account" asp-action="Logout">@T["Logout"]</a>
|
{
|
||||||
|
<li class="nav-item dropdown">
|
||||||
|
<a class="nav-link dropdown-toggle" href="#" id="navbarDropdownMenuLink" data-bs-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
|
||||||
|
@T["Tools"]
|
||||||
|
</a>
|
||||||
|
<div class="dropdown-menu" aria-labelledby="navbarDropdownMenuLink">
|
||||||
|
<a class="dropdown-item" href="/swagger/index.html?ReturnUrl=@(currentUrl)">@T["Swagger"]</a>
|
||||||
|
@if (User.IsInRole("Admin"))
|
||||||
|
{
|
||||||
|
<a class="dropdown-item" href="/elmah?ReturnUrl=@(currentUrl)">@T["Elmah"]</a>
|
||||||
|
}
|
||||||
|
</div>
|
||||||
|
</li>
|
||||||
|
}
|
||||||
|
<li class="nav-item ms-auto">
|
||||||
|
<a class="nav-link text" asp-area="" asp-controller="Account" asp-action="Logout">@T["Logout"]</a>
|
||||||
</li>
|
</li>
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
<li class="nav-item">
|
<li class="nav-item">
|
||||||
<a class="nav-link text-dark" asp-area="" asp-controller="Account" asp-action="Login">@T["Login"]</a>
|
<a class="nav-link text" asp-area="" asp-controller="Account" asp-action="Login">@T["Login"]</a>
|
||||||
</li>
|
</li>
|
||||||
}
|
}
|
||||||
</ul>
|
</ul>
|
||||||
@@ -91,3 +116,16 @@
|
|||||||
@await RenderSectionAsync("Scripts", required: false)
|
@await RenderSectionAsync("Scripts", required: false)
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
<script>
|
||||||
|
const mediaQuery = window.matchMedia('(prefers-color-scheme: dark)');
|
||||||
|
|
||||||
|
function applyTheme(e) {
|
||||||
|
document.body.setAttribute(
|
||||||
|
'data-bs-theme',
|
||||||
|
e.matches ? 'dark' : 'light'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
applyTheme(mediaQuery);
|
||||||
|
mediaQuery.addEventListener('change', applyTheme);
|
||||||
|
</script>
|
||||||
|
|||||||
@@ -15,12 +15,12 @@
|
|||||||
|
|
||||||
"Embeddingsearch": {
|
"Embeddingsearch": {
|
||||||
"ConnectionStrings": {
|
"ConnectionStrings": {
|
||||||
"SQL": "server=localhost;database=embeddingsearch;uid=embeddingsearch;pwd=somepassword!;"
|
"SQL": "server=localhost;database=embeddingsearch;uid=embeddingsearch;pwd=somepassword!;",
|
||||||
|
"Cache": "Data Source=embeddings.db;Mode=ReadWriteCreate;Cache=Shared"
|
||||||
},
|
},
|
||||||
"Elmah": {
|
"Elmah": {
|
||||||
"LogPath": "~/logs"
|
"LogPath": "~/logs"
|
||||||
},
|
},
|
||||||
"EmbeddingCacheMaxCount": 10000000,
|
|
||||||
"AiProviders": {
|
"AiProviders": {
|
||||||
"ollama": {
|
"ollama": {
|
||||||
"handler": "ollama",
|
"handler": "ollama",
|
||||||
@@ -46,6 +46,11 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
"ApiKeys": ["Some UUID here", "Another UUID here"],
|
"ApiKeys": ["Some UUID here", "Another UUID here"],
|
||||||
"UseHttpsRedirection": true
|
"UseHttpsRedirection": true,
|
||||||
|
"Cache": {
|
||||||
|
"CacheTopN": 100000,
|
||||||
|
"StoreEmbeddingCache": true,
|
||||||
|
"StoreTopN": 20000
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,27 +15,41 @@
|
|||||||
"UseSwagger": true,
|
"UseSwagger": true,
|
||||||
"Embeddingsearch": {
|
"Embeddingsearch": {
|
||||||
"ConnectionStrings": {
|
"ConnectionStrings": {
|
||||||
"SQL": "server=localhost;database=embeddingsearch;uid=embeddingsearch;pwd=somepassword!;"
|
"SQL": "server=localhost;database=embeddingsearch;uid=embeddingsearch;pwd=somepassword!;",
|
||||||
|
"Cache": "Data Source=embeddings.db;Mode=ReadWriteCreate;Cache=Shared"
|
||||||
},
|
},
|
||||||
"Elmah": {
|
"Elmah": {
|
||||||
"AllowedHosts": [
|
"LogPath": "~/logs"
|
||||||
"127.0.0.1",
|
|
||||||
"::1",
|
|
||||||
"172.17.0.1"
|
|
||||||
]
|
|
||||||
},
|
},
|
||||||
"AiProviders": {
|
"AiProviders": {
|
||||||
"ollama": {
|
"ollama": {
|
||||||
"handler": "ollama",
|
"handler": "ollama",
|
||||||
"baseURL": "http://localhost:11434"
|
"baseURL": "http://localhost:11434",
|
||||||
|
"Allowlist": [".*"],
|
||||||
|
"Denylist": ["qwen3-coder:latest", "qwen3:0.6b", "qwen3-vl", "deepseek-ocr"]
|
||||||
},
|
},
|
||||||
"localAI": {
|
"localAI": {
|
||||||
"handler": "openai",
|
"handler": "openai",
|
||||||
"baseURL": "http://localhost:8080",
|
"baseURL": "http://localhost:8080",
|
||||||
"ApiKey": "Some API key here"
|
"ApiKey": "Some API key here",
|
||||||
|
"Allowlist": [".*"],
|
||||||
|
"Denylist": ["cross-encoder", "jina-reranker-v1-tiny-en", "whisper-small"]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"ApiKeys": ["Some UUID here", "Another UUID here"],
|
"SimpleAuth": {
|
||||||
"UseHttpsRedirection": true
|
"Users": [
|
||||||
|
{
|
||||||
|
"Username": "admin",
|
||||||
|
"Password": "UnsafePractice.67",
|
||||||
|
"Roles": ["Admin"]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"ApiKeys": ["APIKeyOfYourChoice", "AnotherOneIfYouLike"],
|
||||||
|
"Cache": {
|
||||||
|
"CacheTopN": 10000,
|
||||||
|
"StoreEmbeddingCache": true,
|
||||||
|
"StoreTopN": 10000
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -16,5 +16,8 @@
|
|||||||
"Application": "Embeddingsearch.Server"
|
"Application": "Embeddingsearch.Server"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"Embeddingsearch": {
|
||||||
|
"MaxRequestBodySize": 524288000
|
||||||
|
},
|
||||||
"AllowedHosts": "*"
|
"AllowedHosts": "*"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -75,3 +75,12 @@ url("/fonts/bootstrap-icons.woff") format("woff");
|
|||||||
}
|
}
|
||||||
|
|
||||||
.bi-info-circle-fill::before { content: "\f430"; }
|
.bi-info-circle-fill::before { content: "\f430"; }
|
||||||
|
|
||||||
|
td.btn-group {
|
||||||
|
display: revert;
|
||||||
|
min-width: 15rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-bs-theme="light"] img[alt="Logo"] {
|
||||||
|
filter: invert(100%);
|
||||||
|
}
|
||||||
56
src/Server/wwwroot/elmah-ui/custom.css
Normal file
56
src/Server/wwwroot/elmah-ui/custom.css
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
.elmah-return-btn {
|
||||||
|
position: fixed;
|
||||||
|
top: 6px;
|
||||||
|
right: 24px;
|
||||||
|
z-index: 9999;
|
||||||
|
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
|
||||||
|
height: 44px;
|
||||||
|
min-width: 44px;
|
||||||
|
padding: 0 14px;
|
||||||
|
|
||||||
|
background: #85ea2d;
|
||||||
|
color: black;
|
||||||
|
border-radius: 999px;
|
||||||
|
font-weight: 600;
|
||||||
|
text-decoration: none;
|
||||||
|
box-shadow: 0 4px 12px rgba(0,0,0,0.2);
|
||||||
|
|
||||||
|
overflow: hidden;
|
||||||
|
white-space: nowrap;
|
||||||
|
|
||||||
|
justify-content: center;
|
||||||
|
text-decoration: none !important;
|
||||||
|
|
||||||
|
transition:
|
||||||
|
top 0.25s ease,
|
||||||
|
background-color 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* hidden label */
|
||||||
|
.elmah-return-btn::before {
|
||||||
|
content: "Return to Front-end";
|
||||||
|
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||||
|
max-width: 0;
|
||||||
|
opacity: 0;
|
||||||
|
transition:
|
||||||
|
max-width 0.3s ease,
|
||||||
|
opacity 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* expand on hover */
|
||||||
|
.elmah-return-btn.show-label::before,
|
||||||
|
.elmah-return-btn:hover::before {
|
||||||
|
max-width: 220px;
|
||||||
|
padding: 0.5rem;
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* hover colors */
|
||||||
|
.elmah-return-btn.show-label,
|
||||||
|
.elmah-return-btn:hover {
|
||||||
|
background: #0b5ed7;
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
17
src/Server/wwwroot/elmah-ui/custom.js
Normal file
17
src/Server/wwwroot/elmah-ui/custom.js
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
document.addEventListener('DOMContentLoaded', async () => {
|
||||||
|
const url = new URL(window.location.href);
|
||||||
|
const btn = document.createElement("a");
|
||||||
|
btn.href = url.searchParams.get('ReturnUrl') ?? "/";
|
||||||
|
btn.innerText = "⎋";
|
||||||
|
btn.setAttribute("aria-label", "Return to Front-End");
|
||||||
|
btn.className = "elmah-return-btn";
|
||||||
|
|
||||||
|
document.body.appendChild(btn);
|
||||||
|
|
||||||
|
const showLabelBriefly = () => {
|
||||||
|
btn.classList.add("show-label");
|
||||||
|
setTimeout(() => btn.classList.remove("show-label"), 2000);
|
||||||
|
};
|
||||||
|
|
||||||
|
setTimeout(showLabelBriefly, 1000);
|
||||||
|
});
|
||||||
@@ -49,3 +49,13 @@ function showToast(message, type) {
|
|||||||
bsToast.show();
|
bsToast.show();
|
||||||
toast.addEventListener('hidden.bs.toast', () => toast.remove());
|
toast.addEventListener('hidden.bs.toast', () => toast.remove());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
document.addEventListener('DOMContentLoaded', async () => {
|
||||||
|
// Initialize all tooltips
|
||||||
|
var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]'))
|
||||||
|
var tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) {
|
||||||
|
let retVal = new bootstrap.Tooltip(tooltipTriggerEl);
|
||||||
|
tooltipTriggerEl.role = "tooltip";
|
||||||
|
return retVal;
|
||||||
|
});
|
||||||
|
});
|
||||||
File diff suppressed because one or more lines are too long
|
Before Width: | Height: | Size: 9.1 KiB After Width: | Height: | Size: 9.1 KiB |
58
src/Server/wwwroot/swagger-ui/custom.css
Normal file
58
src/Server/wwwroot/swagger-ui/custom.css
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
.swagger-return-btn {
|
||||||
|
position: fixed;
|
||||||
|
top: 6px;
|
||||||
|
left: 24px;
|
||||||
|
z-index: 9999;
|
||||||
|
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
|
||||||
|
height: 44px;
|
||||||
|
min-width: 44px;
|
||||||
|
padding: 0 14px;
|
||||||
|
|
||||||
|
background: #85ea2d;
|
||||||
|
color: black;
|
||||||
|
border-radius: 999px;
|
||||||
|
font-weight: 600;
|
||||||
|
text-decoration: none;
|
||||||
|
box-shadow: 0 4px 12px rgba(0,0,0,0.2);
|
||||||
|
|
||||||
|
overflow: hidden;
|
||||||
|
white-space: nowrap;
|
||||||
|
|
||||||
|
justify-content: center;
|
||||||
|
|
||||||
|
transition:
|
||||||
|
top 0.25s ease,
|
||||||
|
background-color 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* hidden label */
|
||||||
|
.swagger-return-btn::after {
|
||||||
|
content: "Return to Front-end";
|
||||||
|
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||||
|
max-width: 0;
|
||||||
|
opacity: 0;
|
||||||
|
transition:
|
||||||
|
max-width 0.3s ease,
|
||||||
|
opacity 0.2s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* expand on hover */
|
||||||
|
.swagger-return-btn:hover::after {
|
||||||
|
max-width: 220px;
|
||||||
|
padding: 0.5rem;
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* hover colors */
|
||||||
|
.swagger-return-btn:hover {
|
||||||
|
background: #0b5ed7;
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* scrolled state */
|
||||||
|
.swagger-return-btn.scrolled {
|
||||||
|
top: 24px;
|
||||||
|
}
|
||||||
24
src/Server/wwwroot/swagger-ui/custom.js
Normal file
24
src/Server/wwwroot/swagger-ui/custom.js
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
document.addEventListener('DOMContentLoaded', async () => {
|
||||||
|
const url = new URL(window.location.href);
|
||||||
|
const btn = document.createElement("a");
|
||||||
|
btn.href = url.searchParams.get('ReturnUrl') ?? "/";
|
||||||
|
btn.innerText = "⎋";
|
||||||
|
btn.setAttribute("aria-label", "Return to Front-End");
|
||||||
|
btn.className = "swagger-return-btn";
|
||||||
|
|
||||||
|
document.body.appendChild(btn);
|
||||||
|
|
||||||
|
const togglePosition = () => {
|
||||||
|
if (window.scrollY > 0) {
|
||||||
|
btn.classList.add("scrolled");
|
||||||
|
} else {
|
||||||
|
btn.classList.remove("scrolled");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// Initial state
|
||||||
|
togglePosition();
|
||||||
|
|
||||||
|
// On scroll
|
||||||
|
window.addEventListener("scroll", togglePosition, { passive: true });
|
||||||
|
});
|
||||||
240
src/Shared/LRUCache.cs
Normal file
240
src/Shared/LRUCache.cs
Normal file
@@ -0,0 +1,240 @@
|
|||||||
|
namespace Shared;
|
||||||
|
|
||||||
|
public sealed class EnumerableLruCache<TKey, TValue> where TKey : notnull
|
||||||
|
{
|
||||||
|
private sealed record CacheItem(TKey Key, TValue Value);
|
||||||
|
|
||||||
|
private readonly Dictionary<TKey, LinkedListNode<CacheItem>> _map;
|
||||||
|
private readonly LinkedList<CacheItem> _lruList;
|
||||||
|
private readonly ReaderWriterLockSlim _lock = new();
|
||||||
|
|
||||||
|
private int _capacity;
|
||||||
|
|
||||||
|
public EnumerableLruCache(int capacity)
|
||||||
|
{
|
||||||
|
if (capacity <= 0)
|
||||||
|
throw new ArgumentOutOfRangeException(nameof(capacity));
|
||||||
|
|
||||||
|
_capacity = capacity;
|
||||||
|
_map = new Dictionary<TKey, LinkedListNode<CacheItem>>(capacity);
|
||||||
|
_lruList = new LinkedList<CacheItem>();
|
||||||
|
}
|
||||||
|
|
||||||
|
public int Capacity
|
||||||
|
{
|
||||||
|
get
|
||||||
|
{
|
||||||
|
_lock.EnterReadLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
return _capacity;
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitReadLock();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
set
|
||||||
|
{
|
||||||
|
ArgumentOutOfRangeException.ThrowIfNegativeOrZero(value);
|
||||||
|
|
||||||
|
_lock.EnterWriteLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
_capacity = value;
|
||||||
|
TrimIfNeeded();
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitWriteLock();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public int Count
|
||||||
|
{
|
||||||
|
get
|
||||||
|
{
|
||||||
|
_lock.EnterReadLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
return _map.Count;
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitReadLock();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public TValue this[TKey key]
|
||||||
|
{
|
||||||
|
get
|
||||||
|
{
|
||||||
|
if (!TryGetValue(key, out var value))
|
||||||
|
throw new KeyNotFoundException();
|
||||||
|
|
||||||
|
return value!;
|
||||||
|
}
|
||||||
|
set => Set(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
public bool TryGetValue(TKey key, out TValue? value)
|
||||||
|
{
|
||||||
|
_lock.EnterUpgradeableReadLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
if (!_map.TryGetValue(key, out var node))
|
||||||
|
{
|
||||||
|
value = default;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
value = node.Value.Value;
|
||||||
|
|
||||||
|
// LRU aktualisieren
|
||||||
|
_lock.EnterWriteLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
_lruList.Remove(node);
|
||||||
|
_lruList.AddFirst(node);
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitWriteLock();
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitUpgradeableReadLock();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void Set(TKey key, TValue value)
|
||||||
|
{
|
||||||
|
_lock.EnterWriteLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
if (_map.TryGetValue(key, out var existing))
|
||||||
|
{
|
||||||
|
// Update + nach vorne
|
||||||
|
existing.Value = existing.Value with { Value = value };
|
||||||
|
_lruList.Remove(existing);
|
||||||
|
_lruList.AddFirst(existing);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var item = new CacheItem(key, value);
|
||||||
|
var node = new LinkedListNode<CacheItem>(item);
|
||||||
|
|
||||||
|
_lruList.AddFirst(node);
|
||||||
|
_map[key] = node;
|
||||||
|
|
||||||
|
TrimIfNeeded();
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitWriteLock();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public bool Remove(TKey key)
|
||||||
|
{
|
||||||
|
_lock.EnterWriteLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
if (!_map.TryGetValue(key, out var node))
|
||||||
|
return false;
|
||||||
|
|
||||||
|
_lruList.Remove(node);
|
||||||
|
_map.Remove(key);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitWriteLock();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public bool ContainsKey(TKey key)
|
||||||
|
{
|
||||||
|
_lock.EnterReadLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
return _map.ContainsKey(key);
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitReadLock();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public Dictionary<TKey, TValue> AsDictionary()
|
||||||
|
{
|
||||||
|
_lock.EnterReadLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
return _map.Values.ToDictionary(
|
||||||
|
n => n.Value.Key,
|
||||||
|
n => n.Value.Value
|
||||||
|
);
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitReadLock();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public IEnumerable<KeyValuePair<TKey, TValue>> Items()
|
||||||
|
{
|
||||||
|
_lock.EnterReadLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
foreach (var item in _lruList)
|
||||||
|
{
|
||||||
|
yield return new KeyValuePair<TKey, TValue>(item.Key, item.Value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitReadLock();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
|
||||||
|
{
|
||||||
|
List<KeyValuePair<TKey, TValue>> snapshot;
|
||||||
|
|
||||||
|
_lock.EnterReadLock();
|
||||||
|
try
|
||||||
|
{
|
||||||
|
snapshot = new List<KeyValuePair<TKey, TValue>>(_map.Count);
|
||||||
|
|
||||||
|
foreach (var item in _lruList)
|
||||||
|
{
|
||||||
|
snapshot.Add(new KeyValuePair<TKey, TValue>(
|
||||||
|
item.Key,
|
||||||
|
item.Value
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
finally
|
||||||
|
{
|
||||||
|
_lock.ExitReadLock();
|
||||||
|
}
|
||||||
|
|
||||||
|
return snapshot.GetEnumerator();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void TrimIfNeeded()
|
||||||
|
{
|
||||||
|
while (_map.Count > _capacity)
|
||||||
|
{
|
||||||
|
var lruNode = _lruList.Last!;
|
||||||
|
_lruList.RemoveLast();
|
||||||
|
_map.Remove(lruNode.Value.Key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -95,10 +95,14 @@ public struct DateTimedSearchResult(DateTime dateTime, List<ResultItem> results)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public struct SearchdomainSettings(bool cacheReconciliation = false)
|
public struct SearchdomainSettings(bool cacheReconciliation = false, int queryCacheSize = 1_000_000, bool parallelEmbeddingsPrefetch = false)
|
||||||
{
|
{
|
||||||
[JsonPropertyName("CacheReconciliation")]
|
[JsonPropertyName("CacheReconciliation")]
|
||||||
public bool CacheReconciliation { get; set; } = cacheReconciliation;
|
public bool CacheReconciliation { get; set; } = cacheReconciliation;
|
||||||
|
[JsonPropertyName("QueryCacheSize")]
|
||||||
|
public int QueryCacheSize { get; set; } = queryCacheSize;
|
||||||
|
[JsonPropertyName("ParallelEmbeddingsPrefetch")]
|
||||||
|
public bool ParallelEmbeddingsPrefetch { get; set; } = parallelEmbeddingsPrefetch;
|
||||||
}
|
}
|
||||||
|
|
||||||
public static class MemorySizes
|
public static class MemorySizes
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
using System.Text.Json.Serialization;
|
using System.Text.Json.Serialization;
|
||||||
|
using Shared;
|
||||||
|
|
||||||
namespace Shared.Models;
|
namespace Shared.Models;
|
||||||
|
|
||||||
@@ -25,7 +26,7 @@ public class SearchdomainDeleteResults : SuccesMessageBaseModel
|
|||||||
public required int DeletedEntities { get; set; }
|
public required int DeletedEntities { get; set; }
|
||||||
}
|
}
|
||||||
|
|
||||||
public class SearchdomainSearchesResults : SuccesMessageBaseModel
|
public class SearchdomainQueriesResults : SuccesMessageBaseModel
|
||||||
{
|
{
|
||||||
[JsonPropertyName("Searches")]
|
[JsonPropertyName("Searches")]
|
||||||
public required Dictionary<string, DateTimedSearchResult> Searches { get; set; }
|
public required Dictionary<string, DateTimedSearchResult> Searches { get; set; }
|
||||||
@@ -41,10 +42,14 @@ public class SearchdomainSettingsResults : SuccesMessageBaseModel
|
|||||||
public required SearchdomainSettings? Settings { get; set; }
|
public required SearchdomainSettings? Settings { get; set; }
|
||||||
}
|
}
|
||||||
|
|
||||||
public class SearchdomainSearchCacheSizeResults : SuccesMessageBaseModel
|
public class SearchdomainQueryCacheSizeResults : SuccesMessageBaseModel
|
||||||
{
|
{
|
||||||
[JsonPropertyName("QueryCacheSizeBytes")]
|
[JsonPropertyName("ElementCount")]
|
||||||
public required long? QueryCacheSizeBytes { get; set; }
|
public required int? ElementCount { get; set; }
|
||||||
|
[JsonPropertyName("ElementMaxCount")]
|
||||||
|
public required int? ElementMaxCount { get; set; }
|
||||||
|
[JsonPropertyName("SizeBytes")]
|
||||||
|
public required long? SizeBytes { get; set; }
|
||||||
}
|
}
|
||||||
|
|
||||||
public class SearchdomainInvalidateCacheResults : SuccesMessageBaseModel {}
|
public class SearchdomainInvalidateCacheResults : SuccesMessageBaseModel {}
|
||||||
|
|||||||
@@ -8,14 +8,28 @@ public class ServerGetModelsResult : SuccesMessageBaseModel
|
|||||||
public string[]? Models { get; set; }
|
public string[]? Models { get; set; }
|
||||||
}
|
}
|
||||||
|
|
||||||
public class ServerGetEmbeddingCacheSizeResult : SuccesMessageBaseModel
|
public class ServerGetStatsResult : SuccesMessageBaseModel
|
||||||
{
|
{
|
||||||
[JsonPropertyName("SizeInBytes")]
|
[JsonPropertyName("EmbeddingCacheUtilization")]
|
||||||
public required long? SizeInBytes { get; set; }
|
public long? EmbeddingCacheUtilization { get; set; }
|
||||||
[JsonPropertyName("MaxElementCount")]
|
[JsonPropertyName("EmbeddingCacheMaxElementCount")]
|
||||||
public required long? MaxElementCount { get; set; }
|
public long? EmbeddingCacheMaxElementCount { get; set; }
|
||||||
[JsonPropertyName("ElementCount")]
|
[JsonPropertyName("ElementCount")]
|
||||||
public required long? ElementCount { get; set; }
|
public long? EmbeddingCacheElementCount { get; set; }
|
||||||
[JsonPropertyName("EmbeddingsCount")]
|
[JsonPropertyName("EmbeddingsCount")]
|
||||||
public required long? EmbeddingsCount { get; set; }
|
public long? EmbeddingsCount { get; set; }
|
||||||
|
[JsonPropertyName("EntityCount")]
|
||||||
|
public long? EntityCount { get; set; }
|
||||||
|
[JsonPropertyName("QueryCacheElementCount")]
|
||||||
|
public long? QueryCacheElementCount { get; set; }
|
||||||
|
[JsonPropertyName("QueryCacheMaxElementCountAll")]
|
||||||
|
public long? QueryCacheMaxElementCountAll { get; set; }
|
||||||
|
[JsonPropertyName("QueryCacheMaxElementCountLoadedSearchdomainsOnly")]
|
||||||
|
public long? QueryCacheMaxElementCountLoadedSearchdomainsOnly { get; set; }
|
||||||
|
[JsonPropertyName("QueryCacheUtilization")]
|
||||||
|
public long? QueryCacheUtilization { get; set; }
|
||||||
|
[JsonPropertyName("DatabaseTotalSize")]
|
||||||
|
public long? DatabaseTotalSize { get; set; }
|
||||||
|
[JsonPropertyName("RamTotalSize")]
|
||||||
|
public long? RamTotalSize { get; set; }
|
||||||
}
|
}
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
<Project Sdk="Microsoft.NET.Sdk">
|
<Project Sdk="Microsoft.NET.Sdk">
|
||||||
|
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
<TargetFramework>net8.0</TargetFramework>
|
<TargetFramework>net10.0</TargetFramework>
|
||||||
<ImplicitUsings>enable</ImplicitUsings>
|
<ImplicitUsings>enable</ImplicitUsings>
|
||||||
<Nullable>enable</Nullable>
|
<Nullable>enable</Nullable>
|
||||||
</PropertyGroup>
|
</PropertyGroup>
|
||||||
|
|||||||
Reference in New Issue
Block a user