diff --git a/.github/actions/dotnet-sdk/action.yml b/.github/actions/dotnet-sdk/action.yml index e53f87b5..8a0b68c6 100644 --- a/.github/actions/dotnet-sdk/action.yml +++ b/.github/actions/dotnet-sdk/action.yml @@ -14,7 +14,7 @@ runs: with: key: dotnet-sdk-windows-${{ inputs.UET_FRAMEWORK_TARGET }} restore-keys: dotnet-sdk-windows-${{ inputs.UET_FRAMEWORK_TARGET }} - path: .dotnet-${{ inputs.UET_FRAMEWORK_TARGET }} + path: .dotnet - name: Cache .NET SDK (macOS) if: ${{ runner.os == 'macOS' }} id: cache-sdk-mac @@ -22,64 +22,80 @@ runs: with: key: dotnet-sdk-mac-${{ inputs.UET_FRAMEWORK_TARGET }} restore-keys: dotnet-sdk-mac-${{ inputs.UET_FRAMEWORK_TARGET }} - path: .dotnet-${{ inputs.UET_FRAMEWORK_TARGET }} + path: .dotnet + - name: Cache .NET SDK (Linux) + if: ${{ runner.os == 'Linux' }} + id: cache-sdk-linux + uses: actions/cache@v4 + with: + key: dotnet-sdk-linux-${{ inputs.UET_FRAMEWORK_TARGET }} + restore-keys: dotnet-sdk-linux-${{ inputs.UET_FRAMEWORK_TARGET }} + path: .dotnet - name: Download .NET SDK (Windows) if: ${{ runner.os == 'Windows' && steps.cache-sdk-win.outputs.cache-hit != 'true' }} shell: pwsh - env: - UET_FRAMEWORK_TARGET: ${{ inputs.UET_FRAMEWORK_TARGET }} - UET_DOTNET_WIN_DL: https://download.visualstudio.microsoft.com/download/pr/6902745c-34bd-4d66-8e84-d5b61a17dfb7/e61732b00f7e144e162d7e6914291f16/dotnet-sdk-8.0.101-win-x64.zip run: | - if (!(Test-Path .dotnet-${env:UET_FRAMEWORK_TARGET}\dotnet\dotnet-extracted)) { - if (Test-Path ".dotnet-${env:UET_FRAMEWORK_TARGET}") { - Remove-Item -Recurse -Force ".dotnet-${env:UET_FRAMEWORK_TARGET}" + if (!(Test-Path .dotnet\dotnet\dotnet-extracted)) { + if (Test-Path ".dotnet") { + Remove-Item -Recurse -Force ".dotnet" } Write-Host "Setting up .NET SDK..." 
- New-Item -ItemType Directory ".dotnet-${env:UET_FRAMEWORK_TARGET}" | Out-Null - curl.exe -L -o ".dotnet-${env:UET_FRAMEWORK_TARGET}\dotnet.zip" "${env:UET_DOTNET_WIN_DL}" + New-Item -ItemType Directory ".dotnet" | Out-Null + Invoke-WebRequest https://dot.net/v1/dotnet-install.ps1 -UseBasicParsing -OutFile ".dotnet\dotnet-install.ps1" + .\.dotnet\dotnet-install.ps1 -Channel STS -InstallDir ".dotnet\dotnet" if ($LastExitCode -ne 0) { exit $LastExitCode } - Expand-Archive -Path ".dotnet-${env:UET_FRAMEWORK_TARGET}\dotnet.zip" -DestinationPath ".dotnet-${env:UET_FRAMEWORK_TARGET}\dotnet" -Force | Out-Null - Set-Content -Path .dotnet-${env:UET_FRAMEWORK_TARGET}\dotnet\dotnet-extracted -Value "done" + Set-Content -Path .dotnet\dotnet\dotnet-extracted -Value "done" } - name: Download .NET SDK (macOS) if: ${{ runner.os == 'macOS' && steps.cache-sdk-mac.outputs.cache-hit != 'true' }} shell: pwsh - env: - UET_FRAMEWORK_TARGET: ${{ inputs.UET_FRAMEWORK_TARGET }} - UET_DOTNET_MAC_DL: https://download.visualstudio.microsoft.com/download/pr/ef083c06-7aee-4a4f-b18b-50c9a8990753/e206864e7910e81bbd9cb7e674ff1b4c/dotnet-sdk-8.0.101-osx-arm64.tar.gz run: | - if (!(Test-Path .dotnet-${env:UET_FRAMEWORK_TARGET}/dotnet/dotnet-extracted)) { - if (Test-Path ".dotnet-${env:UET_FRAMEWORK_TARGET}") { - Remove-Item -Recurse -Force ".dotnet-${env:UET_FRAMEWORK_TARGET}" + if (!(Test-Path .dotnet/dotnet/dotnet-extracted)) { + if (Test-Path ".dotnet") { + Remove-Item -Recurse -Force ".dotnet" } Write-Host "Setting up .NET SDK..." 
- New-Item -ItemType Directory ".dotnet-${env:UET_FRAMEWORK_TARGET}" | Out-Null - curl -L -o ".dotnet-${env:UET_FRAMEWORK_TARGET}/dotnet.tar.gz" "${env:UET_DOTNET_MAC_DL}" + New-Item -ItemType Directory ".dotnet" | Out-Null + Invoke-WebRequest https://dot.net/v1/dotnet-install.sh -UseBasicParsing -OutFile ".dotnet/dotnet-install.sh" + chmod a+x .dotnet/dotnet-install.sh + ./.dotnet/dotnet-install.sh --channel STS --install-dir ".dotnet/dotnet" if ($LastExitCode -ne 0) { exit $LastExitCode } - New-Item -ItemType Directory ".dotnet-${env:UET_FRAMEWORK_TARGET}/dotnet" | Out-Null - Push-Location ".dotnet-${env:UET_FRAMEWORK_TARGET}/dotnet" - try { - tar -xvf "../dotnet.tar.gz" - } finally { - Pop-Location + Set-Content -Path .dotnet/dotnet/dotnet-extracted -Value "done" + } + - name: Download .NET SDK (Linux) + if: ${{ runner.os == 'Linux' && steps.cache-sdk-linux.outputs.cache-hit != 'true' }} + shell: pwsh + run: | + if (!(Test-Path .dotnet/dotnet/dotnet-extracted)) { + if (Test-Path ".dotnet") { + Remove-Item -Recurse -Force ".dotnet" } - Set-Content -Path .dotnet-${env:UET_FRAMEWORK_TARGET}/dotnet/dotnet-extracted -Value "done" + Write-Host "Setting up .NET SDK..." 
+ New-Item -ItemType Directory ".dotnet" | Out-Null + Invoke-WebRequest https://dot.net/v1/dotnet-install.sh -UseBasicParsing -OutFile ".dotnet/dotnet-install.sh" + chmod a+x .dotnet/dotnet-install.sh + ./.dotnet/dotnet-install.sh --channel STS --install-dir ".dotnet/dotnet" + if ($LastExitCode -ne 0) { + exit $LastExitCode + } + Set-Content -Path .dotnet/dotnet/dotnet-extracted -Value "done" } - name: Add .NET SDK to PATH (Windows) if: ${{ runner.os == 'Windows' }} shell: pwsh - env: - UET_FRAMEWORK_TARGET: ${{ inputs.UET_FRAMEWORK_TARGET }} run: | - Add-Content -Path "${env:GITHUB_PATH}" -Value ".dotnet-${env:UET_FRAMEWORK_TARGET}\dotnet" + Add-Content -Path "${env:GITHUB_PATH}" -Value ".dotnet\dotnet" - name: Add .NET SDK to PATH (macOS) if: ${{ runner.os == 'macOS' }} shell: pwsh - env: - UET_FRAMEWORK_TARGET: ${{ inputs.UET_FRAMEWORK_TARGET }} run: | - Add-Content -Path "${env:GITHUB_PATH}" -Value ".dotnet-${env:UET_FRAMEWORK_TARGET}/dotnet" \ No newline at end of file + Add-Content -Path "${env:GITHUB_PATH}" -Value ".dotnet/dotnet" + - name: Add .NET SDK to PATH (Linux) + if: ${{ runner.os == 'Linux' }} + shell: pwsh + run: | + Add-Content -Path "${env:GITHUB_PATH}" -Value ".dotnet/dotnet" \ No newline at end of file diff --git a/.github/actions/upload-artifact/action.yml b/.github/actions/upload-artifact/action.yml index 7c841144..8b603667 100644 --- a/.github/actions/upload-artifact/action.yml +++ b/.github/actions/upload-artifact/action.yml @@ -129,12 +129,27 @@ runs: path: | **/*.nupkg - - name: Upload Test Results - if: ${{ inputs.UET_ARTIFACT_NAME == 'test-results' }} + - name: Upload Windows Test Results + if: ${{ inputs.UET_ARTIFACT_NAME == 'test-results-win' }} uses: actions/upload-artifact@v4 with: - name: test-results + name: test-results-win if-no-files-found: error path: | TestResults/*.test-result.trx - \ No newline at end of file + - name: Upload Linux Test Results + if: ${{ inputs.UET_ARTIFACT_NAME == 'test-results-linux' }} + uses: 
actions/upload-artifact@v4 + with: + name: test-results-linux + if-no-files-found: error + path: | + TestResults/*.test-result.trx + - name: Upload Cloud Framework Test Results + if: ${{ inputs.UET_ARTIFACT_NAME == 'test-results-cf' }} + uses: actions/upload-artifact@v4 + with: + name: test-results-cf + if-no-files-found: error + path: | + TestResults/*.test-result.trx \ No newline at end of file diff --git a/.github/workflows/test-publish.yml b/.github/workflows/test-publish.yml index eb0c90e8..644027c5 100644 --- a/.github/workflows/test-publish.yml +++ b/.github/workflows/test-publish.yml @@ -19,7 +19,21 @@ jobs: - name: Report Test Results uses: dorny/test-reporter@v1 with: - artifact: test-results + artifact: test-results-win name: Windows Test Results path: '*.test-result.trx' + reporter: dotnet-trx + - name: Report Test Results + uses: dorny/test-reporter@v1 + with: + artifact: test-results-linux + name: Linux Test Results + path: '*.test-result.trx' + reporter: dotnet-trx + - name: Report Test Results + uses: dorny/test-reporter@v1 + with: + artifact: test-results-cf + name: Cloud Framework Test Results + path: '*.test-result.trx' reporter: dotnet-trx \ No newline at end of file diff --git a/.github/workflows/uet.yml b/.github/workflows/uet.yml index 83c00c89..41e78b6b 100644 --- a/.github/workflows/uet.yml +++ b/.github/workflows/uet.yml @@ -412,7 +412,7 @@ jobs: exit $LastExitCode } foreach ($Item in (Get-ChildItem UET -Filter *.Tests)) { - if (Test-Path "$($Item.FullName)/$($Item.Name).csproj") { + if ((Test-Path "$($Item.FullName)/$($Item.Name).csproj") -and ($Item.Name -ne "Redpoint.CloudFramework.Tests")) { Write-Host "============ STARTING: $($Item.Name) ============" dotnet test --logger:"console" --logger:"trx;LogFileName=$($Item.Name).test-result.trx" --results-directory "$((Get-Location).Path)\TestResults" "$($Item.FullName)/bin/Release/${{ env.UET_FRAMEWORK_TARGET }}/$($Item.Name).dll" if ($LastExitCode -ne 0) { @@ -425,7 +425,7 @@ jobs: - name: 
Upload Test Results uses: ./.github/actions/upload-artifact with: - UET_ARTIFACT_NAME: test-results + UET_ARTIFACT_NAME: test-results-win UET_FRAMEWORK_TARGET: ${{ env.UET_FRAMEWORK_TARGET }} - name: Upload Packages uses: ./.github/actions/upload-artifact @@ -457,6 +457,47 @@ jobs: echo "Package version: $UET_PACKAGE_VERSION" dotnet build -c Release /p:PackageVersion=$UET_PACKAGE_VERSION UET/UET.sln + libs-linux: + name: "Build and Test Libraries on Linux" + runs-on: ubuntu-latest + needs: + - timestamp + env: + UET_PACKAGE_VERSION: ${{ needs.timestamp.outputs.version }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Install .NET SDK + uses: ./.github/actions/dotnet-sdk + with: + UET_FRAMEWORK_TARGET: ${{ env.UET_FRAMEWORK_TARGET }} + - name: Build and Test Libraries on Linux + shell: pwsh + run: | + dotnet build -c Release "/p:PackageVersion=${env:UET_PACKAGE_VERSION}" UET/UET.sln + if ($LastExitCode -ne 0) { + Write-Host "dotnet build (UET.sln) failed with exit code $LastExitCode" + exit $LastExitCode + } + foreach ($Item in (Get-ChildItem UET -Filter *.Tests)) { + if ((Test-Path "$($Item.FullName)/$($Item.Name).csproj") -and ($Item.Name -ne "Redpoint.CloudFramework.Tests")) { + Write-Host "============ STARTING: $($Item.Name) ============" + dotnet test --logger:"console" --logger:"trx;LogFileName=$($Item.Name).test-result.trx" --results-directory "$((Get-Location).Path)\TestResults" "$($Item.FullName)/bin/Release/${{ env.UET_FRAMEWORK_TARGET }}/$($Item.Name).dll" + if ($LastExitCode -ne 0) { + Write-Host "============ FAILED: $($Item.Name) ============" + exit $LastExitCode + } + Write-Host "============ PASSED: $($Item.Name) ============" + } + } + - name: Upload Test Results + uses: ./.github/actions/upload-artifact + with: + UET_ARTIFACT_NAME: test-results-linux + UET_FRAMEWORK_TARGET: ${{ env.UET_FRAMEWORK_TARGET }} + pass-2-win: name: "Build Windows Pass 2" runs-on: windows-latest @@ -667,6 +708,7 @@ jobs: - 
timestamp - libs-win - libs-mac + - libs-linux - uefs-win - uefs-mac - shim-win @@ -675,6 +717,7 @@ jobs: - pass-2-win - pass-2-mac - pass-2-linux + - cf-linux-tests env: UET_PACKAGE_VERSION: ${{ needs.timestamp.outputs.version }} steps: @@ -715,6 +758,7 @@ jobs: - timestamp - libs-win - libs-mac + - libs-linux - uefs-win - uefs-mac - shim-win @@ -723,6 +767,7 @@ jobs: - pass-2-win - pass-2-mac - pass-2-linux + - cf-linux-tests env: UET_PACKAGE_VERSION: ${{ needs.timestamp.outputs.version }} steps: @@ -808,6 +853,7 @@ jobs: - timestamp - libs-win - libs-mac + - libs-linux - uefs-win - uefs-mac - shim-win @@ -816,6 +862,7 @@ jobs: - pass-2-win - pass-2-mac - pass-2-linux + - cf-linux-tests env: UET_PACKAGE_VERSION: ${{ needs.timestamp.outputs.version }} steps: @@ -853,6 +900,7 @@ jobs: - timestamp - libs-win - libs-mac + - libs-linux - uefs-win - uefs-mac - shim-win @@ -861,6 +909,7 @@ jobs: - pass-2-win - pass-2-mac - pass-2-linux + - cf-linux-tests env: UET_PACKAGE_VERSION: ${{ needs.timestamp.outputs.version }} steps: @@ -907,6 +956,7 @@ jobs: - timestamp - libs-win - libs-mac + - libs-linux - uefs-win - uefs-mac - shim-win @@ -915,6 +965,7 @@ jobs: - pass-2-win - pass-2-mac - pass-2-linux + - cf-linux-tests env: UET_PACKAGE_VERSION: ${{ needs.timestamp.outputs.version }} steps: @@ -948,3 +999,56 @@ jobs: docker buildx create --name img-builder-linux --use --platform linux/amd64 docker buildx build --platform linux/amd64 --output=type=registry -f UET/Lib/Container/linux-wine.Dockerfile -t "ghcr.io/redpointgames/uet/uet:${env:UET_PACKAGE_VERSION}-wine" --build-arg UET_TARGET_FRAMEWORK=${{ env.UET_FRAMEWORK_TARGET }} . docker buildx build --platform linux/amd64 --output=type=registry -f UET/Lib/Container/linux-wine.Dockerfile -t "ghcr.io/redpointgames/uet/uet:latest-wine" --build-arg UET_TARGET_FRAMEWORK=${{ env.UET_FRAMEWORK_TARGET }} . 
+ + cf-linux-tests: + name: "Test Cloud Framework" + runs-on: ubuntu-latest + container: + image: mcr.microsoft.com/dotnet/sdk:latest + services: + redis: + image: redis:6.0.10 + ports: + - 6379:6379 + pubsub: + image: ghcr.io/redpointgames/uet/pubsub-emulator:latest + ports: + - 9000:9000 + datastore: + image: ghcr.io/redpointgames/uet/datastore-emulator:latest + ports: + - 9001:9001 + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Install .NET SDK + uses: ./.github/actions/dotnet-sdk + with: + UET_FRAMEWORK_TARGET: ${{ env.UET_FRAMEWORK_TARGET }} + - name: Build and Test Cloud Framework + shell: pwsh + run: | + foreach ($Item in (Get-ChildItem UET -Filter Redpoint.CloudFramework.Tests)) { + if (Test-Path "$($Item.FullName)/$($Item.Name).csproj") { + dotnet build -c Release "$($Item.FullName)/$($Item.Name).csproj" + if ($LastExitCode -ne 0) { + Write-Host "dotnet build ($($Item.FullName)/$($Item.Name).csproj) failed with exit code $LastExitCode" + exit $LastExitCode + } + + Write-Host "============ STARTING: $($Item.Name) ============" + dotnet test --logger:"console" --logger:"trx;LogFileName=$($Item.Name).test-result.trx" --results-directory "$((Get-Location).Path)\TestResults" "$($Item.FullName)/bin/Release/${{ env.UET_FRAMEWORK_TARGET }}/$($Item.Name).dll" + if ($LastExitCode -ne 0) { + Write-Host "============ FAILED: $($Item.Name) ============" + exit $LastExitCode + } + Write-Host "============ PASSED: $($Item.Name) ============" + } + } + - name: Upload Test Results + uses: ./.github/actions/upload-artifact + with: + UET_ARTIFACT_NAME: test-results-cf + UET_FRAMEWORK_TARGET: ${{ env.UET_FRAMEWORK_TARGET }} diff --git a/UET/Lib/Framework.AspNetCore.Build.props b/UET/Lib/Framework.AspNetCore.Build.props new file mode 100644 index 00000000..9c3637f2 --- /dev/null +++ b/UET/Lib/Framework.AspNetCore.Build.props @@ -0,0 +1,8 @@ + + + + + + + + diff --git 
a/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/AspNetFileSystem.cs b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/AspNetFileSystem.cs new file mode 100644 index 00000000..4b6b2ad2 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/AspNetFileSystem.cs @@ -0,0 +1,51 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using Microsoft.AspNetCore.Hosting; +using React.Exceptions; + +namespace React.AspNet +{ + /// + /// Handles file system functionality, such as reading files. Maps all paths from + /// application-relative (~/...) to full paths using ASP.NET's MapPath method. + /// + public class AspNetFileSystem : FileSystemBase + { + private readonly IWebHostEnvironment _hostingEnv; + + /// + /// Initializes a new instance of the class. + /// + /// The .NET Core hosting environment + public AspNetFileSystem(IWebHostEnvironment hostingEnv) + { + _hostingEnv = hostingEnv; + } + + /// + /// Converts a path from an application relative path (~/...) 
to a full filesystem path + /// + /// App-relative path of the file + /// Full path of the file + public override string MapPath(string relativePath) + { + if (_hostingEnv.WebRootPath == null) + { + throw new ReactException("WebRootPath was null, has the wwwroot folder been deployed along with your app?"); + } + + if (relativePath.StartsWith(_hostingEnv.WebRootPath)) + { + return relativePath; + } + relativePath = relativePath.TrimStart('~').TrimStart('/').TrimStart('\\'); + + return Path.GetFullPath(Path.Combine(_hostingEnv.WebRootPath, relativePath)); + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/MemoryFileCacheCore.cs b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/MemoryFileCacheCore.cs new file mode 100644 index 00000000..fa663bc7 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/MemoryFileCacheCore.cs @@ -0,0 +1,83 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ +using Microsoft.Extensions.Caching.Memory; +using Microsoft.AspNetCore.Hosting; + +namespace React.AspNet +{ + /// + /// Memory cache implementation for React.ICache. Uses IMemoryCache from .NET Core. + /// + public class MemoryFileCacheCore : ICache + { + private readonly IMemoryCache _cache; + private readonly IWebHostEnvironment _hostingEnv; + + /// + /// Initializes a new instance of the class. + /// + /// The cache to use + /// The ASP.NET hosting environment. + public MemoryFileCacheCore(IMemoryCache cache, IWebHostEnvironment hostingEnv) + { + _cache = cache; + _hostingEnv = hostingEnv; + } + + /// + /// Get an item from the cache. Returns if the item does + /// not exist. + /// + /// Type of data + /// The cache key + /// Value to return if item is not in the cache + /// Data from cache, otherwise + public T Get(string key, T fallback = default(T)) + { + return (T)(_cache.Get(key) ?? 
fallback); + } + + /// + /// Sets an item in the cache. + /// + /// Type of data + /// The cache key + /// Data to cache + /// + /// Sliding expiration, if cache key is not accessed in this time period it will + /// automatically be removed from the cache + /// + /// + /// Filenames this cached item is dependent on. If any of these files change, the cache + /// will be cleared automatically + /// + public void Set(string key, T data, TimeSpan slidingExpiration, IEnumerable cacheDependencyFiles = null) + { + if (data == null) + { + _cache.Remove(key); + return; + } + + var options = new MemoryCacheEntryOptions + { + SlidingExpiration = slidingExpiration, + }; + + if (cacheDependencyFiles != null) + { + foreach (var file in cacheDependencyFiles) + { + var relativePath = file.Replace(_hostingEnv.WebRootPath, string.Empty).TrimStart('\\', '/'); + options.AddExpirationToken(_hostingEnv.WebRootFileProvider.Watch(relativePath)); + } + } + + _cache.Set(key, data, options); + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/README.md b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/README.md new file mode 100644 index 00000000..b6d307a7 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/README.md @@ -0,0 +1,5 @@ +# Redpoint.ThirdParty.React.AspNet + +This is a fork of React.NET (https://github.com/reactjs/react.net) that adds support for React v18. + +If you want to use this fork in an ASP.NET Core project, use the `Redpoint.ThirdParty.React.AspNet` package. \ No newline at end of file diff --git a/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/ReactBuilderExtensions.cs b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/ReactBuilderExtensions.cs new file mode 100644 index 00000000..b91bc3a0 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/ReactBuilderExtensions.cs @@ -0,0 +1,35 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using Microsoft.AspNetCore.Builder; +using Microsoft.Extensions.DependencyInjection; + +namespace React.AspNet +{ + /// + /// Handles registering ReactJS.NET middleware in an ASP.NET . + /// + public static class ReactBuilderExtensions + { + /// + /// Initialises ReactJS.NET for this application + /// + /// ASP.NET application builder + /// ReactJS.NET configuration + /// The application builder (for chaining) + public static IApplicationBuilder UseReact( + this IApplicationBuilder app, + Action configure + ) + { + // Apply configuration. + configure(app.ApplicationServices.GetRequiredService()); + + return app; + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/ReactServiceCollectionExtensions.cs b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/ReactServiceCollectionExtensions.cs new file mode 100644 index 00000000..148883aa --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/ReactServiceCollectionExtensions.cs @@ -0,0 +1,38 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using JavaScriptEngineSwitcher.Core; +using Microsoft.Extensions.DependencyInjection; + +namespace React.AspNet +{ + /// + /// Handles registering ReactJS.NET services in the ASP.NET . 
+ /// + public static class ReactServiceCollectionExtensions + { + /// + /// Registers all services required for ReactJS.NET + /// + /// ASP.NET services + /// The service collection (for chaining) + public static IServiceCollection AddReact(this IServiceCollection services) + { + services.AddSingleton(); + services.AddScoped(); + services.AddSingleton(sp => JsEngineSwitcher.Current); + services.AddSingleton(); + services.AddSingleton(); + services.AddScoped(); + + services.AddSingleton(); + services.AddSingleton(); + + return services; + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/Redpoint.ThirdParty.React.AspNet.Middleware.csproj b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/Redpoint.ThirdParty.React.AspNet.Middleware.csproj new file mode 100644 index 00000000..be4904c3 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.AspNet.Middleware/Redpoint.ThirdParty.React.AspNet.Middleware.csproj @@ -0,0 +1,30 @@ + + + + + + React.AspNet.Middleware + React.AspNet.Middleware + + + + + + + A fork of React.NET (https://github.com/reactjs/react.net) that adds support for React v18. + Redpoint.ThirdParty.React.AspNet.Middleware + react, react.net + MIT + June Rhodes, Daniel Lo Nigro + + + + + + + + + 7035 + + + diff --git a/UET/Lib/Redpoint.ThirdParty.React.AspNet/HtmlHelperExtensions.cs b/UET/Lib/Redpoint.ThirdParty.React.AspNet/HtmlHelperExtensions.cs new file mode 100644 index 00000000..85faed07 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.AspNet/HtmlHelperExtensions.cs @@ -0,0 +1,221 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +using System.Text; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.AspNetCore.Mvc.Rendering; +using Microsoft.AspNetCore.Html; +using IHtmlString = Microsoft.AspNetCore.Html.IHtmlContent; +using Microsoft.AspNetCore.Mvc; + +namespace React.AspNet +{ + /// + /// HTML Helpers for utilising React from an ASP.NET MVC application. + /// + public static class HtmlHelperExtensions + { + [ThreadStatic] + private static StringWriter _sharedStringWriter; + + /// + /// Renders the specified React component + /// + /// Type of the props + /// HTML helper + /// Name of the component + /// Props to initialise the component with + /// HTML tag to wrap the component in. Defaults to <div> + /// ID to use for the container HTML tag. Defaults to an auto-generated ID + /// Skip rendering server-side and only output client-side initialisation code. Defaults to false + /// Skip rendering React specific data-attributes, container and client-side initialisation during server side rendering. Defaults to false + /// HTML class(es) to set on the container tag + /// A custom exception handler that will be called if a component throws during a render. 
Args: (Exception ex, string componentName, string containerId) + /// Functions to call during component render + /// The component's HTML + public static IHtmlString React( + this IHtmlHelper htmlHelper, + string componentName, + T props, + string htmlTag = null, + string containerId = null, + bool clientOnly = false, + bool serverOnly = false, + string containerClass = null, + Action exceptionHandler = null, + IRenderFunctions renderFunctions = null + ) + { + var instance = htmlHelper.ViewContext.HttpContext.RequestServices.GetRequiredService(); + + try + { + var reactComponent = instance.CreateComponent(componentName, props, containerId, clientOnly, serverOnly); + if (!string.IsNullOrEmpty(htmlTag)) + { + reactComponent.ContainerTag = htmlTag; + } + + if (!string.IsNullOrEmpty(containerClass)) + { + reactComponent.ContainerClass = containerClass; + } + + return RenderToString(writer => reactComponent.RenderHtml(writer, clientOnly, serverOnly, exceptionHandler, renderFunctions)); + } + finally + { + instance.ReturnEngineToPool(); + } + } + + /// + /// Renders the specified React component, along with its client-side initialisation code. + /// Normally you would use the method, but + /// is useful when rendering self-contained partial views. + /// + /// Type of the props + /// HTML helper + /// Name of the component + /// Props to initialise the component with + /// HTML tag to wrap the component in. Defaults to <div> + /// ID to use for the container HTML tag. Defaults to an auto-generated ID + /// Skip rendering server-side and only output client-side initialisation code. Defaults to false + /// Skip rendering React specific data-attributes, container and client-side initialisation during server side rendering. Defaults to false + /// HTML class(es) to set on the container tag + /// A custom exception handler that will be called if a component throws during a render. 
Args: (Exception ex, string componentName, string containerId) + /// Functions to call during component render + /// The component's HTML + public static IHtmlString ReactWithInit( + this IHtmlHelper htmlHelper, + string componentName, + T props, + string htmlTag = null, + string containerId = null, + bool clientOnly = false, + bool serverOnly = false, + string containerClass = null, + Action exceptionHandler = null, + IRenderFunctions renderFunctions = null + ) + { + var instance = htmlHelper.ViewContext.HttpContext.RequestServices.GetRequiredService(); + + try + { + var reactComponent = instance.CreateComponent(componentName, props, containerId, clientOnly); + if (!string.IsNullOrEmpty(htmlTag)) + { + reactComponent.ContainerTag = htmlTag; + } + + if (!string.IsNullOrEmpty(containerClass)) + { + reactComponent.ContainerClass = containerClass; + } + + return RenderToString(writer => + { + reactComponent.RenderHtml(writer, clientOnly, serverOnly, exceptionHandler: exceptionHandler, renderFunctions); + writer.WriteLine(); + WriteScriptTag(instance, writer, bodyWriter => reactComponent.RenderJavaScript(bodyWriter, waitForDOMContentLoad: true)); + }); + } + finally + { + instance.ReturnEngineToPool(); + } + } + + /// + /// Renders the JavaScript required to initialise all components client-side. This will + /// attach event handlers to the server-rendered HTML. + /// + /// JavaScript for all components + public static IHtmlString ReactInitJavaScript(this IHtmlHelper htmlHelper, bool clientOnly = false) + { + var instance = htmlHelper.ViewContext.HttpContext.RequestServices.GetRequiredService(); + + try + { + return RenderToString(writer => + { + WriteScriptTag(instance, writer, bodyWriter => instance.GetInitJavaScript(bodyWriter, clientOnly)); + }); + } + finally + { + instance.ReturnEngineToPool(); + } + } + + /// + /// Returns script tags based on the webpack asset manifest + /// + /// + /// Optional IUrlHelper instance. 
Enables the use of tilde/relative (~/) paths inside the expose-components.js file. + /// + public static IHtmlString ReactGetScriptPaths(this IHtmlHelper htmlHelper, IUrlHelper urlHelper = null) + { + var instance = htmlHelper.ViewContext.HttpContext.RequestServices.GetRequiredService(); + + string nonce = instance.Configuration.ScriptNonceProvider != null + ? $" nonce=\"{instance.Configuration.ScriptNonceProvider()}\"" + : ""; + + return new HtmlString(string.Join("", instance.GetScriptPaths() + .Select(scriptPath => $""))); + } + + /// + /// Returns style tags based on the webpack asset manifest + /// + /// + /// Optional IUrlHelper instance. Enables the use of tilde/relative (~/) paths inside the expose-components.js file. + /// + public static IHtmlString ReactGetStylePaths(this IHtmlHelper htmlHelper, IUrlHelper urlHelper = null) + { + var instance = htmlHelper.ViewContext.HttpContext.RequestServices.GetRequiredService(); + + return new HtmlString(string.Join("", instance.GetStylePaths() + .Select(stylePath => $""))); + } + + private static IHtmlString RenderToString(Action withWriter) + { + var stringWriter = _sharedStringWriter; + if (stringWriter != null) + { + stringWriter.GetStringBuilder().Clear(); + } + else + { + _sharedStringWriter = stringWriter = new StringWriter(new StringBuilder(128)); + } + + withWriter(stringWriter); + return new HtmlString(stringWriter.ToString()); + } + + private static void WriteScriptTag(IReactEnvironment instance, TextWriter writer, Action bodyWriter) + { + writer.Write(""); + + bodyWriter(writer); + + writer.Write(""); + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.AspNet/README.md b/UET/Lib/Redpoint.ThirdParty.React.AspNet/README.md new file mode 100644 index 00000000..d465a966 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.AspNet/README.md @@ -0,0 +1,5 @@ +# Redpoint.ThirdParty.React.AspNet + +This is a fork of React.NET (https://github.com/reactjs/react.net) that adds support for React v18. 
+ +If you want to use this fork in an ASP.NET Core project, this package is the one to use. \ No newline at end of file diff --git a/UET/Lib/Redpoint.ThirdParty.React.AspNet/Redpoint.ThirdParty.React.AspNet.csproj b/UET/Lib/Redpoint.ThirdParty.React.AspNet/Redpoint.ThirdParty.React.AspNet.csproj new file mode 100644 index 00000000..ab296654 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.AspNet/Redpoint.ThirdParty.React.AspNet.csproj @@ -0,0 +1,31 @@ + + + + + + React.AspNet + React.AspNet + + + + + + + + + + + + A fork of React.NET (https://github.com/reactjs/react.net) that adds support for React v18. + Redpoint.ThirdParty.React.AspNet + react, react.net + MIT + June Rhodes, Daniel Lo Nigro + + + + + 7035 + + + diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactConfigurationException.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactConfigurationException.cs new file mode 100644 index 00000000..c7a48f99 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactConfigurationException.cs @@ -0,0 +1,28 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React.Exceptions +{ + /// + /// Thrown when an error occurs while reading a site configuration file. + /// + public class ReactConfigurationException : ReactException + { + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. + public ReactConfigurationException(string message) : base(message) { } + /// + /// Initializes a new instance of the class. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference (Nothing in Visual Basic) if no inner exception is specified. 
+ public ReactConfigurationException(string message, Exception innerException) + : base(message, innerException) { } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactEngineNotFoundException.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactEngineNotFoundException.cs new file mode 100644 index 00000000..4d95352b --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactEngineNotFoundException.cs @@ -0,0 +1,21 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React.Exceptions +{ + /// + /// Thrown when no valid JavaScript engine is found. + /// + public class ReactEngineNotFoundException : ReactException + { + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. + public ReactEngineNotFoundException(string message) : base(message) { } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactException.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactException.cs new file mode 100644 index 00000000..fada5e62 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactException.cs @@ -0,0 +1,32 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React.Exceptions +{ + /// + /// Base class for all ReactJS.NET exceptions + /// + public class ReactException : Exception + { + /// + /// Initializes a new instance of the class. + /// + public ReactException() : base() { } + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. + public ReactException(string message) : base(message) { } + /// + /// Initializes a new instance of the class. 
+ /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference (Nothing in Visual Basic) if no inner exception is specified. + public ReactException(string message, Exception innerException) + : base(message, innerException) { } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactInvalidComponentException.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactInvalidComponentException.cs new file mode 100644 index 00000000..62263dcd --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactInvalidComponentException.cs @@ -0,0 +1,28 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React.Exceptions +{ + /// + /// Thrown when a non-existent component is rendered. + /// + public class ReactInvalidComponentException : ReactException + { + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. + public ReactInvalidComponentException(string message) : base(message) { } + /// + /// Initializes a new instance of the class. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference (Nothing in Visual Basic) if no inner exception is specified. 
+ public ReactInvalidComponentException(string message, Exception innerException) + : base(message, innerException) { } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactNotInitialisedException.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactNotInitialisedException.cs new file mode 100644 index 00000000..7d836828 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactNotInitialisedException.cs @@ -0,0 +1,29 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React.Exceptions +{ + /// + /// Thrown when React has not been initialised correctly. + /// + public class ReactNotInitialisedException : ReactException + { + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. + public ReactNotInitialisedException(string message) : base(message) { } + + /// + /// Initializes a new instance of the class. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference (Nothing in Visual Basic) if no inner exception is specified. + public ReactNotInitialisedException(string message, Exception innerException) + : base(message, innerException) { } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactScriptLoadException.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactScriptLoadException.cs new file mode 100644 index 00000000..1c6b3eb4 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactScriptLoadException.cs @@ -0,0 +1,29 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +namespace React.Exceptions +{ + /// + /// Thrown when an error is encountered while loading a JavaScript file. + /// + public class ReactScriptLoadException : ReactException + { + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. + public ReactScriptLoadException(string message) : base(message) { } + + /// + /// Initializes a new instance of the class. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference (Nothing in Visual Basic) if no inner exception is specified. + public ReactScriptLoadException(string message, Exception innerException) + : base(message, innerException) { } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactScriptPrecompilationNotAvailableException.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactScriptPrecompilationNotAvailableException.cs new file mode 100644 index 00000000..b93702fc --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactScriptPrecompilationNotAvailableException.cs @@ -0,0 +1,21 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React.Exceptions +{ + /// + /// Thrown when the script pre-compilation is not available. + /// + public class ReactScriptPrecompilationNotAvailableException : ReactException + { + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. 
+ public ReactScriptPrecompilationNotAvailableException(string message) : base(message) { } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactServerRenderingException.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactServerRenderingException.cs new file mode 100644 index 00000000..024712c8 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Exceptions/ReactServerRenderingException.cs @@ -0,0 +1,29 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React.Exceptions +{ + /// + /// Thrown when an error occurs during server rendering of a React component. + /// + public class ReactServerRenderingException : ReactException + { + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. + public ReactServerRenderingException(string message) : base(message) { } + + /// + /// Initializes a new instance of the class. + /// + /// The error message that explains the reason for the exception. + /// The exception that is the cause of the current exception, or a null reference (Nothing in Visual Basic) if no inner exception is specified. + public ReactServerRenderingException(string message, Exception innerException) + : base(message, innerException) { } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/FileCacheHash.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/FileCacheHash.cs new file mode 100644 index 00000000..d449b6cc --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/FileCacheHash.cs @@ -0,0 +1,81 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +using System.Security.Cryptography; +using System.Text; + +namespace React +{ + /// + /// Handles calculating a hash value for validating a file-based cache. + /// + public class FileCacheHash : IFileCacheHash + { + /// + /// Prefix used for hash line in transformed file. Used for caching. + /// + private const string HASH_PREFIX = "// @hash v3-"; + + // TODO: Do we really need to use SHA1Cng specifically? + /// + /// Algorithm for calculating file hashes + /// + private readonly HashAlgorithm _hash = SHA1.Create(); + + /// + /// Calculates a hash for the specified input + /// + /// Input string + /// Hash of the input + public string CalculateHash(string input) + { + var inputBytes = Encoding.UTF8.GetBytes(input); + var hash = _hash.ComputeHash(inputBytes); + return BitConverter.ToString(hash).Replace("-", string.Empty); + } + + /// + /// Validates that the cache's hash is valid. This is used to ensure the input has not + /// changed, and to invalidate the cache if so. + /// + /// Contents retrieved from cache + /// Hash of the input + /// true if the cache is still valid + public virtual bool ValidateHash(string cacheContents, string hash) + { + if (string.IsNullOrWhiteSpace(cacheContents)) + { + return false; + } + + // Check if first line is hash + var firstLineBreak = cacheContents.IndexOfAny(new[] { '\r', '\n' }); + if (firstLineBreak == -1) + { + return false; + } + var firstLine = cacheContents.Substring(0, firstLineBreak); + if (!firstLine.StartsWith(HASH_PREFIX)) + { + // Cache doesn't have hash - Err on the side of caution and invalidate it. 
+ return false; + } + var cacheHash = firstLine.Replace(HASH_PREFIX, string.Empty); + return cacheHash == hash; + } + + /// + /// Prepends the hash prefix to the hash + /// + /// Hash to prepend prefix to + /// Hash with prefix + public virtual string AddPrefix(string hash) + { + return HASH_PREFIX + hash; + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/FileSystemBase.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/FileSystemBase.cs new file mode 100644 index 00000000..f6598761 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/FileSystemBase.cs @@ -0,0 +1,82 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using System.Text; + +namespace React +{ + /// + /// Handles file system functionality, such as reading files. + /// + abstract public class FileSystemBase : IFileSystem + { + /// + /// Prefix for relative paths + /// + public const string RELATIVE_PREFIX = "~/"; + + /// + /// Converts a path from an application relative path (~/...) to a full filesystem path + /// + /// App-relative path of the file + /// Full path of the file + public abstract string MapPath(string relativePath); + + /// + /// Converts a path from a full filesystem path to an application relative path (~/...) + /// + /// Full path of the file + /// App-relative path of the file + public virtual string ToRelativePath(string absolutePath) + { + var root = MapPath(RELATIVE_PREFIX); + return absolutePath.Replace(root, RELATIVE_PREFIX).Replace('\\', '/'); + } + + /// + /// Reads the contents of a file as a string. 
+ /// + /// App-relative path of the file + /// Contents of the file + public virtual string ReadAsString(string relativePath) + { + return File.ReadAllText(MapPath(relativePath), Encoding.UTF8); + } + + /// + /// Writes a string to a file + /// + /// App-relative path of the file + /// Contents of the file + public virtual void WriteAsString(string relativePath, string contents) + { + File.WriteAllText(MapPath(relativePath), contents, Encoding.UTF8); + } + + /// + /// Determines if the specified file exists + /// + /// App-relative path of the file + /// true if the file exists + public virtual bool FileExists(string relativePath) + { + return File.Exists(MapPath(relativePath)); + } + + /// + /// Gets all the file paths that match the specified pattern + /// + /// Pattern to search for (eg. "~/Scripts/*.js") + /// File paths that match the pattern + public virtual IEnumerable Glob(string glob) + { + var path = MapPath(Path.GetDirectoryName(glob)); + var searchPattern = Path.GetFileName(glob); + return Directory.EnumerateFiles(path, searchPattern).Select(ToRelativePath); + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/FileSystemExtensions.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/FileSystemExtensions.cs new file mode 100644 index 00000000..9a5eee0a --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/FileSystemExtensions.cs @@ -0,0 +1,26 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// Extension methods relating to file system paths. + /// + public static class FileSystemExtensions + { + /// + /// Determines if the specified string is a glob pattern that can be used with + /// . 
+ /// + /// String + /// true if the specified string is a glob pattern + public static bool IsGlobPattern(this string input) + { + return input.Contains("*") || input.Contains("?"); + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/ICache.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/ICache.cs new file mode 100644 index 00000000..6fc01871 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/ICache.cs @@ -0,0 +1,46 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// Handles caching of data and optionally tracking dependencies + /// + public interface ICache + { + /// + /// Get an item from the cache. Returns if the item does + /// not exist. + /// + /// Type of data + /// The cache key + /// Value to return if item is not in the cache + /// Data from cache, otherwise + T Get(string key, T fallback = default(T)); + + /// + /// Sets an item in the cache. + /// + /// Type of data + /// The cache key + /// Data to cache + /// + /// Sliding expiration, if cache key is not accessed in this time period it will + /// automatically be removed from the cache + /// + /// + /// Filenames this cached item is dependent on. If any of these files change, the cache + /// will be cleared automatically + /// + void Set( + string key, + T data, + TimeSpan slidingExpiration, + IEnumerable cacheDependencyFiles = null + ); + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/IFileCacheHash.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/IFileCacheHash.cs new file mode 100644 index 00000000..cfabfbd2 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/IFileCacheHash.cs @@ -0,0 +1,38 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +namespace React +{ + /// + /// Handles calculating a hash value for validating a file-based cache. + /// + public interface IFileCacheHash + { + /// + /// Calculates a hash for the specified input + /// + /// Input string + /// Hash of the input + string CalculateHash(string input); + + /// + /// Validates that the cache's hash is valid. This is used to ensure the input has not + /// changed, and to invalidate the cache if so. + /// + /// Contents retrieved from cache + /// Hash of the input + /// true if the cache is still valid + bool ValidateHash(string cacheContents, string hash); + + /// + /// Prepends the hash prefix to the hash + /// + /// Hash to prepend prefix to + /// Hash with prefix + string AddPrefix(string hash); + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/IFileSystem.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/IFileSystem.cs new file mode 100644 index 00000000..3fdf8c0f --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/IFileSystem.cs @@ -0,0 +1,57 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// Handles file system functionality, such as reading files. + /// + public interface IFileSystem + { + /// + /// Converts a path from an application relative path (~/...) to a full filesystem path + /// + /// App-relative path of the file + /// Full path of the file + string MapPath(string relativePath); + + /// + /// Converts a path from a full filesystem path to anan application relative path (~/...) + /// + /// Full path of the file + /// App-relative path of the file + string ToRelativePath(string absolutePath); + + /// + /// Reads the contents of a file as a string. 
+ /// + /// App-relative path of the file + /// Contents of the file + string ReadAsString(string relativePath); + + /// + /// Writes a string to a file + /// + /// App-relative path of the file + /// Contents of the file + void WriteAsString(string relativePath, string contents); + + /// + /// Determines if the specified file exists + /// + /// App-relative path of the file + /// true if the file exists + bool FileExists(string relativePath); + + /// + /// Gets all the files that match the specified pattern + /// + /// Pattern to search for (eg. "~/Scripts/*.js") + /// File names that match the pattern + IEnumerable Glob(string glob); + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/IJavaScriptEngineFactory.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/IJavaScriptEngineFactory.cs new file mode 100644 index 00000000..adb731d3 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/IJavaScriptEngineFactory.cs @@ -0,0 +1,30 @@ +using JavaScriptEngineSwitcher.Core; +using JSPool; + +namespace React +{ + /// + /// Handles creation of JavaScript engines. All methods are thread-safe. + /// + public interface IJavaScriptEngineFactory + { + /// + /// Gets the JavaScript engine for the current thread. It is recommended to use + /// instead, which will pool/reuse engines. + /// + /// The JavaScript engine + IJsEngine GetEngineForCurrentThread(); + + /// + /// Disposes the JavaScript engine for the current thread. This should only be used + /// if the engine was acquired through . + /// + void DisposeEngineForCurrentThread(); + + /// + /// Gets a JavaScript engine from the pool. 
+ /// + /// The JavaScript engine + PooledJsEngine GetEngine(); + } +} \ No newline at end of file diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/IReactComponent.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactComponent.cs new file mode 100644 index 00000000..f3e19c14 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactComponent.cs @@ -0,0 +1,84 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// Represents a React JavaScript component. + /// + public interface IReactComponent + { + /// + /// Gets or sets the props for this component + /// + object Props { get; set; } + + /// + /// Gets or sets the name of the component + /// + string ComponentName { get; set; } + + /// + /// Gets or sets the unique ID for the container of this component + /// + string ContainerId { get; set; } + + /// + /// Gets or sets the HTML tag the component is wrapped in + /// + string ContainerTag { get; set; } + + /// + /// Gets or sets the HTML class for the container of this component + /// + string ContainerClass { get; set; } + + /// + /// Get or sets if this components only should be rendered server side + /// + bool ServerOnly { get; set; } + + /// + /// Renders the HTML for this component. This will execute the component server-side and + /// return the rendered HTML. + /// + /// Only renders component container. Used for client-side only rendering. + /// Only renders the common HTML mark up and not any React specific data attributes. Used for server-side only rendering. + /// A custom exception handler that will be called if a component throws during a render. 
Args: (Exception ex, string componentName, string containerId) + /// Functions to call during component render + /// HTML + string RenderHtml(bool renderContainerOnly = false, bool renderServerOnly = false, Action exceptionHandler = null, IRenderFunctions renderFunctions = null); + + /// + /// Renders the HTML for this component. This will execute the component server-side and + /// return the rendered HTML. + /// + /// The to which the content is written + /// Only renders component container. Used for client-side only rendering. + /// Only renders the common HTML mark up and not any React specific data attributes. Used for server-side only rendering. + /// A custom exception handler that will be called if a component throws during a render. Args: (Exception ex, string componentName, string containerId) + /// Functions to call during component render + /// HTML + void RenderHtml(TextWriter writer, bool renderContainerOnly = false, bool renderServerOnly = false, Action exceptionHandler = null, IRenderFunctions renderFunctions = null); + + /// + /// Renders the JavaScript required to initialise this component client-side. This will + /// initialise the React component, which includes attach event handlers to the + /// server-rendered HTML. + /// + /// JavaScript + string RenderJavaScript(bool waitForDOMContentLoad); + + /// + /// Renders the JavaScript required to initialise this component client-side. This will + /// initialise the React component, which includes attach event handlers to the + /// server-rendered HTML. + /// + /// JavaScript + void RenderJavaScript(TextWriter writer, bool waitForDOMContentLoad); + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/IReactEnvironment.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactEnvironment.cs new file mode 100644 index 00000000..9c79db6e --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactEnvironment.cs @@ -0,0 +1,114 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// Request-specific ReactJS.NET environment. This is unique to the individual request and is + /// not shared. + /// + public interface IReactEnvironment + { + /// + /// Gets the version number of ReactJS.NET + /// + string Version { get; } + + /// + /// Gets the name and version of the JavaScript engine in use by ReactJS.NET + /// + string EngineVersion { get; } + + /// + /// Executes the provided JavaScript code. + /// + /// JavaScript to execute + void Execute(string code); + + /// + /// Executes the provided JavaScript code, returning a result of the specified type. + /// + /// Type to return + /// Code to execute + /// Result of the JavaScript code + T Execute(string code); + + /// + /// Executes the provided JavaScript function, returning a result of the specified type. + /// + /// Type to return + /// JavaScript function to execute + /// Arguments to pass to function + /// Result of the JavaScript code + T Execute(string function, params object[] args); + + /// + /// Determines if the specified variable exists in the JavaScript engine + /// + /// Name of the variable + /// true if the variable exists; false otherwise + bool HasVariable(string name); + + /// + /// Creates an instance of the specified React JavaScript component. + /// + /// Type of the props + /// Name of the component + /// Props to use + /// ID to use for the container HTML tag. Defaults to an auto-generated ID + /// True if server-side rendering will be bypassed. Defaults to false. + /// True if this component only should be rendered server-side. Defaults to false. 
+ /// Skip adding to components list, which is used during GetInitJavascript + /// The component + IReactComponent CreateComponent(string componentName, T props, string containerId = null, bool clientOnly = false, bool serverOnly = false, bool skipLazyInit = false); + + /// + /// Adds the provided to the list of components to render client side. + /// + /// Component to add to client side render list + /// True if server-side rendering will be bypassed. Defaults to false. + /// The component + IReactComponent CreateComponent(IReactComponent component, bool clientOnly = false); + + /// + /// Renders the JavaScript required to initialise all components client-side. This will + /// attach event handlers to the server-rendered HTML. + /// + /// True if server-side rendering will be bypassed. Defaults to false. + /// JavaScript for all components + string GetInitJavaScript(bool clientOnly = false); + + /// + /// Returns the currently held JS engine to the pool. (no-op if engine pooling is disabled) + /// + void ReturnEngineToPool(); + + /// + /// Gets the site-wide configuration. + /// + IReactSiteConfiguration Configuration { get; } + + /// + /// Renders the JavaScript required to initialise all components client-side. This will + /// attach event handlers to the server-rendered HTML. + /// + /// The to which the content is written + /// True if server-side rendering will be bypassed. Defaults to false. 
+ /// JavaScript for all components + void GetInitJavaScript(TextWriter writer, bool clientOnly = false); + + /// + /// Returns a list of paths to scripts generated by the React app + /// + IEnumerable GetScriptPaths(); + + /// + /// Returns a list of paths to stylesheets generated by the React app + /// + IEnumerable GetStylePaths(); + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/IReactIdGenerator.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactIdGenerator.cs new file mode 100644 index 00000000..57aacdae --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactIdGenerator.cs @@ -0,0 +1,21 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// Fast component ID generator + /// + public interface IReactIdGenerator + { + /// + /// Returns a short react identifier starts with "react_". + /// + /// + string Generate(); + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/IReactSiteConfiguration.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactSiteConfiguration.cs new file mode 100644 index 00000000..9a19217a --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactSiteConfiguration.cs @@ -0,0 +1,202 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using Newtonsoft.Json; + +namespace React +{ + /// + /// Site-wide configuration for ReactJS.NET + /// + public interface IReactSiteConfiguration + { + /// + /// Adds a script to the list of scripts that are executed. This should be called for all + /// React components and their dependencies. If the script does not have any JSX in it + /// (for example, it's built using Webpack or Gulp), use + /// instead. + /// + /// + /// Name of the file to execute. 
Should be a server relative path starting with ~ (eg. + /// ~/Scripts/Awesome.js) + /// + /// This configuration, for chaining + IReactSiteConfiguration AddScript(string filename); + + /// + /// Adds a script to the list of scripts that are executed. This is the same as + /// except it does not run JSX transformation on the script and thus is + /// more efficient. + /// + /// + /// Name of the file to execute. Should be a server relative path starting with ~ (eg. + /// ~/Scripts/Awesome.js) + /// + /// The configuration, for chaining + IReactSiteConfiguration AddScriptWithoutTransform(string filename); + + /// + /// Gets a list of all the scripts that have been added to this configuration and require JSX + /// transformation to be run. + /// + IEnumerable Scripts { get; } + + /// + /// Gets a list of all the scripts that have been added to this configuration and do not + /// require JSX transformation to be run. + /// + IEnumerable ScriptsWithoutTransform { get; } + + /// + /// Gets or sets whether JavaScript engines should be reused across requests. + /// + /// + bool ReuseJavaScriptEngines { get; set; } + /// + /// Sets whether JavaScript engines should be reused across requests. + /// + IReactSiteConfiguration SetReuseJavaScriptEngines(bool value); + + /// + /// Gets or sets the configuration for JSON serializer. + /// + JsonSerializerSettings JsonSerializerSettings { get; set; } + + /// + /// Sets the configuration for json serializer. + /// + /// + /// This confiquration is used when component initialization script + /// is being generated server-side. + /// + /// The settings. + IReactSiteConfiguration SetJsonSerializerSettings(JsonSerializerSettings settings); + + /// + /// Gets or sets the number of engines to initially start when a pool is created. + /// Defaults to 10. + /// + int? StartEngines { get; set; } + /// + /// Sets the number of engines to initially start when a pool is created. + /// Defaults to 10. 
+ /// + IReactSiteConfiguration SetStartEngines(int? startEngines); + + /// + /// Gets or sets the maximum number of engines that will be created in the pool. + /// Defaults to 25. + /// + int? MaxEngines { get; set; } + /// + /// Sets the maximum number of engines that will be created in the pool. + /// Defaults to 25. + /// + IReactSiteConfiguration SetMaxEngines(int? maxEngines); + + /// + /// Gets or sets the maximum number of times an engine can be reused before it is disposed. + /// 0 is unlimited. Defaults to 100. + /// + int? MaxUsagesPerEngine { get; set; } + /// + /// Sets the maximum number of times an engine can be reused before it is disposed. + /// 0 is unlimited. Defaults to 100. + /// + IReactSiteConfiguration SetMaxUsagesPerEngine(int? maxUsagesPerEngine); + + /// + /// Gets or sets whether to allow the JavaScript pre-compilation (accelerates the + /// initialization of JavaScript engines). + /// + bool AllowJavaScriptPrecompilation { get; set; } + /// + /// Sets whether to allow the JavaScript pre-compilation (accelerates the initialization of + /// JavaScript engines). + /// + /// + IReactSiteConfiguration SetAllowJavaScriptPrecompilation(bool allowJavaScriptPrecompilation); + + /// + /// Gets or sets whether to use the debug version of React. This is slower, but gives + /// useful debugging tips. + /// + bool UseDebugReact { get; set; } + /// + /// Sets whether to use the debug version of React. This is slower, but gives + /// useful debugging tips. + /// + IReactSiteConfiguration SetUseDebugReact(bool value); + + /// + /// Gets or sets whether server-side rendering is enabled. + /// + bool UseServerSideRendering { get; set; } + /// + /// Disables server-side rendering. This is useful when debugging your scripts. + /// + IReactSiteConfiguration DisableServerSideRendering(); + + /// + /// An exception handler which will be called if a render exception is thrown. + /// If unset, unhandled exceptions will be thrown for all component renders. 
+ /// + Action ExceptionHandler { get; set; } + + /// + /// Sets an exception handler which will be called if a render exception is thrown. + /// If unset, unhandled exceptions will be thrown for all component renders. + /// + /// + /// + IReactSiteConfiguration SetExceptionHandler(Action handler); + + /// + /// A provider that returns a nonce to be used on any script tags on the page. + /// This value must match the nonce used in the Content Security Policy header on the response. + /// + Func ScriptNonceProvider { get; set; } + + /// + /// Sets a provider that returns a nonce to be used on any script tags on the page. + /// This value must match the nonce used in the Content Security Policy header on the response. + /// + /// + /// + IReactSiteConfiguration SetScriptNonceProvider(Func provider); + + /// + /// The path to the application bundles built by webpack or create-react-app + /// + string ReactAppBuildPath { get; set; } + + /// + /// Sets the path to the application bundles built by webpack or create-react-app + /// + /// + /// + IReactSiteConfiguration SetReactAppBuildPath(string reactAppBuildPath); + + /// + /// Gets or sets if the React 18+ create root api should be used for rendering / hydration. + /// If false ReactDOM.render / ReactDOM.hydrate will be used. + /// + bool UseRootAPI { get; set; } + + /// + /// Enables usage of the React 18 root API when rendering / hydrating. + /// + /// + void EnableReact18RootAPI(); + + /// + /// If set, the styles and scripts that are emitted are filtered by this function, allowing + /// you to exclude Webpack bundles that should not be emitted. 
+ /// + Func FilterResource { get; set; } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/IReactUserScriptProvider.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactUserScriptProvider.cs new file mode 100644 index 00000000..d3835157 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/IReactUserScriptProvider.cs @@ -0,0 +1,16 @@ +namespace React.Core +{ + using JavaScriptEngineSwitcher.Core; + + /// + /// + /// + public interface IReactJsEngineInitProvider + { + /// + /// + /// + /// + void InitEngine(IJsEngine engine); + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/IRenderFunctions.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/IRenderFunctions.cs new file mode 100644 index 00000000..051d3a85 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/IRenderFunctions.cs @@ -0,0 +1,46 @@ +namespace React +{ + /// + /// Functions to execute during a render request. + /// These functions will share the same Javascript context, so state can be passed around via variables. + /// + public interface IRenderFunctions + { + /// + /// Executes before component render. + /// It takes a func that accepts a Javascript code expression to evaluate, which returns the result of the expression. + /// This is useful for setting up variables that will be referenced after the render completes. + /// The func to execute + /// + void PreRender(Func executeJs); + + + /// + /// Transforms the React.createElement expression. + /// This is useful for libraries like styled components which require wrapping the root component + /// inside a helper to generate a stylesheet. + /// Example transform: React.createElement(Foo, ...) 
=> wrapComponent(React.createElement(Foo, ...)) + /// + /// The Javascript expression to wrap + /// A wrapped expression + string WrapComponent(string componentToRender); + + + /// + /// Transforms the compiled rendered component HTML + /// This is useful for libraries like emotion which take rendered component HTML and output the transformed HTML plus additional style tags + /// + /// The component HTML + /// A wrapped expression + string TransformRenderedHtml(string input); + + + /// + /// Executes after component render. + /// It takes a func that accepts a Javascript code expression to evaluate, which returns the result of the expression. + /// This is useful for reading computed state, such as generated stylesheets or a router redirect result. + /// + /// The func to execute + void PostRender(Func executeJs); + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptEngineFactory.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptEngineFactory.cs new file mode 100644 index 00000000..e9d275ff --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptEngineFactory.cs @@ -0,0 +1,389 @@ +using System.Collections.Concurrent; +using System.Diagnostics; +using System.Reflection; +using JavaScriptEngineSwitcher.Core; +using JSPool; +using Microsoft.Extensions.DependencyInjection; +using React.Core; +using React.Exceptions; + +namespace React +{ + /// + /// Handles creation of JavaScript engines. All methods are thread-safe. + /// + public class JavaScriptEngineFactory : IDisposable, IJavaScriptEngineFactory + { + /// + /// React configuration for the current site + /// + protected readonly IReactSiteConfiguration _config; + /// + /// Cache used for storing the pre-compiled scripts + /// + protected readonly ICache _cache; + /// + /// File system wrapper + /// + protected readonly IFileSystem _fileSystem; + /// + /// Function used to create new JavaScript engine instances. 
+ /// + protected readonly Func _factory; + /// + /// List of init providers to run before loading React scripts. + /// + private readonly IReactJsEngineInitProvider[] _initProviders; + + /// + /// The JavaScript Engine Switcher instance used by ReactJS.NET + /// + protected readonly IJsEngineSwitcher _jsEngineSwitcher; + /// + /// Contains all current JavaScript engine instances. One per thread, keyed on thread ID. + /// + protected readonly ConcurrentDictionary _engines + = new ConcurrentDictionary(); + /// + /// Pool of JavaScript engines to use + /// + protected IJsPool _pool; + /// + /// Whether this class has been disposed. + /// + protected bool _disposed; + /// + /// The exception that was thrown during the most recent recycle of the pool. + /// + protected Exception _scriptLoadException; + + /// + /// Initializes a new instance of the class. + /// + public JavaScriptEngineFactory( + IJsEngineSwitcher jsEngineSwitcher, + IReactSiteConfiguration config, + ICache cache, + IFileSystem fileSystem, + IServiceProvider serviceProvider + ) + { + _jsEngineSwitcher = jsEngineSwitcher; + _config = config; + _cache = cache; + _fileSystem = fileSystem; +#pragma warning disable 618 + _factory = GetFactory(_jsEngineSwitcher); +#pragma warning restore 618 + _initProviders = serviceProvider.GetServices().ToArray(); + if (_config.ReuseJavaScriptEngines) + { + _pool = CreatePool(); + } + } + + /// + /// Creates a new JavaScript engine pool. + /// + protected virtual IJsPool CreatePool() + { + var allFiles = _config.Scripts + .Concat(_config.ScriptsWithoutTransform) + .Concat(_config.ReactAppBuildPath != null + ? 
new[] { $"{_config.ReactAppBuildPath}/asset-manifest.json" } + : Enumerable.Empty()) + .Select(_fileSystem.MapPath); + + var poolConfig = new JsPoolConfig + { + EngineFactory = _factory, + Initializer = InitialiseEngine, + WatchPath = _fileSystem.MapPath("~/"), + WatchFiles = allFiles + }; + if (_config.MaxEngines != null) + { + poolConfig.MaxEngines = _config.MaxEngines.Value; + } + if (_config.StartEngines != null) + { + poolConfig.StartEngines = _config.StartEngines.Value; + } + if (_config.MaxUsagesPerEngine != null) + { + poolConfig.MaxUsagesPerEngine = _config.MaxUsagesPerEngine.Value; + } + + var pool = new JsPool(poolConfig); + // Reset the recycle exception on recycle. If there *are* errors loading the scripts + // during recycle, the errors will be caught in the initializer. + pool.Recycled += (sender, args) => _scriptLoadException = null; + return pool; + } + + /// + /// Loads standard React scripts into the engine. + /// + protected virtual void InitialiseEngine(IJsEngine engine) + { + foreach (var provider in _initProviders) + { + provider.InitEngine(engine); + } + + var thisAssembly = typeof(ReactEnvironment).GetTypeInfo().Assembly; + LoadResource(engine, "React.Core.Resources.shims.js", thisAssembly); + + LoadUserScripts(engine); + if (_scriptLoadException == null) + { + // We expect the user to have loaded their own version of React in the scripts that + // were loaded above, let's ensure that's the case. + EnsureReactLoaded(engine); + } + } + + /// + /// Loads code from embedded JavaScript resource into the engine. + /// + /// Engine to load a code from embedded JavaScript resource + /// The case-sensitive resource name + /// The assembly, which contains the embedded resource + private void LoadResource(IJsEngine engine, string resourceName, Assembly assembly) + { + if (_config.AllowJavaScriptPrecompilation + && engine.TryExecuteResourceWithPrecompilation(_cache, resourceName, assembly)) + { + // Do nothing. 
+ } + else + { + engine.ExecuteResource(resourceName, assembly); + } + } + + /// + /// Loads any user-provided scripts. Only scripts that don't need JSX transformation can + /// run immediately here. JSX files are loaded in ReactEnvironment. + /// + /// Engine to load scripts into + private void LoadUserScripts(IJsEngine engine) + { + try + { + IEnumerable manifestFiles = Enumerable.Empty(); + if (_config.ReactAppBuildPath != null) + { + var manifest = ReactAppAssetManifest.LoadManifest(_config, _fileSystem, _cache, useCacheRead: false); + manifestFiles = (manifest?.Entrypoints?.Where(x => x != null && x.EndsWith(".js"))) ?? Enumerable.Empty(); + } + + foreach (var file in _config.ScriptsWithoutTransform.Concat(manifestFiles)) + { + try + { + if (_config.AllowJavaScriptPrecompilation + && engine.TryExecuteFileWithPrecompilation(_cache, _fileSystem, file)) + { + // Do nothing. + } + else + { + engine.ExecuteFile(_fileSystem, file); + } + } + catch (JsException ex) + { + // We can't simply rethrow the exception here, as it's possible this is running + // on a background thread (ie. as a response to a file changing). If we did + // throw the exception here, it would terminate the entire process. Instead, + // save the exception, and then just rethrow it later when getting the engine. + _scriptLoadException = new ReactScriptLoadException(string.Format( + "Error while loading \"{0}\": {1}", + file, + ex.Message + ), ex); + } + } + } + catch (IOException ex) + { + // Files could be in the process of being rebuilt by JS build tooling + _scriptLoadException = new ReactScriptLoadException(ex.Message, ex); ; + } + } + + /// + /// Ensures that React has been correctly loaded into the specified engine. 
+ /// + /// Engine to check + private void EnsureReactLoaded(IJsEngine engine) + { + var globalsString = engine.CallFunction("ReactNET_initReact"); + string[] globals = globalsString.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries); + + if (globals.Length != 0) + { + _scriptLoadException = new ReactNotInitialisedException( + $"React has not been loaded correctly: missing ({string.Join(", ", globals)})." + + "Please expose your version of React as global variables named " + + "'React', 'ReactDOM', and 'ReactDOMServer', or enable the 'LoadReact'" + + "configuration option to use the built-in version of React." + ); + } + } + + /// + /// Gets the JavaScript engine for the current thread. It is recommended to use + /// instead, which will pool/reuse engines. + /// + /// The JavaScript engine + public virtual IJsEngine GetEngineForCurrentThread() + { + EnsureValidState(); + return _engines.GetOrAdd(Thread.CurrentThread.ManagedThreadId, id => + { + var engine = _factory(); + InitialiseEngine(engine); + EnsureValidState(); + return engine; + }); + } + + /// + /// Disposes the JavaScript engine for the current thread. + /// + public virtual void DisposeEngineForCurrentThread() + { + IJsEngine engine; + if (_engines.TryRemove(Thread.CurrentThread.ManagedThreadId, out engine)) + { + if (engine != null) + { + engine.Dispose(); + } + } + } + + /// + /// Gets a JavaScript engine from the pool. + /// + /// The JavaScript engine + public virtual PooledJsEngine GetEngine() + { + EnsureValidState(); + return _pool.GetEngine(); + } + + /// + /// Gets a factory for the most appropriate JavaScript engine for the current environment. + /// The first functioning JavaScript engine with the lowest priority will be used. 
+ /// + /// Function to create JavaScript engine + private static Func GetFactory(IJsEngineSwitcher jsEngineSwitcher) + { + string defaultEngineName = jsEngineSwitcher.DefaultEngineName; + if (!string.IsNullOrWhiteSpace(defaultEngineName)) + { + var engineFactory = jsEngineSwitcher.EngineFactories.Get(defaultEngineName); + if (engineFactory != null) + { + return engineFactory.CreateEngine; + } + else + { + throw new ReactEngineNotFoundException( + "Could not find a factory that creates an instance of the JavaScript " + + "engine with name `" + defaultEngineName + "`."); + } + } + + if (jsEngineSwitcher.EngineFactories.Count == 0) + { + throw new ReactException("No JS engines were registered. Visit https://reactjs.net/docs for more information."); + } + + var exceptionMessages = new List(); + foreach (var engineFactory in jsEngineSwitcher.EngineFactories.GetRegisteredFactories()) + { + IJsEngine engine = null; + try + { + engine = engineFactory.CreateEngine(); + if (EngineIsUsable(engine)) + { + // Success! Use this one. + return engineFactory.CreateEngine; + } + } + catch (JsEngineLoadException ex) + { + Trace.WriteLine(string.Format("Error initialising {0}: {1}", engineFactory, ex.Message)); + exceptionMessages.Add(ex.Message); + } + catch (Exception ex) + { + Trace.WriteLine(string.Format("Error initialising {0}: {1}", engineFactory, ex)); + exceptionMessages.Add(ex.ToString()); + } + finally + { + if (engine != null) + { + engine.Dispose(); + } + } + } + + throw new ReactEngineNotFoundException("There was an error initializing the registered JS engines. " + string.Join(Environment.NewLine, exceptionMessages)); + } + + /// + /// Performs a sanity check to ensure the specified engine type is usable. 
+ /// + /// Engine to test + /// + private static bool EngineIsUsable(IJsEngine engine) + { + // Perform a sanity test to ensure this engine is usable + return engine.Evaluate("1 + 1") == 2; + } + + /// + /// Clean up all engines + /// + public virtual void Dispose() + { + _disposed = true; + foreach (var engine in _engines) + { + if (engine.Value != null) + { + engine.Value.Dispose(); + } + } + if (_pool != null) + { + _pool.Dispose(); + _pool = null; + } + } + + /// + /// Ensures that this object has not been disposed, and that no error was thrown while + /// loading the scripts. + /// + public void EnsureValidState() + { + if (_disposed) + { + throw new ObjectDisposedException(GetType().Name); + } + if (_scriptLoadException != null) + { + // This means an exception occurred while loading the script (eg. syntax error in the file) + throw _scriptLoadException; + } + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptEnginePrecompilationUtils.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptEnginePrecompilationUtils.cs new file mode 100644 index 00000000..aff35e2b --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptEnginePrecompilationUtils.cs @@ -0,0 +1,116 @@ +using System.Reflection; +using JavaScriptEngineSwitcher.Core; +using React.Exceptions; + +namespace React +{ + /// + /// Helper methods for pre-compilation features of the JavaScript engine environment. 
+ /// + public static class JavaScriptEnginePrecompilationUtils + { + /// + /// Cache key for the script resource pre-compilation + /// + private const string PRECOMPILED_JS_RESOURCE_CACHE_KEY = "PRECOMPILED_JS_RESOURCE_{0}"; + /// + /// Cache key for the script file pre-compilation + /// + private const string PRECOMPILED_JS_FILE_CACHE_KEY = "PRECOMPILED_JS_FILE_{0}"; + /// + /// Value that indicates whether a cache entry, that contains a precompiled script, should be + /// evicted if it has not been accessed in a given span of time + /// + private readonly static TimeSpan PRECOMPILED_JS_CACHE_ENTRY_SLIDING_EXPIRATION = TimeSpan.FromMinutes(30); + + /// + /// Tries to execute a code from JavaScript file with pre-compilation. + /// + /// Engine to execute code from JavaScript file with pre-compilation + /// Cache used for storing the pre-compiled scripts + /// File system wrapper + /// Path to the JavaScript file + /// Delegate that loads a code from specified JavaScript file + /// true if can perform a script pre-compilation; otherwise, false. + public static bool TryExecuteFileWithPrecompilation(this IJsEngine engine, ICache cache, + IFileSystem fileSystem, string path, Func scriptLoader = null) + { + EnsurePrecompilationAvailability(engine, cache); + + var cacheKey = string.Format(PRECOMPILED_JS_FILE_CACHE_KEY, path); + var precompiledScript = cache.Get(cacheKey); + + if (precompiledScript == null) + { + var contents = scriptLoader != null ? scriptLoader(path) : fileSystem.ReadAsString(path); + precompiledScript = engine.Precompile(contents, path); + var fullPath = fileSystem.MapPath(path); + cache.Set( + cacheKey, + precompiledScript, + slidingExpiration: PRECOMPILED_JS_CACHE_ENTRY_SLIDING_EXPIRATION, + cacheDependencyFiles: new[] { fullPath } + ); + } + + engine.Execute(precompiledScript); + + return true; + } + + /// + /// Tries to execute a code from embedded JavaScript resource with pre-compilation. 
+ /// + /// Engine to execute a code from embedded JavaScript resource with pre-compilation + /// Cache used for storing the pre-compiled scripts + /// The case-sensitive resource name + /// The assembly, which contains the embedded resource + /// true if can perform a script pre-compilation; otherwise, false. + public static bool TryExecuteResourceWithPrecompilation(this IJsEngine engine, ICache cache, + string resourceName, Assembly assembly) + { + EnsurePrecompilationAvailability(engine, cache); + + var cacheKey = string.Format(PRECOMPILED_JS_RESOURCE_CACHE_KEY, resourceName); + var precompiledScript = cache.Get(cacheKey); + + if (precompiledScript == null) + { + precompiledScript = engine.PrecompileResource(resourceName, assembly); + cache.Set( + cacheKey, + precompiledScript, + slidingExpiration: PRECOMPILED_JS_CACHE_ENTRY_SLIDING_EXPIRATION + ); + } + + engine.Execute(precompiledScript); + + return true; + } + + /// + /// Ensures that the script pre-compilation is available. + /// + /// Instance of the JavaScript engine + /// Cache used for storing the pre-compiled scripts + private static void EnsurePrecompilationAvailability(IJsEngine engine, ICache cache) + { + if (!engine.SupportsScriptPrecompilation) + { + throw new ReactScriptPrecompilationNotAvailableException(string.Format( + "The {0} version {1} does not support the script pre-compilation.", + engine.Name, + engine.Version + )); + } + + if (cache is NullCache) + { + throw new ReactScriptPrecompilationNotAvailableException(string.Format( + "Usage of the script pre-compilation without caching does not make sense." + )); + } + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptEngineUtils.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptEngineUtils.cs new file mode 100644 index 00000000..dd945658 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptEngineUtils.cs @@ -0,0 +1,66 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. 
+ * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using JavaScriptEngineSwitcher.Core; +using JavaScriptEngineSwitcher.Core.Helpers; +using Newtonsoft.Json; +using React.Exceptions; + +namespace React +{ + /// + /// Various helper methods for the JavaScript engine environment. + /// + public static class JavaScriptEngineUtils + { + /// + /// Executes a code from JavaScript file. + /// + /// Engine to execute code from JavaScript file + /// File system wrapper + /// Path to the JavaScript file + public static void ExecuteFile(this IJsEngine engine, IFileSystem fileSystem, string path) + { + var contents = fileSystem.ReadAsString(path); + engine.Execute(contents, path); + } + + /// + /// Calls a JavaScript function using the specified engine. If is + /// not a scalar type, the function is assumed to return a string of JSON that can be + /// parsed as that type. + /// + /// Type returned by function + /// Engine to execute function with + /// Name of the function to execute + /// Arguments to pass to function + /// Value returned by function + public static T CallFunctionReturningJson(this IJsEngine engine, string function, params object[] args) + { + if (ValidationHelpers.IsSupportedType(typeof(T))) + { + // Type is supported directly (ie. a scalar type like string/int/bool) + // Just execute the function directly. + return engine.CallFunction(function, args); + } + // The type is not a scalar type. Assume the function will return its result as + // JSON. 
+ var resultJson = engine.CallFunction(function, args); + try + { + return JsonConvert.DeserializeObject(resultJson); + } + catch (JsonReaderException ex) + { + throw new ReactException(string.Format( + "{0} did not return valid JSON: {1}.\n\n{2}", + function, ex.Message, resultJson + ), ex); + } + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptWithSourceMap.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptWithSourceMap.cs new file mode 100644 index 00000000..0bee3a5e --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/JavaScriptWithSourceMap.cs @@ -0,0 +1,32 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// Represents the result of a Babel transformation along with its + /// corresponding source map. + /// + [Serializable] + public class JavaScriptWithSourceMap + { + /// + /// The transformed result + /// + public string Code { get; set; } + + /// + /// The hash of the input file. + /// + public string Hash { get; set; } + + /// + /// The source map for this code + /// + public SourceMap SourceMap { get; set; } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/NullCache.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/NullCache.cs new file mode 100644 index 00000000..8b7d5c6c --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/NullCache.cs @@ -0,0 +1,47 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// Implementation of that never caches. + /// + public class NullCache : ICache + { + /// + /// Get an item from the cache. Returns if the item does + /// not exist. 
+ /// + /// Type of data + /// The cache key + /// Value to return if item is not in the cache + /// Data from cache, otherwise + public T Get(string key, T fallback = default(T)) + { + return fallback; + } + + /// + /// Sets an item in the cache. + /// + /// Type of data + /// The cache key + /// Data to cache + /// + /// Sliding expiration, if cache key is not accessed in this time period it will + /// automatically be removed from the cache + /// + /// + /// Filenames this cached item is dependent on. If any of these files change, the cache + /// will be cleared automatically + /// + public void Set(string key, T data, TimeSpan slidingExpiration, IEnumerable cacheDependencyFiles = null) + { + // no-op + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/README.md b/UET/Lib/Redpoint.ThirdParty.React.Core/README.md new file mode 100644 index 00000000..b6d307a7 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/README.md @@ -0,0 +1,5 @@ +# Redpoint.ThirdParty.React.AspNet + +This is a fork of React.NET (https://github.com/reactjs/react.net) that adds support for React v18. + +If you want to use this fork in an ASP.NET Core project, use the `Redpoint.ThirdParty.React.AspNet` package. \ No newline at end of file diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/ReactAppAssetManifest.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactAppAssetManifest.cs new file mode 100644 index 00000000..94d86c36 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactAppAssetManifest.cs @@ -0,0 +1,35 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. 
+ */ + +using Newtonsoft.Json; + +namespace React +{ + internal class ReactAppAssetManifest + { + public Dictionary Files { get; set; } + public List Entrypoints { get; set; } + + public static ReactAppAssetManifest LoadManifest(IReactSiteConfiguration config, IFileSystem fileSystem, ICache cache, bool useCacheRead) + { + string cacheKey = "REACT_APP_MANIFEST"; + + if (useCacheRead) + { + var cachedManifest = cache.Get(cacheKey); + if (cachedManifest != null) + return cachedManifest; + } + + var manifestString = fileSystem.ReadAsString($"{config.ReactAppBuildPath}/asset-manifest.json"); + var manifest = JsonConvert.DeserializeObject(manifestString); + + cache.Set(cacheKey, manifest, TimeSpan.FromHours(1)); + return manifest; + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/ReactComponent.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactComponent.cs new file mode 100644 index 00000000..485fdff9 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactComponent.cs @@ -0,0 +1,363 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using System.Collections.Concurrent; +using System.Text; +using System.Text.RegularExpressions; +using JavaScriptEngineSwitcher.Core; +using Newtonsoft.Json; +using React.Exceptions; + +namespace React +{ + /// + /// Represents a React JavaScript component. + /// + public class ReactComponent : IReactComponent + { + private static readonly ConcurrentDictionary _componentNameValidCache = new ConcurrentDictionary(StringComparer.Ordinal); + + [ThreadStatic] + private static StringWriter _sharedStringWriter; + + /// + /// Regular expression used to validate JavaScript identifiers. Used to ensure component + /// names are valid. 
+ /// Based off https://gist.github.com/Daniel15/3074365 + /// + private static readonly Regex _identifierRegex = new Regex(@"^[a-zA-Z_$][0-9a-zA-Z_$]*(?:\[(?:"".+""|\'.+\'|\d+)\])*?$", RegexOptions.Compiled); + + /// + /// Environment this component has been created in + /// + protected readonly IReactEnvironment _environment; + + /// + /// Global site configuration + /// + protected readonly IReactSiteConfiguration _configuration; + + /// + /// Raw props for this component + /// + protected object _props; + + /// + /// JSON serialized props for this component + /// + protected string _serializedProps; + + /// + /// Gets or sets the name of the component + /// + public string ComponentName { get; set; } + + /// + /// Gets or sets the unique ID for the DIV container of this component + /// + public string ContainerId { get; set; } + + /// + /// Gets or sets the HTML tag the component is wrapped in + /// + public string ContainerTag { get; set; } + + /// + /// Gets or sets the HTML class for the container of this component + /// + public string ContainerClass { get; set; } + + /// + /// Get or sets if this components only should be rendered server side + /// + public bool ServerOnly { get; set; } + + /// + /// Gets or sets the props for this component + /// + public object Props + { + get { return _props; } + set + { + _props = value; + _serializedProps = JsonConvert.SerializeObject( + value, + _configuration.JsonSerializerSettings); + } + } + /// + /// Get or sets if this components only should be rendered client side + /// + public bool ClientOnly { get; set; } + + /// + /// Initializes a new instance of the class. + /// + /// The environment. + /// Site-wide configuration. + /// React Id generator. + /// Name of the component. 
+ /// The ID of the container DIV for this component + public ReactComponent(IReactEnvironment environment, IReactSiteConfiguration configuration, IReactIdGenerator reactIdGenerator, string componentName, string containerId) + { + EnsureComponentNameValid(componentName); + _environment = environment; + _configuration = configuration; + ComponentName = componentName; + ContainerId = string.IsNullOrEmpty(containerId) ? reactIdGenerator.Generate() : containerId; + ContainerTag = "div"; + } + + /// + /// Renders the HTML for this component. This will execute the component server-side and + /// return the rendered HTML. + /// + /// Only renders component container. Used for client-side only rendering. + /// Only renders the common HTML mark up and not any React specific data attributes. Used for server-side only rendering. + /// A custom exception handler that will be called if a component throws during a render. Args: (Exception ex, string componentName, string containerId) + /// Functions to call during component render + /// HTML + public virtual string RenderHtml(bool renderContainerOnly = false, bool renderServerOnly = false, Action exceptionHandler = null, IRenderFunctions renderFunctions = null) + { + return GetStringFromWriter(renderHtmlWriter => RenderHtml(renderHtmlWriter, renderContainerOnly, renderServerOnly, exceptionHandler, renderFunctions)); + } + + /// + /// Renders the HTML for this component. This will execute the component server-side and + /// return the rendered HTML. + /// + /// The to which the content is written + /// Only renders component container. Used for client-side only rendering. + /// Only renders the common HTML mark up and not any React specific data attributes. Used for server-side only rendering. + /// A custom exception handler that will be called if a component throws during a render. 
Args: (Exception ex, string componentName, string containerId) + /// Functions to call during component render + /// HTML + public virtual void RenderHtml(TextWriter writer, bool renderContainerOnly = false, bool renderServerOnly = false, Action exceptionHandler = null, IRenderFunctions renderFunctions = null) + { + if (!_configuration.UseServerSideRendering) + { + renderContainerOnly = true; + } + + if (!renderContainerOnly) + { + EnsureComponentExists(); + } + + var html = string.Empty; + if (!renderContainerOnly) + { + var stringWriter = _sharedStringWriter; + if (stringWriter != null) + { + stringWriter.GetStringBuilder().Clear(); + } + else + { + _sharedStringWriter = stringWriter = new StringWriter(new StringBuilder(_serializedProps.Length + 128)); + } + + try + { + stringWriter.Write(renderServerOnly ? "ReactDOMServer.renderToStaticMarkup(" : "ReactDOMServer.renderToString("); + if (renderFunctions != null) + { + stringWriter.Write(renderFunctions.WrapComponent(GetStringFromWriter(componentInitWriter => WriteComponentInitialiser(componentInitWriter)))); + } + else + { + WriteComponentInitialiser(stringWriter); + } + stringWriter.Write(')'); + + if (renderFunctions != null) + { + renderFunctions.PreRender(x => _environment.Execute(x)); + html = _environment.Execute(renderFunctions.TransformRenderedHtml(stringWriter.ToString())); + renderFunctions.PostRender(x => _environment.Execute(x)); + } + else + { + html = _environment.Execute(stringWriter.ToString()); + } + + if (renderServerOnly) + { + writer.Write(html); + return; + } + } + catch (JsException ex) + { + if (exceptionHandler == null) + { + exceptionHandler = _configuration.ExceptionHandler; + } + + exceptionHandler(ex, ComponentName, ContainerId); + } + } + + writer.Write('<'); + writer.Write(ContainerTag); + writer.Write(" id=\""); + writer.Write(ContainerId); + writer.Write('"'); + if (!string.IsNullOrEmpty(ContainerClass)) + { + writer.Write(" class=\""); + writer.Write(ContainerClass); + 
writer.Write('"'); + } + + writer.Write('>'); + writer.Write(html); + writer.Write("'); + } + + /// + /// Renders the JavaScript required to initialise this component client-side. This will + /// initialise the React component, which includes attach event handlers to the + /// server-rendered HTML. + /// + /// JavaScript + public virtual string RenderJavaScript(bool waitForDOMContentLoad) + { + return GetStringFromWriter(renderJsWriter => RenderJavaScript(renderJsWriter, waitForDOMContentLoad)); + } + + /// + /// Renders the JavaScript required to initialise this component client-side. This will + /// initialise the React component, which includes attach event handlers to the + /// server-rendered HTML. + /// + /// The to which the content is written + /// Delays the component init until the page load event fires. Useful if the component script tags are located after the call to Html.ReactWithInit. + /// JavaScript + public virtual void RenderJavaScript(TextWriter writer, bool waitForDOMContentLoad) + { + if (waitForDOMContentLoad) + { + writer.Write("window.addEventListener('DOMContentLoaded', function() {"); + } + + if (_configuration.UseRootAPI) + { + WriteComponentInitialization(writer); + } + else + { + WriteLegacyComponentInitialization(writer); + } + + if (waitForDOMContentLoad) + { + writer.Write("});"); + } + } + + /// + /// Writes initialization code using the React 18 root API + /// + private void WriteComponentInitialization(TextWriter writer) + { + var hydrate = _configuration.UseServerSideRendering && !ClientOnly; + if (hydrate) + { + writer.Write("ReactDOM.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED.usingClientEntryPoint = true;"); + writer.Write("ReactDOM.hydrateRoot("); + writer.Write("document.getElementById(\""); + writer.Write(ContainerId); + writer.Write("\")"); + writer.Write(", "); + WriteComponentInitialiser(writer); + writer.Write(")"); + } + else + { + writer.Write("ReactDOM.createRoot("); + 
writer.Write("document.getElementById(\""); + writer.Write(ContainerId); + writer.Write("\"))"); + writer.Write(".render("); + WriteComponentInitialiser(writer); + writer.Write(")"); + } + } + + /// + /// Writes initialization code using the old ReactDOM.render / ReactDOM.hydrate APIs. + /// + private void WriteLegacyComponentInitialization(TextWriter writer) + { + writer.Write( + !_configuration.UseServerSideRendering || ClientOnly ? "ReactDOM.render(" : "ReactDOM.hydrate("); + WriteComponentInitialiser(writer); + writer.Write(", document.getElementById(\""); + writer.Write(ContainerId); + writer.Write("\"))"); + } + + /// + /// Ensures that this component exists in global scope + /// + protected virtual void EnsureComponentExists() + { + // This is safe as componentName was validated via EnsureComponentNameValid() + var componentExists = _environment.Execute(string.Format( + "typeof {0} !== 'undefined'", + ComponentName + )); + if (!componentExists) + { + throw new ReactInvalidComponentException(string.Format( + "Could not find a component named '{0}'. 
Did you forget to add it to " + + "App_Start\\ReactConfig.cs?", + ComponentName + )); + } + } + + /// + /// Gets the JavaScript code to initialise the component + /// + /// The to which the content is written + protected virtual void WriteComponentInitialiser(TextWriter writer) + { + writer.Write("React.createElement("); + writer.Write(ComponentName); + writer.Write(", "); + writer.Write(_serializedProps); + writer.Write(')'); + } + + /// + /// Validates that the specified component name is valid + /// + /// + internal static void EnsureComponentNameValid(string componentName) + { + var isValid = _componentNameValidCache.GetOrAdd(componentName, compName => compName.Split('.').All(segment => _identifierRegex.IsMatch(segment))); + if (!isValid) + { + throw new ReactInvalidComponentException($"Invalid component name '{componentName}'"); + } + } + + private string GetStringFromWriter(Action fnWithTextWriter) + { + using (var textWriter = new StringWriter()) + { + fnWithTextWriter(textWriter); + return textWriter.ToString(); + } + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/ReactEnvironment.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactEnvironment.cs new file mode 100644 index 00000000..4710a17b --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactEnvironment.cs @@ -0,0 +1,332 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using System.Reflection; +using JavaScriptEngineSwitcher.Core; +using JSPool; + +namespace React +{ + /// + /// Request-specific ReactJS.NET environment. This is unique to the individual request and is + /// not shared. 
+ /// + public class ReactEnvironment : IReactEnvironment, IDisposable + { + /// + /// JavaScript variable set when user-provided scripts have been loaded + /// + protected const string USER_SCRIPTS_LOADED_KEY = "_ReactNET_UserScripts_Loaded"; + /// + /// Stack size to use for JSXTransformer if the default stack is insufficient + /// + protected const int LARGE_STACK_SIZE = 2 * 1024 * 1024; + + /// + /// Factory to create JavaScript engines + /// + protected readonly IJavaScriptEngineFactory _engineFactory; + /// + /// Site-wide configuration + /// + protected readonly IReactSiteConfiguration _config; + /// + /// Cache used for storing compiled JSX + /// + protected readonly ICache _cache; + /// + /// File system wrapper + /// + protected readonly IFileSystem _fileSystem; + /// + /// Hash algorithm for file-based cache + /// + protected readonly IFileCacheHash _fileCacheHash; + /// + /// React Id generator + /// + private readonly IReactIdGenerator _reactIdGenerator; + + /// + /// Version number of ReactJS.NET + /// + protected readonly Lazy _version = new Lazy(GetVersion); + + /// + /// Contains an engine acquired from a pool of engines. Only used if + /// is enabled. + /// + protected Lazy _engineFromPool; + + /// + /// List of all components instantiated in this environment + /// + protected readonly IList _components = new List(); + + /// + /// Initializes a new instance of the class. 
+ /// + /// The JavaScript engine factory + /// The site-wide configuration + /// The cache to use for JSX compilation + /// File system wrapper + /// Hash algorithm for file-based cache + /// React ID generator + public ReactEnvironment( + IJavaScriptEngineFactory engineFactory, + IReactSiteConfiguration config, + ICache cache, + IFileSystem fileSystem, + IFileCacheHash fileCacheHash, + IReactIdGenerator reactIdGenerator) + { + _engineFactory = engineFactory; + _config = config; + _cache = cache; + _fileSystem = fileSystem; + _fileCacheHash = fileCacheHash; + _reactIdGenerator = reactIdGenerator; + _engineFromPool = new Lazy(() => _engineFactory.GetEngine()); + } + + /// + /// Gets the JavaScript engine to use for this environment. + /// + protected virtual IJsEngine Engine + { + get + { + return _config.ReuseJavaScriptEngines + ? _engineFromPool.Value + : _engineFactory.GetEngineForCurrentThread(); + } + } + + /// + /// Gets the version of the JavaScript engine in use by ReactJS.NET + /// + public virtual string EngineVersion + { + get { return Engine.Name + " " + Engine.Version; } + } + + /// + /// Gets the version number of ReactJS.NET + /// + public virtual string Version + { + get { return _version.Value; } + } + + /// + /// Ensures any user-provided scripts have been loaded. This only loads JSX files; files + /// that need no transformation are loaded in JavaScriptEngineFactory. + /// + protected virtual void EnsureUserScriptsLoaded() + { + // We no longer do Babel transpilation. + Engine.SetVariableValue(USER_SCRIPTS_LOADED_KEY, true); + return; + } + + /// + /// Executes the provided JavaScript code. + /// + /// JavaScript to execute + public virtual void Execute(string code) + { + Engine.Execute(code); + } + + /// + /// Executes the provided JavaScript code, returning a result of the specified type. 
+ /// + /// Type to return + /// Code to execute + /// Result of the JavaScript code + public virtual T Execute(string code) + { + return Engine.Evaluate(code); + } + + /// + /// Executes the provided JavaScript function, returning a result of the specified type. + /// + /// Type to return + /// JavaScript function to execute + /// Arguments to pass to function + /// Result of the JavaScript code + public virtual T Execute(string function, params object[] args) + { + return Engine.CallFunctionReturningJson(function, args); + } + + /// + /// Determines if the specified variable exists in the JavaScript engine + /// + /// Name of the variable + /// true if the variable exists; false otherwise + public virtual bool HasVariable(string name) + { + return Engine.HasVariable(name); + } + + /// + /// Creates an instance of the specified React JavaScript component. + /// + /// Type of the props + /// Name of the component + /// Props to use + /// ID to use for the container HTML tag. Defaults to an auto-generated ID + /// True if server-side rendering will be bypassed. Defaults to false. + /// True if this component only should be rendered server-side. Defaults to false. + /// Skip adding to components list, which is used during GetInitJavascript + /// The component + public virtual IReactComponent CreateComponent(string componentName, T props, string containerId = null, bool clientOnly = false, bool serverOnly = false, bool skipLazyInit = false) + { + if (!clientOnly) + { + EnsureUserScriptsLoaded(); + } + + var component = new ReactComponent(this, _config, _reactIdGenerator, componentName, containerId) + { + ClientOnly = clientOnly, + Props = props, + ServerOnly = serverOnly + }; + + if (!skipLazyInit) + { + _components.Add(component); + } + return component; + } + + /// + /// Adds the provided to the list of components to render client side. + /// + /// Component to add to client side render list + /// True if server-side rendering will be bypassed. Defaults to false. 
+ /// The component + public virtual IReactComponent CreateComponent(IReactComponent component, bool clientOnly = false) + { + if (!clientOnly) + { + EnsureUserScriptsLoaded(); + } + + _components.Add(component); + return component; + } + + /// + /// Renders the JavaScript required to initialise all components client-side. This will + /// attach event handlers to the server-rendered HTML. + /// + /// True if server-side rendering will be bypassed. Defaults to false. + /// JavaScript for all components + public virtual string GetInitJavaScript(bool clientOnly = false) + { + using (var writer = new StringWriter()) + { + GetInitJavaScript(writer, clientOnly); + return writer.ToString(); + } + } + + /// + /// Renders the JavaScript required to initialise all components client-side. This will + /// attach event handlers to the server-rendered HTML. + /// + /// The to which the content is written + /// True if server-side rendering will be bypassed. Defaults to false. + /// JavaScript for all components + public virtual void GetInitJavaScript(TextWriter writer, bool clientOnly = false) + { + // Propagate any server-side console.log calls to corresponding client-side calls. 
+ if (!clientOnly && _components.Count != 0) + { + var consoleCalls = Execute("console.getCalls()"); + writer.Write(consoleCalls); + } + + foreach (var component in _components) + { + if (!component.ServerOnly) + { + component.RenderJavaScript(writer, waitForDOMContentLoad: false); + writer.WriteLine(';'); + } + } + } + + private ReactAppAssetManifest GetAppManifest() => ReactAppAssetManifest.LoadManifest(_config, _fileSystem, _cache, useCacheRead: true); + + /// + /// Returns a list of paths to scripts generated by the React app + /// + public virtual IEnumerable GetScriptPaths() + { + return GetAppManifest().Entrypoints + .Where(path => path.EndsWith(".js")) + .Where(path => _config.FilterResource == null || _config.FilterResource(path)); + } + + /// + /// Returns a list of paths to stylesheets generated by the React app + /// + public virtual IEnumerable GetStylePaths() + { + return GetAppManifest().Entrypoints + .Where(path => path.EndsWith(".css")) + .Where(path => _config.FilterResource == null || _config.FilterResource(path)); + } + + /// + /// Gets the ReactJS.NET version number. Use instead. + /// + private static string GetVersion() + { + var assembly = typeof(ReactEnvironment).GetTypeInfo().Assembly; + var rawVersion = assembly.GetCustomAttribute().Version; + var lastDot = rawVersion.LastIndexOf('.'); + var version = rawVersion.Substring(0, lastDot); + var build = rawVersion.Substring(lastDot + 1); + return string.Format("{0} (build {1})", version, build); + } + + /// + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// + public virtual void Dispose() + { + _engineFactory.DisposeEngineForCurrentThread(); + ReturnEngineToPool(); + } + + /// + /// Returns the currently held JS engine to the pool. 
(no-op if engine pooling is disabled) + /// + public void ReturnEngineToPool() + { + if (_engineFromPool.IsValueCreated) + { + _engineFromPool.Value.Dispose(); + _engineFromPool = new Lazy(() => _engineFactory.GetEngine()); + } + } + + /// + /// Gets the site-wide configuration. + /// + public virtual IReactSiteConfiguration Configuration + { + get { return _config; } + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/ReactIdGenerator.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactIdGenerator.cs new file mode 100644 index 00000000..1f5efbe2 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactIdGenerator.cs @@ -0,0 +1,63 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// React ID generator. + /// + public class ReactIdGenerator : IReactIdGenerator + { + private static readonly string _encode32Chars = "0123456789ABCDEFGHIJKLMNOPQRSTUV"; + + private static long _random = DateTime.UtcNow.Ticks; + + private static readonly char[] reactPrefix = "react_".ToCharArray(); + + /// + /// "react_".Length = 6 + 13 random symbols + /// + private const int reactIdLength = 19; + + [ThreadStatic] + private static char[] _chars; + + /// + /// Returns a short react identifier starts with "react_". 
+ /// + /// + public string Generate() + { + var chars = _chars; + if (chars == null) + { + _chars = chars = new char[reactIdLength]; + Array.Copy(reactPrefix, 0, chars, 0, reactPrefix.Length); + } + + var id = Interlocked.Increment(ref _random); + + // from 6 because "react_".Length == 6, _encode32Chars.Length == 32 (base32), + // base32 characters are 5 bits in length and from long (64 bits) we can get 13 symbols + chars[6] = _encode32Chars[(int)(id >> 60) & 31]; + chars[7] = _encode32Chars[(int)(id >> 55) & 31]; + chars[8] = _encode32Chars[(int)(id >> 50) & 31]; + chars[9] = _encode32Chars[(int)(id >> 45) & 31]; + chars[10] = _encode32Chars[(int)(id >> 40) & 31]; + chars[11] = _encode32Chars[(int)(id >> 35) & 31]; + chars[12] = _encode32Chars[(int)(id >> 30) & 31]; + chars[13] = _encode32Chars[(int)(id >> 25) & 31]; + chars[14] = _encode32Chars[(int)(id >> 20) & 31]; + chars[15] = _encode32Chars[(int)(id >> 15) & 31]; + chars[16] = _encode32Chars[(int)(id >> 10) & 31]; + chars[17] = _encode32Chars[(int)(id >> 5) & 31]; + chars[18] = _encode32Chars[(int)id & 31]; + + return new string(chars, 0, reactIdLength); + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/ReactSiteConfiguration.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactSiteConfiguration.cs new file mode 100644 index 00000000..e49df0e9 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/ReactSiteConfiguration.cs @@ -0,0 +1,339 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using Microsoft.Extensions.DependencyInjection; +using Newtonsoft.Json; +using React.Exceptions; + +namespace React +{ + /// + /// Site-wide configuration for ReactJS.NET + /// + public class ReactSiteConfiguration : IReactSiteConfiguration + { + private readonly IServiceProvider _serviceProvider; + + /// + /// Initializes a new instance of the class. 
+ /// + public ReactSiteConfiguration(IServiceProvider serviceProvider) + { + _serviceProvider = serviceProvider; + + ReuseJavaScriptEngines = true; + AllowJavaScriptPrecompilation = false; + LoadReact = true; + JsonSerializerSettings = new JsonSerializerSettings + { + StringEscapeHandling = StringEscapeHandling.EscapeHtml + }; + UseDebugReact = false; + UseServerSideRendering = true; + ExceptionHandler = (Exception ex, string ComponentName, string ContainerId) => + throw new ReactServerRenderingException(string.Format( + "Error while rendering \"{0}\" to \"{2}\": {1}", + ComponentName, + ex.Message, + ContainerId + ), ex); + } + + /// + /// All the scripts that have been added to this configuration and require JSX + /// transformation to be run. + /// + private readonly IList _scriptFiles = new List(); + /// + /// All the scripts that have been added to this configuration and do not require JSX + /// transformation to be run. + /// + private readonly IList _scriptFilesWithoutTransform = new List(); + + /// + /// Adds a script to the list of scripts that are executed. This should be called for all + /// React components and their dependencies. If the script does not have any JSX in it + /// (for example, it's built using Webpack or Gulp), use + /// instead. + /// + /// + /// Name of the file to execute. Should be a server relative path starting with ~ (eg. + /// ~/Scripts/Awesome.js) + /// + /// This configuration, for chaining + public IReactSiteConfiguration AddScript(string filename) + { + _scriptFiles.Add(filename); + return this; + } + + /// + /// Adds a script to the list of scripts that are executed. This is the same as + /// except it does not run JSX transformation on the script and thus is + /// more efficient. + /// + /// + /// Name of the file to execute. Should be a server relative path starting with ~ (eg. 
+ /// ~/Scripts/Awesome.js) + /// + /// The configuration, for chaining + public IReactSiteConfiguration AddScriptWithoutTransform(string filename) + { + _scriptFilesWithoutTransform.Add(filename); + return this; + } + + /// + /// Gets all the file paths that match the specified pattern. If the pattern is a plain + /// path, just returns that path verbatim. + /// + /// + /// Patterns to search for (eg. ~/Scripts/*.js or ~/Scripts/Awesome.js + /// + /// File paths that match this pattern + private IEnumerable Glob(string glob) + { + if (!glob.IsGlobPattern()) + { + return new[] { glob }; + } + // Directly touching the IoC container is not ideal, but we only want to pull the FileSystem + // dependency if it's absolutely necessary. + var fileSystem = _serviceProvider.GetRequiredService(); + return fileSystem.Glob(glob); + } + + /// + /// Gets a list of all the scripts that have been added to this configuration and require JSX + /// transformation to be run. + /// + public IEnumerable Scripts + { + // TODO: It's a bit strange to do the globbing here, ideally this class should just be a simple + // bag of settings with no logic. + get { return _scriptFiles.SelectMany(Glob); } + } + + /// + /// Gets a list of all the scripts that have been added to this configuration. + /// + public IEnumerable ScriptsWithoutTransform + { + get { return _scriptFilesWithoutTransform.SelectMany(Glob); } + } + + /// + /// Gets or sets the configuration for JSON serializer. + /// + public JsonSerializerSettings JsonSerializerSettings { get; set; } + + /// + /// Sets the configuration for json serializer. + /// + /// Settings. + /// + /// Thic confiquration is used when component initialization script + /// is being generated server-side. + /// + public IReactSiteConfiguration SetJsonSerializerSettings(JsonSerializerSettings settings) + { + JsonSerializerSettings = settings; + return this; + } + + /// + /// Gets or sets whether JavaScript engines should be reused across requests. 
+ /// + public bool ReuseJavaScriptEngines { get; set; } + /// + /// Sets whether JavaScript engines should be reused across requests. + /// + public IReactSiteConfiguration SetReuseJavaScriptEngines(bool value) + { + ReuseJavaScriptEngines = value; + return this; + } + + /// + /// Gets or sets the number of engines to initially start when a pool is created. + /// Defaults to 10. + /// + public int? StartEngines { get; set; } + /// + /// Sets the number of engines to initially start when a pool is created. + /// Defaults to 10. + /// + public IReactSiteConfiguration SetStartEngines(int? startEngines) + { + StartEngines = startEngines; + return this; + } + + /// + /// Gets or sets the maximum number of engines that will be created in the pool. + /// Defaults to 25. + /// + public int? MaxEngines { get; set; } + /// + /// Sets the maximum number of engines that will be created in the pool. + /// Defaults to 25. + /// + public IReactSiteConfiguration SetMaxEngines(int? maxEngines) + { + MaxEngines = maxEngines; + return this; + } + + /// + /// Gets or sets the maximum number of times an engine can be reused before it is disposed. + /// 0 is unlimited. Defaults to 100. + /// + public int? MaxUsagesPerEngine { get; set; } + /// + /// Sets the maximum number of times an engine can be reused before it is disposed. + /// 0 is unlimited. Defaults to 100. + /// + public IReactSiteConfiguration SetMaxUsagesPerEngine(int? maxUsagesPerEngine) + { + MaxUsagesPerEngine = maxUsagesPerEngine; + return this; + } + + /// + /// Gets or sets whether to allow the JavaScript pre-compilation (accelerates the + /// initialization of JavaScript engines). + /// + public bool AllowJavaScriptPrecompilation { get; set; } + + /// + /// Sets whether to allow the JavaScript pre-compilation (accelerates the initialization of + /// JavaScript engines). 
+ /// + /// + public IReactSiteConfiguration SetAllowJavaScriptPrecompilation(bool allowJavaScriptPrecompilation) + { + AllowJavaScriptPrecompilation = allowJavaScriptPrecompilation; + return this; + } + + /// + /// Gets or sets whether the built-in version of React is loaded. If false, you must + /// provide your own version of React. + /// + public bool LoadReact { get; set; } + + /// + /// Sets whether the built-in version of React is loaded. If false, you must + /// provide your own version of React. + /// + /// The configuration, for chaining + public IReactSiteConfiguration SetLoadReact(bool loadReact) + { + LoadReact = loadReact; + return this; + } + + /// + /// Gets or sets whether to use the debug version of React. This is slower, but gives + /// useful debugging tips. + /// + public bool UseDebugReact { get; set; } + + /// + /// Sets whether to use the debug version of React. This is slower, but gives + /// useful debugging tips. + /// + public IReactSiteConfiguration SetUseDebugReact(bool value) + { + UseDebugReact = value; + return this; + } + + /// + /// Gets or sets whether server-side rendering is enabled. + /// + public bool UseServerSideRendering { get; set; } + + /// + /// Disables server-side rendering. This is useful when debugging your scripts. + /// + public IReactSiteConfiguration DisableServerSideRendering() + { + UseServerSideRendering = false; + return this; + } + + /// + /// Handle an exception caught during server-render of a component. + /// If unset, unhandled exceptions will be thrown for all component renders. + /// + public Action ExceptionHandler { get; set; } + + /// + /// + /// + /// + /// + public IReactSiteConfiguration SetExceptionHandler(Action handler) + { + ExceptionHandler = handler; + return this; + } + + /// + /// A provider that returns a nonce to be used on any script tags on the page. + /// This value must match the nonce used in the Content Security Policy header on the response. 
+ /// + public Func ScriptNonceProvider { get; set; } + + /// + /// Sets a provider that returns a nonce to be used on any script tags on the page. + /// This value must match the nonce used in the Content Security Policy header on the response. + /// + /// + /// + public IReactSiteConfiguration SetScriptNonceProvider(Func provider) + { + ScriptNonceProvider = provider; + return this; + } + + /// + /// The path to the application bundles built by webpack or create-react-app + /// + public string ReactAppBuildPath { get; set; } + + /// + /// Sets the path to the application bundles built by webpack or create-react-app + /// + /// + /// + public IReactSiteConfiguration SetReactAppBuildPath(string reactAppBuildPath) + { + ReactAppBuildPath = reactAppBuildPath; + return this; + } + + /// + /// Gets or sets if the React 18+ create root api should be used for rendering / hydration. + /// If false ReactDOM.render / ReactDOM.hydrate will be used. + /// + public bool UseRootAPI { get; set; } + + /// + /// Enables usage of the React 18 root API when rendering / hydrating. + /// + /// + public void EnableReact18RootAPI() + { + UseRootAPI = true; + } + + /// + public Func FilterResource { get; set; } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Redpoint.ThirdParty.React.Core.csproj b/UET/Lib/Redpoint.ThirdParty.React.Core/Redpoint.ThirdParty.React.Core.csproj new file mode 100644 index 00000000..d8122af8 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Redpoint.ThirdParty.React.Core.csproj @@ -0,0 +1,44 @@ + + + + + + React.Core + React.Core + + + + + A fork of React.NET (https://github.com/reactjs/react.net) that adds support for React v18. 
+ Redpoint.ThirdParty.React.Core + react, react.net + MIT + June Rhodes, Daniel Lo Nigro + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/ChainedRenderFunctions.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/ChainedRenderFunctions.cs new file mode 100644 index 00000000..cdfa80ca --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/ChainedRenderFunctions.cs @@ -0,0 +1,91 @@ +using System.Collections.ObjectModel; + +namespace React.RenderFunctions +{ + /// + /// Helper to chain functions to be executed during server-side rendering. + /// For instance, React Router and React Helmet can both be used together using this class. + /// + public class ChainedRenderFunctions : IRenderFunctions + { + private readonly ReadOnlyCollection _chainedFunctions; + + /// + /// Constructor. Supports chained calls to multiple render functions by passing in a set of functions that should be called next. + /// + /// The chained render functions to call + public ChainedRenderFunctions(params IRenderFunctions[] chainedFunctions) + { + _chainedFunctions = chainedFunctions.Where(x => x != null).ToList().AsReadOnly(); + } + + /// + /// Executes before component render. + /// It takes a func that accepts a Javascript code expression to evaluate, which returns the result of the expression. + /// This is useful for setting up variables that will be referenced after the render completes. + /// The func to execute + /// + public void PreRender(Func executeJs) + { + foreach (var chainedFunction in _chainedFunctions) + { + chainedFunction.PreRender(executeJs); + } + } + + + /// + /// Transforms the React.createElement expression. + /// This is useful for libraries like styled components which require wrapping the root component + /// inside a helper to generate a stylesheet. + /// Example transform: React.createElement(Foo, ...) 
=> wrapComponent(React.createElement(Foo, ...)) + /// + /// The Javascript expression to wrap + /// A wrapped expression + public string WrapComponent(string componentToRender) + { + string wrappedComponent = componentToRender; + + foreach (var chainedFunction in _chainedFunctions) + { + wrappedComponent = chainedFunction.WrapComponent(wrappedComponent); + } + + return wrappedComponent; + } + + + /// + /// Transforms the compiled rendered component HTML + /// This is useful for libraries like emotion which take rendered component HTML and output the transformed HTML plus additional style tags + /// + /// The component HTML + /// A wrapped expression + public string TransformRenderedHtml(string input) + { + string renderedHtml = input; + + foreach (var chainedFunction in _chainedFunctions) + { + renderedHtml = chainedFunction.TransformRenderedHtml(renderedHtml); + } + + return renderedHtml; + } + + + /// + /// Executes after component render. + /// It takes a func that accepts a Javascript code expression to evaluate, which returns the result of the expression. + /// This is useful for reading computed state, such as generated stylesheets or a router redirect result. + /// + /// The func to execute + public void PostRender(Func executeJs) + { + foreach (var chainedFunction in _chainedFunctions) + { + chainedFunction.PostRender(executeJs); + } + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/EmotionFunctions.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/EmotionFunctions.cs new file mode 100644 index 00000000..7971bc68 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/EmotionFunctions.cs @@ -0,0 +1,19 @@ +namespace React.RenderFunctions +{ + /// + /// Render functions for Emotion. 
https://github.com/emotion-js/emotion + /// Requires `emotion-server` to be exposed globally as `EmotionServer` + /// + public class EmotionFunctions : RenderFunctionsBase + { + /// + /// Implementation of TransformRenderedHtml + /// + /// + /// + public override string TransformRenderedHtml(string input) + { + return $"EmotionServer.renderStylesToString({input})"; + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/ReactHelmetFunctions.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/ReactHelmetFunctions.cs new file mode 100644 index 00000000..0caee24d --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/ReactHelmetFunctions.cs @@ -0,0 +1,31 @@ +using Newtonsoft.Json; + +namespace React.RenderFunctions +{ + /// + /// Render functions for React-Helmet. https://github.com/nfl/react-helmet + /// Requires `react-helmet` to be exposed globally as `Helmet` + /// + public class ReactHelmetFunctions : RenderFunctionsBase + { + /// + /// Dictionary of Helmet properties, rendered as raw HTML tags + /// Available keys: "base", "bodyAttributes", "htmlAttributes", "link", "meta", "noscript", "script", "style", "title" + /// + public Dictionary RenderedHelmet { get; private set; } + + /// + /// Implementation of PostRender + /// + /// + public override void PostRender(Func executeJs) + { + var helmetString = executeJs(@" +var helmetResult = Helmet.renderStatic(); +JSON.stringify(['base', 'bodyAttributes', 'htmlAttributes', 'link', 'meta', 'noscript', 'script', 'style', 'title'] + .reduce((mappedResults, helmetKey) => Object.assign(mappedResults, { [helmetKey]: helmetResult[helmetKey] && helmetResult[helmetKey].toString() }), {}));"); + + RenderedHelmet = JsonConvert.DeserializeObject>(helmetString); + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/ReactJssFunctions.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/ReactJssFunctions.cs new file mode 100644 index 00000000..18ca4bd8 
--- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/ReactJssFunctions.cs @@ -0,0 +1,42 @@ +namespace React.RenderFunctions +{ + /// + /// Render functions for React-JSS. https://github.com/cssinjs/react-jss + /// Requires `react-jss` to be exposed globally as `ReactJss` + /// + public class ReactJssFunctions : RenderFunctionsBase + { + /// + /// HTML style tag containing the rendered styles + /// + public string RenderedStyles { get; private set; } + + /// + /// Implementation of PreRender + /// + /// + public override void PreRender(Func executeJs) + { + executeJs("var reactJssProps = { registry: new ReactJss.SheetsRegistry() };"); + } + + /// + /// Implementation of WrapComponent + /// + /// + /// + public override string WrapComponent(string componentToRender) + { + return ($"React.createElement(ReactJss.JssProvider, reactJssProps, ({componentToRender}))"); + } + + /// + /// Implementation of PostRender + /// + /// + public override void PostRender(Func executeJs) + { + RenderedStyles = $""; + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/StyledComponentsFunctions.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/StyledComponentsFunctions.cs new file mode 100644 index 00000000..863d0db2 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctions/StyledComponentsFunctions.cs @@ -0,0 +1,42 @@ +namespace React.RenderFunctions +{ + /// + /// Render functions for styled components. 
https://github.com/styled-components/styled-components + /// Requires `styled-components` to be exposed globally as `Styled` + /// + public class StyledComponentsFunctions : RenderFunctionsBase + { + /// + /// HTML style tag containing the rendered styles + /// + public string RenderedStyles { get; private set; } + + /// + /// Implementation of PreRender + /// + /// + public override void PreRender(Func executeJs) + { + executeJs("var serverStyleSheet = new Styled.ServerStyleSheet();"); + } + + /// + /// Implementation of WrapComponent + /// + /// + /// + public override string WrapComponent(string componentToRender) + { + return ($"serverStyleSheet.collectStyles({componentToRender})"); + } + + /// + /// Implementation of PostRender + /// + /// + public override void PostRender(Func executeJs) + { + RenderedStyles = executeJs("serverStyleSheet.getStyleTags()"); + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctionsBase.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctionsBase.cs new file mode 100644 index 00000000..633dd035 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/RenderFunctionsBase.cs @@ -0,0 +1,50 @@ +namespace React +{ + /// + /// Functions to execute during a render request. + /// These functions will share the same Javascript context, so state can be passed around via variables. + /// + public abstract class RenderFunctionsBase : IRenderFunctions + { + /// + /// Executes before component render. + /// It takes a func that accepts a Javascript code expression to evaluate, which returns the result of the expression. + /// This is useful for setting up variables that will be referenced after the render completes. + /// The func to execute + /// + public virtual void PreRender(Func executeJs) + { + } + + + /// + /// Transforms the React.createElement expression. + /// This is useful for libraries like styled components which require wrapping the root component + /// inside a helper to generate a stylesheet. 
+ /// Example transform: React.createElement(Foo, ...) => wrapComponent(React.createElement(Foo, ...)) + /// + /// The Javascript expression to wrap + /// A wrapped expression + public virtual string WrapComponent(string componentToRender) => componentToRender; + + + /// + /// Transforms the compiled rendered component HTML + /// This is useful for libraries like emotion which take rendered component HTML and output the transformed HTML plus additional style tags + /// + /// The component HTML + /// A wrapped expression + public virtual string TransformRenderedHtml(string input) => input; + + + /// + /// Executes after component render. + /// It takes a func that accepts a Javascript code expression to evaluate, which returns the result of the expression. + /// This is useful for reading computed state, such as generated stylesheets or a router redirect result. + /// + /// The func to execute + public virtual void PostRender(Func executeJs) + { + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/Resources/shims.js b/UET/Lib/Redpoint.ThirdParty.React.Core/Resources/shims.js new file mode 100644 index 00000000..f99e4f26 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/Resources/shims.js @@ -0,0 +1,120 @@ +/** + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +var global = global || {}; +var React, ReactDOM, ReactDOMServer, setTimeout, clearTimeout; + +// Basic console shim. Caches all calls to console methods. 
+function MockConsole() { + this._calls = []; + ['log', 'error', 'warn', 'debug', 'info', 'dir', 'group', 'groupEnd', 'groupCollapsed'].forEach(function (methodName) { + this[methodName] = this._handleCall.bind(this, methodName); + }, this); +} +MockConsole.prototype = { + _handleCall: function(methodName/*, ...args*/) { + var serializedArgs = []; + for (var i = 1; i < arguments.length; i++) { + serializedArgs.push(JSON.stringify(arguments[i])); + } + + this._calls.push({ + method: methodName, + args: serializedArgs, + stack: '\nCall stack: ' + (new Error().stack || 'not available') + }); + }, + _formatCall: function(call) { + return 'console.' + call.method + '("[.NET]", ' + call.args.join(', ') + ', ' + JSON.stringify(call.stack) + ');'; + }, + getCalls: function() { + return this._calls.map(this._formatCall).join('\n'); + } +}; +var console = new MockConsole(); + +if (!Object.freeze) { + Object.freeze = function() { }; +} + +/** + * Finds a user-supplied version of React and ensures it's exposed globally. + * + * @return {string} Comma-separated list of missing globals. 
+ */ +function ReactNET_initReact() { + var missing = []; + + if (typeof React === 'undefined') { + if (global.React) { + React = global.React; + } else { + missing.push('React'); + } + } + + if (typeof ReactDOM === 'undefined') { + if (global.ReactDOM) { + ReactDOM = global.ReactDOM; + } else { + missing.push('ReactDOM'); + } + } + + if (typeof ReactDOMServer === 'undefined') { + if (global.ReactDOMServer) { + ReactDOMServer = global.ReactDOMServer; + } + else { + missing.push('ReactDOMServer'); + } + } + + return missing.join(','); +} + +setTimeout = setTimeout || global.setTimeout; +if (setTimeout === undefined) { + setTimeout = function() { + throw new Error('setTimeout is not supported in server-rendered Javascript.'); + } +} + +clearTimeout = clearTimeout || global.clearTimeout; +if (clearTimeout === undefined) { + clearTimeout = function() { + throw new Error('clearTimeout is not supported in server-rendered Javascript.'); + } +} + +/** + * Polyfill for engines that do not support Object.assign + */ +if (typeof Object.assign !== 'function') { + Object.assign = function (target, varArgs) { // .length of function is 2 + 'use strict'; + if (target == null) { // TypeError if undefined or null + throw new TypeError('Cannot convert undefined or null to object'); + } + + var to = Object(target); + + for (var index = 1; index < arguments.length; index++) { + var nextSource = arguments[index]; + + if (nextSource != null) { // Skip over if undefined or null + for (var nextKey in nextSource) { + // Avoid bugs when hasOwnProperty is shadowed + if (Object.prototype.hasOwnProperty.call(nextSource, nextKey)) { + to[nextKey] = nextSource[nextKey]; + } + } + } + } + return to; + }; +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/SimpleFileSystem.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/SimpleFileSystem.cs new file mode 100644 index 00000000..acfcad83 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/SimpleFileSystem.cs @@ -0,0 +1,25 @@ +/* + * Copyright 
(c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +namespace React +{ + /// + /// An implementation of that does not do any mapping of file paths. + /// + public class SimpleFileSystem : FileSystemBase + { + /// + /// Converts a path from an application relative path (~/...) to a full filesystem path + /// + /// App-relative path of the file + /// Full path of the file + public override string MapPath(string relativePath) + { + return relativePath; + } + } +} diff --git a/UET/Lib/Redpoint.ThirdParty.React.Core/SourceMap.cs b/UET/Lib/Redpoint.ThirdParty.React.Core/SourceMap.cs new file mode 100644 index 00000000..e5efbd94 --- /dev/null +++ b/UET/Lib/Redpoint.ThirdParty.React.Core/SourceMap.cs @@ -0,0 +1,82 @@ +/* + * Copyright (c) Facebook, Inc. and its affiliates. + * + * This source code is licensed under the MIT license found in the + * LICENSE file in the root directory of this source tree. + */ + +using Newtonsoft.Json; +using Newtonsoft.Json.Serialization; + +namespace React +{ + /// + /// Represents the data contained in a source map + /// + [Serializable] + public class SourceMap + { + /// + /// Version number of the source map spec used to build this source map. Expected + /// to be version 3. + /// + public int Version { get; set; } + + /// + /// An optional name of the generated code that this source map is associated with. + /// + public string File { get; set; } + + /// + /// An optional source root, useful for relocating source files on a server or + /// removing repeated values in the entry. This value is + /// prepended to the individual entries in the field. + /// + public string SourceRoot { get; set; } + + /// + /// A list of original sources used by the entry. + /// + public IList Sources { get; set; } + + /// + /// An optional list of source content, useful when the can't + /// be hosted. 
The contents are listed in the same order as the . + /// null may be used if some original sources should be retrieved by name. + /// + public IList SourcesContent { get; set; } + + /// + /// A list of symbol names used by the entry. + /// + public IList Names { get; set; } + + /// + /// A string with the mapping data encoded in base 64 VLQ. + /// + public string Mappings { get; set; } + + /// + /// Outputs this source map as JSON. + /// + /// + public string ToJson() + { + return JsonConvert.SerializeObject(this, new JsonSerializerSettings + { + // Camelcase keys (eg. "SourcesContent" -> "sourcesContent") + ContractResolver = new CamelCasePropertyNamesContractResolver() + }); + } + + /// + /// Parse a source map from JSON + /// + /// JSON input + /// Source map + public static SourceMap FromJson(string json) + { + return JsonConvert.DeserializeObject(json); + } + } +} diff --git a/UET/Lib/ThirdPartyCommon.Build.props b/UET/Lib/ThirdPartyCommon.Build.props new file mode 100644 index 00000000..4fe26f87 --- /dev/null +++ b/UET/Lib/ThirdPartyCommon.Build.props @@ -0,0 +1,15 @@ + + + + true + true + + + + + + enable + annotations + + + diff --git a/UET/Lib/XunitTesting.Build.props b/UET/Lib/XunitTesting.Build.props index f9db5820..b4b333d9 100644 --- a/UET/Lib/XunitTesting.Build.props +++ b/UET/Lib/XunitTesting.Build.props @@ -14,27 +14,7 @@ true - - - - - - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - - - - - - + diff --git a/UET/Lib/XunitTestingDependencies.Build.props b/UET/Lib/XunitTestingDependencies.Build.props new file mode 100644 index 00000000..cfc33012 --- /dev/null +++ b/UET/Lib/XunitTestingDependencies.Build.props @@ -0,0 +1,27 @@ + + + + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + \ No newline at end of file 
diff --git a/UET/Redpoint.CloudFramework.CLI/BuildClientApp.cs b/UET/Redpoint.CloudFramework.CLI/BuildClientApp.cs new file mode 100644 index 00000000..723efd33 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/BuildClientApp.cs @@ -0,0 +1,130 @@ +namespace Redpoint.CloudFramework.CLI +{ + using Microsoft.Extensions.Logging; + using Redpoint.CommandLine; + using System.CommandLine; + using System.Threading.Tasks; + using Redpoint.ProcessExecution; + + internal class BuildClientApp + { + internal class Options + { + public Option Path = new Option( + "--app-path", + "The path to the client application. This directory should have a package.json file in it."); + + public Option Configuration = new Option( + "--configuration", + "The build configuration; should be set to $(Configuration) from MSBuild."); + } + + public static Command CreateCommand(ICommandBuilder builder) + { + return new Command("build-client-app", "Builds a TypeScript-based client app, installing all required dependencies as needed."); + } + + internal class CommandInstance : ICommandInstance + { + private readonly ILogger _logger; + private readonly IYarnInstallationService _yarnInstallationService; + private readonly IProcessExecutor _processExecutor; + private readonly Options _options; + + private static readonly string[] _openapiGenerateArgs = new[] { "run", "openapi", "--input", "openapi.json", "--output", "src/api", "-c", "fetch" }; + private static readonly string[] _webpackProductionBuildArgs = new[] { "run", "webpack", "--progress", "--mode", "production" }; + private static readonly string[] _yarnInstallArgs = new[] { "install", "--json" }; + + public CommandInstance( + ILogger logger, + IYarnInstallationService yarnInstallationService, + IProcessExecutor processExecutor, + Options options) + { + _logger = logger; + _yarnInstallationService = yarnInstallationService; + _processExecutor = processExecutor; + _options = options; + } + + public async Task 
ExecuteAsync(ICommandInvocationContext context) + { + var appPath = context.ParseResult.GetValueForOption(_options.Path); + if (appPath == null || !appPath.Exists || !File.Exists(Path.Combine(appPath.FullName, "package.json"))) + { + _logger.LogError("Expected --app-path to exist and contain a package.json file."); + return 1; + } + var configuration = context.ParseResult.GetValueForOption(_options.Configuration) ?? string.Empty; + + // Install Yarn. + var (exitCode, yarnCorepackShimPath) = await _yarnInstallationService.InstallYarnIfNeededAsync(context.GetCancellationToken()).ConfigureAwait(true); + if (yarnCorepackShimPath == null) + { + return exitCode; + } + + // Run 'yarn install' to install dependencies. + exitCode = await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = yarnCorepackShimPath, + Arguments = _yarnInstallArgs.Select(x => new LogicalProcessArgument(x)), + WorkingDirectory = appPath.FullName, + }, + new YarnInstallCaptureSpecification(_logger), + context.GetCancellationToken()).ConfigureAwait(true); + if (exitCode != 0) + { + _logger.LogError("'yarn install' command failed; see above for output."); + return exitCode; + } + + // If an openapi.json file exists in the root of the application path, + // automatically generate the TypeScript API for it. 
+ var openapiPath = Path.Combine(appPath.FullName, "openapi.json"); + if (File.Exists(openapiPath)) + { + exitCode = await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = yarnCorepackShimPath, + Arguments = _openapiGenerateArgs.Select(x => new LogicalProcessArgument(x)), + WorkingDirectory = appPath.FullName, + }, + CaptureSpecification.Passthrough, + context.GetCancellationToken()).ConfigureAwait(true); + if (exitCode != 0) + { + _logger.LogError("'yarn run openapi' command failed; see above for output."); + return exitCode; + } + } + + if (configuration == "Release") + { + // If we're building for Release, build the production version of the application. + return await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = yarnCorepackShimPath, + Arguments = _webpackProductionBuildArgs.Select(x => new LogicalProcessArgument(x)), + WorkingDirectory = appPath.FullName, + EnvironmentVariables = File.Exists(Path.Combine(appPath.FullName, "tsconfig.webpack.json")) ? new Dictionary + { + { "TS_NODE_PROJECT", "tsconfig.webpack.json" } + } : null + }, + CaptureSpecification.Passthrough, + context.GetCancellationToken()).ConfigureAwait(true); + } + else + { + // If we're building for Debug, then Redpoint.CloudFramework will handle running + // Webpack in watch mode. 
+ return 0; + } + } + } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/DefaultYarnInstallationService.cs b/UET/Redpoint.CloudFramework.CLI/DefaultYarnInstallationService.cs new file mode 100644 index 00000000..e2665718 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/DefaultYarnInstallationService.cs @@ -0,0 +1,123 @@ +namespace Redpoint.CloudFramework.CLI +{ + using Microsoft.Extensions.Logging; + using Redpoint.PathResolution; + using Redpoint.ProcessExecution; + using System; + using System.Diagnostics; + using System.Threading.Tasks; + + internal class DefaultYarnInstallationService : IYarnInstallationService + { + private readonly ILogger _logger; + private readonly IPathResolver _pathResolver; + private readonly IProcessExecutor _processExecutor; + + private static readonly string[] _winGetInstallNodeJsArgs = new[] { "install", "OpenJS.NodeJS" }; + + public DefaultYarnInstallationService( + ILogger logger, + IPathResolver pathResolver, + IProcessExecutor processExecutor) + { + _logger = logger; + _pathResolver = pathResolver; + _processExecutor = processExecutor; + } + + public async Task<(int exitCode, string? yarnPath)> InstallYarnIfNeededAsync(CancellationToken cancellationToken) + { + int exitCode; + + // Check if we have Node.js installed. If we don't, try to use WinGet to install it. 
+ var node = await _pathResolver.ResolveBinaryPath("node").ConfigureAwait(true); + var corepack = await _pathResolver.ResolveBinaryPath("corepack").ConfigureAwait(true); + if (node == null || corepack == null) + { + if (OperatingSystem.IsWindows()) + { + var winget = await _pathResolver.ResolveBinaryPath("winget").ConfigureAwait(true); + if (winget == null) + { + winget = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "Microsoft", "WindowsApps", "winget.exe"); + if (!File.Exists(winget)) + { + winget = null; + } + } + if (winget == null) + { + if (node != null) + { + _logger.LogError("WinGet is not installed, so Node.js can't be upgraded automatically. Please install WinGet by installing App Installer from the Microsoft Store first."); + } + else + { + _logger.LogError("WinGet is not installed, so Node.js can't be installed automatically. Please install WinGet by installing App Installer from the Microsoft Store first."); + } + Process.Start("https://www.microsoft.com/p/app-installer/9nblggh4nns1#activetab=pivot:overviewtab"); + return (1, null); + } + + _logger.LogInformation("Installing Node.js via WinGet..."); + exitCode = await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = winget, + Arguments = _winGetInstallNodeJsArgs.Select(x => new LogicalProcessArgument(x)), + }, + CaptureSpecification.Passthrough, + cancellationToken).ConfigureAwait(true); + if (exitCode != 0) + { + _logger.LogError("'winget' command failed; see above for output."); + return (exitCode, null); + } + + node = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles), "nodejs", "node.exe"); + corepack = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles), "nodejs", "corepack.cmd"); + if (!File.Exists(node)) + { + _logger.LogError("Node.js did not install correctly from WinGet, or did not install into the usual place. 
If Node.js did install correctly, try logging out and logging back in to refresh your environment variables."); + return (1, null); + } + if (!File.Exists(corepack)) + { + _logger.LogError("The version of Node.js that is installed on this machine is not new enough to have 'corepack'. Upgrade Node.js manually and then try again. If Node.js did upgrade correctly, try logging out and logging back in to refresh your environment variables."); + return (1, null); + } + } + else + { + _logger.LogError("Node.js is not installed on this machine, or is not new enough to have the 'corepack' command. Upgrade Node.js to at least v16.9.0 and try again."); + return (1, null); + } + } + + // Create a temporary location to install the Corepack shims. We'll use this writable location instead of + // the default since on Windows you can't do 'corepack enable' without elevating to Administrator. + var corepackShimPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "rcf-corepack"); + var yarnCorepackShimPath = Path.Combine(corepackShimPath, OperatingSystem.IsWindows() ? 
"yarn.cmd" : "yarn"); + if (!File.Exists(yarnCorepackShimPath)) + { + _logger.LogInformation("Setting up Node.js corepack shims..."); + Directory.CreateDirectory(corepackShimPath); + exitCode = await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = corepack, + Arguments = new[] { "enable", "--install-directory", corepackShimPath }.Select(x => new LogicalProcessArgument(x)), + }, + CaptureSpecification.Passthrough, + cancellationToken).ConfigureAwait(true); + if (exitCode != 0) + { + _logger.LogError("'corepack enable' command failed; see above for output."); + return (exitCode, null); + } + } + + return (0, yarnCorepackShimPath); + } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/GenerateHtmlFromMjml.cs b/UET/Redpoint.CloudFramework.CLI/GenerateHtmlFromMjml.cs new file mode 100644 index 00000000..f2d7adef --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/GenerateHtmlFromMjml.cs @@ -0,0 +1,154 @@ +namespace Redpoint.CloudFramework.CLI +{ + using Microsoft.Extensions.Logging; + using Redpoint.CommandLine; + using System.CommandLine; + using System.Threading.Tasks; + using Redpoint.ProcessExecution; + using System.Text; + + internal class GenerateHtmlFromMjml + { + internal class Options + { + public Option Path = new Option("--path", "The path to the MJML file."); + } + + public static Command CreateCommand(ICommandBuilder builder) + { + return new Command("generate-html-from-mjml", "Generates a HTML file from an MJML file."); + } + + internal class CommandInstance : ICommandInstance + { + private readonly ILogger _logger; + private readonly IYarnInstallationService _yarnInstallationService; + private readonly IProcessExecutor _processExecutor; + private readonly Options _options; + + internal static readonly string[] _yarnInitArgs = new[] { "init", "-2" }; + internal static readonly string[] _yarnAddMjmlArgs = new[] { "add", "-D", "mjml" }; + internal static readonly string[] _yarnAddHtmlToTextArgs = new[] { "add", "-D", 
"@html-to/text-cli", "dom-serializer" }; + internal static readonly string[] _htmlToTextArgs = new[] { "run", "html-to-text", "--selectors[]", ":[0].selector=h1", ":[0].format=skip" }; + + public CommandInstance( + ILogger logger, + IYarnInstallationService yarnInstallationService, + IProcessExecutor processExecutor, + Options options) + { + _logger = logger; + _yarnInstallationService = yarnInstallationService; + _processExecutor = processExecutor; + _options = options; + } + + public async Task ExecuteAsync(ICommandInvocationContext context) + { + var inputPath = context.ParseResult.GetValueForOption(_options.Path); + if (inputPath == null || !inputPath.Exists) + { + _logger.LogError("Expected --input-path to exist."); + return 1; + } + + // Install Yarn. + var (exitCode, yarnCorepackShimPath) = await _yarnInstallationService.InstallYarnIfNeededAsync(context.GetCancellationToken()).ConfigureAwait(true); + if (yarnCorepackShimPath == null) + { + return exitCode; + } + + // Create our directory where we will install the mjml tool. + var mjmlInstallPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "rcf-mjml"); + Directory.CreateDirectory(mjmlInstallPath); + + // If Yarn isn't initialised in this directory, do it now. + if (!File.Exists(Path.Combine(mjmlInstallPath, "yarn.lock"))) + { + exitCode = await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = yarnCorepackShimPath, + Arguments = _yarnInitArgs.Select(x => new LogicalProcessArgument(x)), + WorkingDirectory = mjmlInstallPath, + }, + new YarnInstallCaptureSpecification(_logger), + context.GetCancellationToken()).ConfigureAwait(true); + if (exitCode != 0) + { + _logger.LogError("'yarn init -2' command failed; see above for output."); + return exitCode; + } + } + + // If package.json doesn't have mjml, install it. 
+ if (!File.ReadAllText(Path.Combine(mjmlInstallPath, "package.json")).Contains(@"""mjml""", StringComparison.Ordinal)) + { + exitCode = await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = yarnCorepackShimPath, + Arguments = _yarnAddMjmlArgs.Select(x => new LogicalProcessArgument(x)), + WorkingDirectory = mjmlInstallPath, + }, + new YarnInstallCaptureSpecification(_logger), + context.GetCancellationToken()).ConfigureAwait(true); + if (exitCode != 0) + { + _logger.LogError("'yarn add -D mjml' command failed; see above for output."); + return exitCode; + } + } + + // If package.json doesn't have @html-to/text-cli, install it. + if (!File.ReadAllText(Path.Combine(mjmlInstallPath, "package.json")).Contains(@"""@html-to/text-cli""", StringComparison.Ordinal)) + { + exitCode = await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = yarnCorepackShimPath, + Arguments = _yarnAddHtmlToTextArgs.Select(x => new LogicalProcessArgument(x)), + WorkingDirectory = mjmlInstallPath, + }, + new YarnInstallCaptureSpecification(_logger), + context.GetCancellationToken()).ConfigureAwait(true); + if (exitCode != 0) + { + _logger.LogError("'yarn add -D @html-to/text-cli' command failed; see above for output."); + return exitCode; + } + } + + // Execute mjml for the input and output paths. + exitCode = await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = yarnCorepackShimPath, + Arguments = new[] { "run", "mjml", "-r", inputPath.FullName, "-o", inputPath.FullName + ".html" }.Select(x => new LogicalProcessArgument(x)), + WorkingDirectory = mjmlInstallPath, + }, + CaptureSpecification.Passthrough, + context.GetCancellationToken()).ConfigureAwait(true); + if (exitCode != 0) + { + return exitCode; + } + + // Execute html-to-text on the output HTML, so that we can have a text version as well. 
+ var textStringBuilder = new StringBuilder(); + exitCode = await _processExecutor.ExecuteAsync( + new ProcessSpecification + { + FilePath = yarnCorepackShimPath, + Arguments = _htmlToTextArgs.Select(x => new LogicalProcessArgument(x)), + WorkingDirectory = mjmlInstallPath, + }, + new HtmlToTextCaptureSpecification(File.ReadAllText(inputPath.FullName + ".html"), textStringBuilder), + context.GetCancellationToken()).ConfigureAwait(true); + File.WriteAllText(inputPath.FullName + ".txt", textStringBuilder.ToString()); + return exitCode; + } + } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/GenerateOpenApiJson.cs b/UET/Redpoint.CloudFramework.CLI/GenerateOpenApiJson.cs new file mode 100644 index 00000000..85878735 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/GenerateOpenApiJson.cs @@ -0,0 +1,198 @@ +namespace Redpoint.CloudFramework.CLI +{ + using Microsoft.AspNetCore.Hosting; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Logging; + using Microsoft.OpenApi.Writers; + using Redpoint.CloudFramework.Startup; + using Redpoint.CommandLine; + using Swashbuckle.AspNetCore.Swagger; + using System; + using System.CommandLine; + using System.Globalization; + using System.Reflection; + using System.Runtime.Loader; + using System.Threading.Tasks; + + internal class GenerateOpenApiJson + { + internal class Options + { + public Option AssemblyPath = new Option( + "--assembly-path", + "The path to the built .NET assembly."); + + public Option OutputPath = new Option( + "--output-path", + "The path to output the OpenAPI JSON file to."); + + public Option EntrypointClass = new Option( + "--entrypoint-class", + "The class name for the entrypoint."); + + public Option Version = new Option( + "--version", + () => "v1", + "The document version to generate for."); + } + + public static Command CreateCommand(ICommandBuilder builder) + { + return new Command("generate-openapi-json", "Generates an OpenAPI JSON file from the .NET assembly."); + } + 
+ internal class CommandInstance : ICommandInstance + { + private readonly ILogger _logger; + private readonly Options _options; + + public CommandInstance( + ILogger logger, + Options options) + { + _logger = logger; + _options = options; + } + +#pragma warning disable IL2026 // Members annotated with 'RequiresUnreferencedCodeAttribute' require dynamic access otherwise can break functionality when trimming application code +#pragma warning disable IL2075 // 'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The return value of the source method does not have matching annotations. + public async Task ExecuteAsync(ICommandInvocationContext context) + { + var assemblyPath = context.ParseResult.GetValueForOption(_options.AssemblyPath); + if (assemblyPath == null || !assemblyPath.Exists) + { + _logger.LogError("The input assembly for generating the OpenAPI JSON (--assembly-path) must exist."); + return 1; + } + var outputPath = context.ParseResult.GetValueForOption(_options.OutputPath); + if (outputPath == null) + { + _logger.LogError("The output path for generating the OpenAPI JSON (--output-path) must be specified."); + return 1; + } + var entrypointClassName = context.ParseResult.GetValueForOption(_options.EntrypointClass); + var version = context.ParseResult.GetValueForOption(_options.Version); + + Environment.SetEnvironmentVariable("ASPNETCORE_ENVIRONMENT", "Development"); + Environment.SetEnvironmentVariable("CLOUDFRAMEWORK_IS_CLIENT_API_GENERATION", "true"); + + AppDomain.CurrentDomain.AssemblyResolve += (sender, ev) => + { + var name = new AssemblyName(ev.Name); + var baseDir = Path.GetDirectoryName(assemblyPath.FullName); + var targetFile = Path.Combine(baseDir!, name.Name + ".dll"); + if (File.Exists(targetFile)) + { + return AssemblyLoadContext.Default.LoadFromAssemblyPath(targetFile); + } + if (!targetFile.EndsWith(".resources.dll", StringComparison.InvariantCultureIgnoreCase)) + { + _logger.LogError($"Unable to 
find assembly at: {targetFile}"); + } + return null; + }; + + var assembly = AssemblyLoadContext.Default.LoadFromAssemblyPath(assemblyPath.FullName); + if (assembly == null) + { + _logger.LogError($"Unable to load the assembly located at '{assemblyPath.FullName}'."); + return 1; + } + + IWebHost? app; + var providerType = assembly.GetExportedTypes() + .FirstOrDefault(x => typeof(IWebAppProvider).IsAssignableFrom(x)); + if (providerType == null) + { + if (string.IsNullOrWhiteSpace(entrypointClassName)) + { + _logger.LogError("The entrypoint class (--entrypoint-class) must be specified because there is no class that implements IWebAppProvider."); + return 1; + } + + _logger.LogWarning("You should migrate to having either Program or another public class implement IWebAppProvider instead of relying on Program having a static public GetWebHostAsync method and --entrypoint-class."); + var legacyTask = (Task?)assembly + .GetType(entrypointClassName) + ?.GetMethod("GetWebHost", BindingFlags.Static | BindingFlags.Public) + ?.Invoke(null, Array.Empty()); + if (legacyTask == null) + { + legacyTask = (Task?)assembly + .GetType(entrypointClassName) + ?.GetMethod("GetWebHostAsync", BindingFlags.Static | BindingFlags.Public) + ?.Invoke(null, Array.Empty()); + } + if (legacyTask == null) + { + _logger.LogError($"Unable to locate {entrypointClassName}.GetWebHostAsync in loaded DLL. You should have a public class implement IWebAppProvider instead of relying on --entrypoint-class search behaviour."); + return 1; + } + app = await legacyTask.ConfigureAwait(false); + } + else + { + var interfaceMap = providerType.GetInterfaceMap(typeof(IWebAppProvider)); + MethodInfo? 
targetMethod = null; + for (var i = 0; i < interfaceMap.InterfaceMethods.Length; i++) + { + var interfaceMethod = interfaceMap.InterfaceMethods[i]; + if (interfaceMethod.Name == "GetWebHostAsync" && + interfaceMethod.ReturnType == typeof(ValueTask)) + { + targetMethod = interfaceMap.TargetMethods[i]; + } + } + if (targetMethod == null) + { + _logger.LogError($"The '{providerType.FullName}' class does not correctly implement the IWebAppProvider interface."); + return 1; + } + var taskObject = targetMethod?.Invoke(null, Array.Empty()); + if (taskObject == null) + { + _logger.LogError($"The '{providerType.FullName}' class somehow returned a null value from GetWebHostAsync, even though it's return type should be a value type."); + return 1; + } + var task = (ValueTask)taskObject; + app = await task.ConfigureAwait(false); + } + + _logger.LogInformation("Getting ISwaggerProvider..."); + var swaggerProvider = app.Services.GetRequiredService(); + + _logger.LogInformation("Generating OpenAPI document..."); + try + { + var swagger = swaggerProvider.GetSwagger( + documentName: string.IsNullOrWhiteSpace(version) ? 
"v1" : version, + host: null, + basePath: null); + + _logger.LogInformation("Writing OpenAPI document to output path..."); + using (var textWriter = new StringWriter(CultureInfo.InvariantCulture)) + { + var jsonWriter = new OpenApiJsonWriter(textWriter); + + swagger.SerializeAsV3(jsonWriter); + + if (outputPath.DirectoryName != null) + { + Directory.CreateDirectory(outputPath.DirectoryName); + } + await File.WriteAllTextAsync(outputPath.FullName, textWriter.ToString()).ConfigureAwait(false); + } + } + catch (Exception ex) + { + _logger.LogError(ex, ex.Message); + return 1; + } + + _logger.LogInformation("OpenAPI generation complete."); + return 0; + } +#pragma warning restore IL2026 +#pragma warning restore IL2075 + } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/GenerateStronglyTypedLanguageFiles.cs b/UET/Redpoint.CloudFramework.CLI/GenerateStronglyTypedLanguageFiles.cs new file mode 100644 index 00000000..7113f14c --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/GenerateStronglyTypedLanguageFiles.cs @@ -0,0 +1,229 @@ +namespace Redpoint.CloudFramework.CLI +{ + using Microsoft.Extensions.Logging; + using Redpoint.CommandLine; + using System.CommandLine; + using System.Text.Json; + using System.Text.RegularExpressions; + using System.Threading.Tasks; + + internal class GenerateStronglyTypedLanguageFiles + { + internal class Options + { + public Option JsonPath = new Option( + "--json", + "The path to the JSON file that contains the language keys."); + + public Option Namespace = new Option( + "--namespace", + "The namespace to put C# classes in."); + + public Option CsPath = new Option( + "--cs-path", + "The path to emit the C# code."); + + public Option TsPath = new Option( + "--ts-path", + "The path to emit the TypeScript code."); + } + + public static Command CreateCommand(ICommandBuilder builder) + { + return new Command("generate-strongly-typed-language", "Generates files for a strongly-typed language system."); + } + + internal class CommandInstance 
: ICommandInstance + { + private readonly ILogger _logger; + private readonly Options _options; + + public CommandInstance( + ILogger logger, + Options options) + { + _logger = logger; + _options = options; + } + + private static readonly Regex _braceRegex = new Regex("\\{[0-9]\\}"); + private static readonly Regex _htmlRegex = new Regex("\\<[a-z]+\\>"); + + public Task ExecuteAsync(ICommandInvocationContext context) + { + var jsonPath = context.ParseResult.GetValueForOption(_options.JsonPath)!; + var ns = context.ParseResult.GetValueForOption(_options.Namespace)!; + var csPath = context.ParseResult.GetValueForOption(_options.CsPath)!; + var tsPath = context.ParseResult.GetValueForOption(_options.TsPath)!; + + var languageDict = JsonSerializer.Deserialize(File.ReadAllText(jsonPath.FullName), LanguageJsonSerializerContext.Default.LanguageDictionary)!; + if (!languageDict.IsSorted()) + { + _logger.LogWarning("Automatically sorting language dictionary keys..."); + languageDict.SortKeys(); + File.WriteAllText( + jsonPath.FullName, + JsonSerializer.Serialize( + languageDict, + new LanguageJsonSerializerContext(new JsonSerializerOptions + { + WriteIndented = true, + }).LanguageDictionary)); + } + + _logger.LogInformation($"Generating {languageDict.Count} entries for strongly-typed language files..."); + + using (var writer = new StreamWriter(csPath.FullName)) + { + writer.WriteLine( + $$""" + namespace {{ns}}; + + using Microsoft.AspNetCore.Html; + + #pragma warning disable CA1707 + + public interface IHtmlLanguageService + { + """); + foreach (var kv in languageDict) + { + if (_braceRegex.IsMatch(kv.Value)) + { + writer.WriteLine( + $$""" + IHtmlContent {{kv.Key}}(params object[] arguments); + """); + } + else + { + writer.WriteLine( + $$""" + IHtmlContent {{kv.Key}} { get; } + """); + } + } + writer.WriteLine( + $$""" + } + + public interface ITextLanguageService + { + """); + foreach (var kv in languageDict) + { + if (_braceRegex.IsMatch(kv.Value)) + { + 
writer.WriteLine( + $$""" + string {{kv.Key}}(params object[] arguments); + """); + } + else + { + writer.WriteLine( + $$""" + string {{kv.Key}} { get; } + """); + } + } + writer.WriteLine( + $$""" + } + + internal class DefaultLanguageService : IHtmlLanguageService, ITextLanguageService + { + private readonly ITranslationService _translationService; + + public DefaultLanguageService(ITranslationService translationService) + { + _translationService = translationService; + } + + """); + foreach (var kv in languageDict) + { + if (_braceRegex.IsMatch(kv.Value)) + { + writer.WriteLine( + $$""" + string ITextLanguageService.{{kv.Key}}(params object[] arguments) + { + return _translationService.TX("{{kv.Key}}", arguments); + } + + IHtmlContent IHtmlLanguageService.{{kv.Key}}(params object[] arguments) + { + return _translationService.T("{{kv.Key}}", arguments); + } + """); + } + else + { + writer.WriteLine( + $$""" + string ITextLanguageService.{{kv.Key}} => _translationService.TX("{{kv.Key}}"); + IHtmlContent IHtmlLanguageService.{{kv.Key}} => _translationService.T("{{kv.Key}}"); + """); + } + } + writer.WriteLine( + $$""" + } + + public static class TK + { + """); + foreach (var kv in languageDict) + { + writer.WriteLine( + $$""" + public const string {{kv.Key}} = "{{kv.Key}}"; + """); + } + writer.WriteLine( + $$""" + } + + #pragma warning restore CA1707 + """); + } + + using (var writer = new StreamWriter(tsPath.FullName)) + { + // @note: Keys that use HTML in their values are not exposed to TypeScript, as they won't + // work correctly in React. + var nonHtmlLanguageDict = new Dictionary(); + foreach (var kv in languageDict.Where(x => !_htmlRegex.IsMatch(x.Value))) + { + nonHtmlLanguageDict.Add(kv.Key, kv.Value); + } + if (nonHtmlLanguageDict.Count != languageDict.Count) + { + _logger.LogWarning($"{languageDict.Count - nonHtmlLanguageDict.Count} entries were excluded from TypeScript because their values contain HTML. 
HTML support for language values is deprecated."); + } + + var nonParameterKeys = nonHtmlLanguageDict.Keys.Where(x => !_braceRegex.IsMatch(languageDict[x])); + var parameterKeys = nonHtmlLanguageDict.Keys.Where(x => _braceRegex.IsMatch(languageDict[x])); + + writer.WriteLine( + $$""" + export type NonParameterizedLanguageKey = {{string.Join(" |\n ", nonParameterKeys.Select(x => $"\"{x}\""))}}; + export type ParameterizedLanguageKey = {{string.Join(" |\n ", parameterKeys.Select(x => $"\"{x}\""))}}; + export const TK: (NonParameterizedLanguageKey | ParameterizedLanguageKey)[] = [{{string.Join(",\n ", nonHtmlLanguageDict.Keys.Select(x => $"\"{x}\""))}}]; + """); + foreach (var key in nonParameterKeys) + { + writer.WriteLine($"export const {key}: NonParameterizedLanguageKey = \"{key}\";"); + } + foreach (var key in parameterKeys) + { + writer.WriteLine($"export const {key}: ParameterizedLanguageKey = \"{key}\";"); + } + } + + return Task.FromResult(0); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/HtmlToTextCaptureSpecification.cs b/UET/Redpoint.CloudFramework.CLI/HtmlToTextCaptureSpecification.cs new file mode 100644 index 00000000..29bfa934 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/HtmlToTextCaptureSpecification.cs @@ -0,0 +1,52 @@ +namespace Redpoint.CloudFramework.CLI +{ + using Redpoint.ProcessExecution; + using System; + using System.Text; + + internal class HtmlToTextCaptureSpecification : ICaptureSpecification + { + private readonly string _input; + private readonly StringBuilder _output; + private int _emptyNewLineCount; + + public HtmlToTextCaptureSpecification(string input, StringBuilder output) + { + _input = input; + _output = output; + _emptyNewLineCount = 0; + } + + public bool InterceptStandardInput => true; + + public bool InterceptStandardOutput => true; + + public bool InterceptStandardError => false; + + public void OnReceiveStandardError(string data) + { + throw new NotImplementedException(); + } + + public void 
OnReceiveStandardOutput(string data) + { + if (string.IsNullOrWhiteSpace(data)) + { + _emptyNewLineCount++; + } + else + { + _emptyNewLineCount = 0; + } + if (_emptyNewLineCount <= 1) + { + _output.AppendLine(data); + } + } + + public string? OnRequestStandardInputAtStartup() + { + return _input; + } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/IYarnInstallationService.cs b/UET/Redpoint.CloudFramework.CLI/IYarnInstallationService.cs new file mode 100644 index 00000000..1c981e68 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/IYarnInstallationService.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.CLI +{ + using System.Threading.Tasks; + + internal interface IYarnInstallationService + { + Task<(int exitCode, string? yarnPath)> InstallYarnIfNeededAsync(CancellationToken cancellationToken); + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/LanguageDictionary.cs b/UET/Redpoint.CloudFramework.CLI/LanguageDictionary.cs new file mode 100644 index 00000000..7226e54e --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/LanguageDictionary.cs @@ -0,0 +1,66 @@ +namespace Redpoint.CloudFramework.CLI +{ + using System.Collections; + using System.Collections.Generic; + using System.Text.Json.Serialization; + + [JsonConverter(typeof(LanguageDictionaryJsonConverter))] + internal class LanguageDictionary : IEnumerable> + { + private readonly Dictionary _dict; + private readonly List _keys; + + public LanguageDictionary() + { + _dict = new Dictionary(); + _keys = new List(); + } + + public string this[string key] + { + get => _dict[key]; + } + + public IReadOnlyList Keys => _keys; + + public void SortKeys() + { + _keys.Sort((a, b) => string.Compare(a, b, StringComparison.Ordinal)); + } + + public int Count => _dict.Count; + + public bool IsSorted() + { + string? 
lastKey = null; + foreach (var key in _keys) + { + if (lastKey != null && string.Compare(lastKey, key, StringComparison.Ordinal) != -1) + { + return false; + } + lastKey = key; + } + return true; + } + + public void Add(string k, string v) + { + _dict.Add(k, v); + _keys.Add(k); + } + + public IEnumerator> GetEnumerator() + { + foreach (var key in _keys) + { + yield return new KeyValuePair(key, _dict[key]); + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/LanguageDictionaryJsonConverter.cs b/UET/Redpoint.CloudFramework.CLI/LanguageDictionaryJsonConverter.cs new file mode 100644 index 00000000..142009ba --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/LanguageDictionaryJsonConverter.cs @@ -0,0 +1,44 @@ +namespace Redpoint.CloudFramework.CLI +{ + using System.Text.Json; + using System.Text.Json.Serialization; + + internal class LanguageDictionaryJsonConverter : JsonConverter + { + public override LanguageDictionary? 
Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + var value = new LanguageDictionary(); + if (reader.TokenType != JsonTokenType.StartObject) + { + throw new JsonException(); + } + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndObject) + { + break; + } + + if (reader.TokenType == JsonTokenType.PropertyName) + { + var propertyName = reader.GetString()!; + reader.Read(); + var propertyValue = reader.GetString()!; + value.Add(propertyName, propertyValue); + } + } + return value; + } + + public override void Write(Utf8JsonWriter writer, LanguageDictionary value, JsonSerializerOptions options) + { + writer.WriteStartObject(); + foreach (var kv in value) + { + writer.WritePropertyName(kv.Key); + writer.WriteStringValue(kv.Value); + } + writer.WriteEndObject(); + } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/LanguageJsonSerializerContext.cs b/UET/Redpoint.CloudFramework.CLI/LanguageJsonSerializerContext.cs new file mode 100644 index 00000000..7e7939c3 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/LanguageJsonSerializerContext.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.CLI +{ + using System.Text.Json.Serialization; + + [JsonSerializable(typeof(LanguageDictionary))] + internal partial class LanguageJsonSerializerContext : JsonSerializerContext + { + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/Program.cs b/UET/Redpoint.CloudFramework.CLI/Program.cs new file mode 100644 index 00000000..c1638b6a --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/Program.cs @@ -0,0 +1,59 @@ +extern alias RDCommandLine; + +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using RDCommandLine::Microsoft.Extensions.Logging.Console; +using Redpoint.CloudFramework.CLI; +using Redpoint.CommandLine; +using Redpoint.Logging.SingleLine; +using Redpoint.PathResolution; +using Redpoint.ProcessExecution; +using System.CommandLine; + +Crayon.Output.Disable(); + +// Turn off all 
Node.js deprecation warnings. They're not actionable. +Environment.SetEnvironmentVariable("NODE_NO_WARNINGS", "1"); + +var rootCommand = CommandLineBuilder.NewBuilder() + .AddGlobalRuntimeServices((builder, services) => + { + services.AddLogging(builder => + { + builder.ClearProviders(); + builder.SetMinimumLevel(LogLevel.Information); + builder.AddSingleLineConsoleFormatter(options => + { + options.OmitLogPrefix = false; + options.ColorBehavior = LoggerColorBehavior.Disabled; + }); + builder.AddSingleLineConsole(); + }); + services.AddProcessExecution(); + services.AddPathResolution(); + services.AddSingleton(); + }) + .SetGlobalExecutionHandler(async (sp, executeCommand) => + { + var logger = sp.GetRequiredService>(); + try + { + return await executeCommand().ConfigureAwait(true); + } + catch (Exception ex) + { + logger.LogError(ex, $"Uncaught exception during command execution: {ex}"); + return 1; + } + }) + .AddCommand(BuildClientApp.CreateCommand) + .AddCommand(GenerateOpenApiJson.CreateCommand) + .AddCommand(GenerateHtmlFromMjml.CreateCommand) + .AddCommand(GenerateStronglyTypedLanguageFiles.CreateCommand) + .Build(); + +var exitCode = await rootCommand.InvokeAsync(args).ConfigureAwait(false); +await Console.Out.FlushAsync().ConfigureAwait(false); +await Console.Error.FlushAsync().ConfigureAwait(false); +Environment.Exit(exitCode); +throw new BadImageFormatException(); \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework.CLI/README.md b/UET/Redpoint.CloudFramework.CLI/README.md new file mode 100644 index 00000000..052dd128 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/README.md @@ -0,0 +1,3 @@ +# Redpoint.CommandLine.CLI + +This package provides the CLI for Redpoint.CloudFramework and MSBuild targets. 
diff --git a/UET/Redpoint.CloudFramework.CLI/Redpoint.CloudFramework.CLI.csproj b/UET/Redpoint.CloudFramework.CLI/Redpoint.CloudFramework.CLI.csproj new file mode 100644 index 00000000..fc9e127e --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/Redpoint.CloudFramework.CLI.csproj @@ -0,0 +1,58 @@ + + + + + + + Redpoint.CloudFramework.CLI + Provides the command line tool and MSBuild targets for Redpoint.CloudFramework. + cloud framework, cli, msbuild + true + rcf-cli + Redpoint.CloudFramework.CLI.nuspec + version=$(PackageVersion);configuration=$(Configuration) + + + + The command line tool for Redpoint.CloudFramework. + Redpoint.CloudFramework CLI + Redpoint.CloudFramework CLI + rcf-cli + Exe + + + + false + + + + + + + + + RDCommandLine + + + + + + + + + + + + + + + + + + true + buildTransitive;build + None + + + + diff --git a/UET/Redpoint.CloudFramework.CLI/Redpoint.CloudFramework.CLI.nuspec b/UET/Redpoint.CloudFramework.CLI/Redpoint.CloudFramework.CLI.nuspec new file mode 100644 index 00000000..d50dc9b1 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/Redpoint.CloudFramework.CLI.nuspec @@ -0,0 +1,25 @@ + + + + Redpoint.CloudFramework.CLI + $version$ + June Rhodes + MIT + https://licenses.nuget.org/MIT + PackageIcon.png + README.md + The command line tool for Redpoint.CloudFramework. 
+ Redpoint Games + cloud framework, cli, msbuild + + + + + + + + + + + + \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework.CLI/Redpoint.CloudFramework.CLI.targets b/UET/Redpoint.CloudFramework.CLI/Redpoint.CloudFramework.CLI.targets new file mode 100644 index 00000000..f0149879 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/Redpoint.CloudFramework.CLI.targets @@ -0,0 +1,43 @@ + + + + + + false + + + + + $(MSBuildThisFileDirectory)..\tools\$(TargetFramework)\any\rcf-cli.dll + $(MSBuildThisFileDirectory)bin\$(Configuration)\$(TargetFramework)\rcf-cli.dll + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework.CLI/Utf8SanitizedCaptureSpecification.cs b/UET/Redpoint.CloudFramework.CLI/Utf8SanitizedCaptureSpecification.cs new file mode 100644 index 00000000..85fd99da --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/Utf8SanitizedCaptureSpecification.cs @@ -0,0 +1,86 @@ +namespace Redpoint.CloudFramework.CLI +{ + using Microsoft.Extensions.Logging; + using Redpoint.ProcessExecution; + using System; + using System.Text.Json; + using System.Text.RegularExpressions; + + internal class YarnInstallCaptureSpecification : ICaptureSpecification + { + private readonly ILogger _logger; + private readonly YarnLogEntryJsonSerializerContext _serializer; + + private static readonly Dictionary _replacements = new Dictionary + { + { "┬╖", "-" }, + { "Γöî", "/" }, + { "Γöö", "\\" }, + { "Γöé", "|" }, + }; + private static readonly Regex _consoleRegex = new Regex("\x1b\\[[^;];"); + + public YarnInstallCaptureSpecification(ILogger logger) + { + _logger = logger; + _serializer = new YarnLogEntryJsonSerializerContext(); + } + + public bool InterceptStandardInput => false; + + public bool InterceptStandardOutput => true; + + public bool InterceptStandardError => false; + + public void OnReceiveStandardError(string data) + { + throw new NotImplementedException(); + } + + public void 
OnReceiveStandardOutput(string data) + { + YarnLogEntry? entry; + try + { + entry = JsonSerializer.Deserialize(data, _serializer.YarnLogEntry); + } + catch + { + return; + } + if (entry == null) + { + return; + } + + var indent = entry.Indent ?? string.Empty; + var message = entry.Data ?? string.Empty; + indent = _consoleRegex.Replace(indent, string.Empty); + message = _consoleRegex.Replace(message, string.Empty); + foreach (var replacement in _replacements) + { + indent = indent.Replace(replacement.Key, replacement.Value, StringComparison.Ordinal); + message = message.Replace(replacement.Key, replacement.Value, StringComparison.Ordinal); + } + + switch (entry.Type) + { + case "info": + _logger.LogInformation($"{entry.DisplayName} {indent}{message}"); + break; + case "warning": + _logger.LogWarning($"{entry.DisplayName} {indent}{message}"); + break; + case "error": + default: + _logger.LogError($"{entry.DisplayName} {indent}{message}"); + break; + } + } + + public string? OnRequestStandardInputAtStartup() + { + throw new NotImplementedException(); + } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/YarnLogEntry.cs b/UET/Redpoint.CloudFramework.CLI/YarnLogEntry.cs new file mode 100644 index 00000000..c750aefb --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/YarnLogEntry.cs @@ -0,0 +1,19 @@ +namespace Redpoint.CloudFramework.CLI +{ + using System.Text.Json.Serialization; + + internal class YarnLogEntry + { + [JsonPropertyName("type")] + public string? Type { get; set; } + + [JsonPropertyName("displayName")] + public string? DisplayName { get; set; } + + [JsonPropertyName("indent")] + public string? Indent { get; set; } + + [JsonPropertyName("data")] + public string? 
Data { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.CLI/YarnLogEntryJsonSerializerContext.cs b/UET/Redpoint.CloudFramework.CLI/YarnLogEntryJsonSerializerContext.cs new file mode 100644 index 00000000..94ff5538 --- /dev/null +++ b/UET/Redpoint.CloudFramework.CLI/YarnLogEntryJsonSerializerContext.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.CLI +{ + using System.Text.Json.Serialization; + + [JsonSerializable(typeof(YarnLogEntry))] + internal partial class YarnLogEntryJsonSerializerContext : JsonSerializerContext + { + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/CloudFrameworkTestEnvironment.cs b/UET/Redpoint.CloudFramework.Tests.Shared/CloudFrameworkTestEnvironment.cs new file mode 100644 index 00000000..961c78a9 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/CloudFrameworkTestEnvironment.cs @@ -0,0 +1,368 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Docker.DotNet; + using Docker.DotNet.Models; + using Google.Api.Gax; + using Google.Cloud.Datastore.V1; + using Grpc.Core; + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using Redpoint.CloudFramework.Counter; + using Redpoint.CloudFramework.GoogleInfrastructure; + using Redpoint.CloudFramework.Locking; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Datastore; + using Redpoint.CloudFramework.Repository; + using Redpoint.CloudFramework.Repository.Hooks; + using Redpoint.CloudFramework.Repository.Transaction; + using Redpoint.CloudFramework.Startup; + using Redpoint.CloudFramework.Tracing; + using StackExchange.Redis; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + using Xunit; + using Xunit.Abstractions; + using Xunit.Sdk; + using Redpoint.CloudFramework.Prefix; + using System.Diagnostics; + + public class 
CloudFrameworkTestEnvironment : IAsyncLifetime, ICloudFrameworkTestEnvironment + { +#pragma warning disable CS8618 + public CloudFrameworkTestEnvironment(IMessageSink messageSink) + { + _messageSink = messageSink; + } +#pragma warning restore CS8618 + + public IServiceProvider Services { get; private set; } + + public ICloudFrameworkTestEnvironment CreateWithServices(Action servicesFactory) + { + ArgumentNullException.ThrowIfNull(servicesFactory); + + var services = new ServiceCollection(); + + _messageSink.OnMessage(new DiagnosticMessage($"Building service provider")); + + if (Environment.GetEnvironmentVariable("GITLAB_CI") == "true") + { + services.AddSingleton(sp => + { + return new ConnectionMultiplexerProxy( + $"redis:6379", + sp.GetRequiredService>()); + }); + } + + var hostEnvironment = new TestHostEnvironment(); + services.AddSingleton(hostEnvironment); + services.AddSingleton(sp => + { + return new ConfigurationBuilder().Build(); + }); + services.AddSingleton(); + new Configurator().Configure(hostEnvironment, services); + services.AddSingleton(); + + // Add namespaced services. + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddSingleton(); + services.AddSingleton(); + + services.AddHttpClient(); + + servicesFactory(services); + +#pragma warning disable IL3050 // Calling members annotated with 'RequiresDynamicCodeAttribute' may break functionality when AOT compiling. + return new NestedCloudFrameworkTestEnvironment(services.BuildServiceProvider()); +#pragma warning restore IL3050 // Calling members annotated with 'RequiresDynamicCodeAttribute' may break functionality when AOT compiling. + } + + private readonly IMessageSink _messageSink; + + public async Task InitializeAsync() + { + if (Environment.GetEnvironmentVariable("GITLAB_CI") == "true") + { + // Redis set up as part of CreateWithServices. + // PUBSUB_SERVER is set by CI script. + // DATASTORE_SERVER is set by CI script. 
+ } + else if (Process.GetProcessesByName("Rancher Desktop").Length != 0) + { + // Rancher Desktop doesn't work with the Docker C# library for some + // reason; just expect things to have been manually started. + Environment.SetEnvironmentVariable("REDIS_SERVER", "localhost:61000"); + Environment.SetEnvironmentVariable("PUBSUB_SERVER", "localhost:61001"); + Environment.SetEnvironmentVariable("DATASTORE_SERVER", "localhost:61002"); + } + else + { + // Spin up Redis and Datastore Emulator on the local Docker instance. + var client = new DockerClientConfiguration().CreateClient(); + + // Pull all images. + var currentImages = await client.Images.ListImagesAsync(new ImagesListParameters + { + All = true, + }).ConfigureAwait(true); + foreach (var expectedContainer in ContainerConfiguration._expectedContainers) + { + _messageSink.OnMessage(new DiagnosticMessage($"Pulling image: {expectedContainer.image}")); + await client.Images.CreateImageAsync(new ImagesCreateParameters + { + FromImage = expectedContainer.image, + }, null, new NullLogProgress()).ConfigureAwait(true); + } + + // Cleanup any old testing containers. 
+ var containers = await client.Containers.ListContainersAsync(new ContainersListParameters + { + All = true, + }).ConfigureAwait(true); + foreach (var container in containers) + { + if (container.Names.Any(x => x.StartsWith($"/rcftest", StringComparison.Ordinal) || x.StartsWith($"rcftest", StringComparison.Ordinal)) && + (container.Status.Contains("hour", StringComparison.Ordinal) || + container.Status.Contains("day", StringComparison.Ordinal))) + { + _messageSink.OnMessage(new DiagnosticMessage($"Stopping container: {container.ID}")); + await client.Containers.StopContainerAsync(container.ID, new ContainerStopParameters + { + WaitBeforeKillSeconds = 0, + }).ConfigureAwait(true); + _messageSink.OnMessage(new DiagnosticMessage($"Removing container: {container.ID}")); + await client.Containers.RemoveContainerAsync(container.ID, new ContainerRemoveParameters + { + Force = true, + }).ConfigureAwait(true); + } + } + + // Create containers if they don't exist. + containers = await client.Containers.ListContainersAsync(new ContainersListParameters + { + All = true, + }).ConfigureAwait(true); + foreach (var expectedContainer in ContainerConfiguration._expectedContainers) + { + if (!containers.Any(x => x.Names.Any(y => y.EndsWith(expectedContainer.name, StringComparison.Ordinal)))) + { + _messageSink.OnMessage(new DiagnosticMessage($"Creating container: {expectedContainer.name}")); + var createdContainerConfig = new CreateContainerParameters + { + Name = expectedContainer.name, + Image = expectedContainer.image, + Cmd = expectedContainer.arguments, + ExposedPorts = new Dictionary + { + { $"{expectedContainer.port}/tcp", new EmptyStruct() }, + }, + HostConfig = new HostConfig + { + PortBindings = new Dictionary> + { + { + $"{expectedContainer.port}/tcp", + new List + { + new PortBinding + { + HostIP = "0.0.0.0", + HostPort = null, + } + } + }, + }, + }, + Env = new List + { + "CLOUDSDK_CORE_PROJECT=local-dev" + }, + }; + var createdContainer = await 
client.Containers.CreateContainerAsync(createdContainerConfig).ConfigureAwait(true); + + _messageSink.OnMessage(new DiagnosticMessage($"Starting container: {createdContainer.ID}")); + await client.Containers.StartContainerAsync(createdContainer.ID, new ContainerStartParameters + { + }).ConfigureAwait(true); + } + } + + // Now get the ports they were mapped to on the host. + containers = await client.Containers.ListContainersAsync(new ContainersListParameters + { + All = true, + }).ConfigureAwait(true); + var didSetRedis = false; + var didSetPubsub = false; + var didSetDatastore = false; + foreach (var expectedContainer in ContainerConfiguration._expectedContainers) + { + var container = containers.FirstOrDefault(x => x.Names.Any(y => y.EndsWith(expectedContainer.name, StringComparison.Ordinal))); + if (container == null) + { + _messageSink.OnMessage(new DiagnosticMessage($"Unable to locate container with name: {expectedContainer.name}")); + throw new InvalidOperationException("Failed to start required container for tests."); + } + + var targetPort = container.Ports.First(x => x.PrivatePort == expectedContainer.port); + var targetConnection = $"localhost:{targetPort.PublicPort}"; + switch (expectedContainer.type) + { + case "redis": + Environment.SetEnvironmentVariable("REDIS_SERVER", targetConnection); + didSetRedis = true; + break; + case "pubsub": + Environment.SetEnvironmentVariable("PUBSUB_SERVER", targetConnection); + didSetPubsub = true; + break; + case "datastore": + Environment.SetEnvironmentVariable("DATASTORE_SERVER", targetConnection); + didSetDatastore = true; + break; + } + } + if (!didSetRedis || !didSetPubsub || !didSetDatastore) + { + throw new InvalidOperationException("Could not set test environment up correctly based on containers."); + } + } + + Services = CreateWithServices(_ => { }).Services; + + _messageSink.OnMessage(new DiagnosticMessage($"Waiting for Datastore to be operational...")); + + // Wait for Datastore to be operational. 
+ var deadline = DateTime.UtcNow.AddSeconds(30); + for (int i = 0; i < 30; i++) + { + try + { + var googleServices = Services.GetRequiredService(); + var datastore = googleServices.Build( + DatastoreClient.DefaultEndpoint, + DatastoreClient.DefaultScopes); + var txn = await datastore.BeginTransactionAsync(new BeginTransactionRequest + { + ProjectId = "local-dev", + }, new Google.Api.Gax.Grpc.CallSettings(null, Expiration.FromTimeout(TimeSpan.FromMilliseconds(100)), null, null, null, null)).ConfigureAwait(true); + await datastore.RollbackAsync(new RollbackRequest + { + ProjectId = "local-dev", + Transaction = txn.Transaction, + }, new Google.Api.Gax.Grpc.CallSettings(null, Expiration.FromTimeout(TimeSpan.FromMilliseconds(100)), null, null, null, null)).ConfigureAwait(true); + break; + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.Unavailable || ex.StatusCode == StatusCode.DeadlineExceeded) + { + if (DateTime.UtcNow > deadline) + { + _messageSink.OnMessage(new DiagnosticMessage($"Ran out of time waiting for Datastore to become ready.")); + break; + } + else + { + _messageSink.OnMessage(new DiagnosticMessage($"Datastore not ready yet on attempt #{i + 1}.")); + await Task.Delay(500).ConfigureAwait(true); + continue; + } + } + } + + _messageSink.OnMessage(new DiagnosticMessage($"Starting hosted services...")); + + foreach (var hostedService in Services.GetServices()) + { + await hostedService.StartAsync(CancellationToken.None).ConfigureAwait(true); + } + + _messageSink.OnMessage(new DiagnosticMessage($"Tests are ready to execute.")); + } + + public async Task DisposeAsync() + { + _messageSink.OnMessage(new DiagnosticMessage($"Stopping hosted services...")); + + foreach (var hostedService in Services.GetServices()) + { + await hostedService.StopAsync(CancellationToken.None).ConfigureAwait(true); + } + + if (Environment.GetEnvironmentVariable("GITLAB_CI") == "true") + { + // No cleanup operations here. 
+ } + else + { + // Shutdown containers on the local Docker instance. + var client = new DockerClientConfiguration().CreateClient(); + + // Cleanup containers if they exist. + /* + var containers = await client.Containers.ListContainersAsync(new Docker.DotNet.Models.ContainersListParameters + { + All = true, + }); + foreach (var expectedContainer in _expectedContainers) + { + var container = containers.FirstOrDefault(x => x.Names.Any(y => y.EndsWith(expectedContainer.name))); + if (container != null) + { + _messageSink.OnMessage(new DiagnosticMessage($"Stopping container: {container.ID}")); + await client.Containers.StopContainerAsync(container.ID, new ContainerStopParameters + { + WaitBeforeKillSeconds = 0, + }); + _messageSink.OnMessage(new DiagnosticMessage($"Removing container: {container.ID}")); + await client.Containers.RemoveContainerAsync(container.ID, new ContainerRemoveParameters + { + Force = true, + }); + } + } + */ + } + + _messageSink.OnMessage(new DiagnosticMessage($"Tests execution has finished.")); + } + + private class TestRepositoryHook : IGlobalRepositoryHook + { + public Task MutateEntityBeforeWrite(string @namespace, Entity entity) + { + return Task.CompletedTask; + } + + public Task PostCreate(string @namespace, T model, IModelTransaction? transaction) where T : Model, new() + { + return Task.CompletedTask; + } + + public Task PostDelete(string @namespace, T model, IModelTransaction? transaction) where T : Model, new() + { + return Task.CompletedTask; + } + + public Task PostUpdate(string @namespace, T model, IModelTransaction? transaction) where T : Model, new() + { + return Task.CompletedTask; + } + + public Task PostUpsert(string @namespace, T model, IModelTransaction? 
transaction) where T : Model, new() + { + return Task.CompletedTask; + } + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/Configurator.cs b/UET/Redpoint.CloudFramework.Tests.Shared/Configurator.cs new file mode 100644 index 00000000..4733c137 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/Configurator.cs @@ -0,0 +1,15 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Redpoint.CloudFramework.Startup; + + internal class Configurator : BaseConfigurator + { + public void Configure(IHostEnvironment hostEnvironment, IServiceCollection services) + { + PreStartupConfigureServices(hostEnvironment, services); + PostStartupConfigureServices(services); + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/ContainerConfiguration.cs b/UET/Redpoint.CloudFramework.Tests.Shared/ContainerConfiguration.cs new file mode 100644 index 00000000..b4ab33a4 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/ContainerConfiguration.cs @@ -0,0 +1,51 @@ +namespace Redpoint.CloudFramework.Tests +{ + using System; + using System.Collections.Generic; + + internal static class ContainerConfiguration + { + internal static readonly IReadOnlyList<(string type, string name, string image, string[] arguments, int port)> _expectedContainers = new List<(string type, string name, string image, string[] arguments, int port)> + { + ( + "redis", + $"rcftest-redis", + "redis:6.0.10", + Array.Empty(), + 6379 + ), + ( + "pubsub", + $"rcftest-pubsub", + "gcr.io/google.com/cloudsdktool/cloud-sdk:latest", + new[] { + "gcloud", + "beta", + "emulators", + "pubsub", + "start", + "--host-port=0.0.0.0:9000" + }, + 9000 + ), + ( + "datastore", + $"rcftest-datastore", + "gcr.io/google.com/cloudsdktool/cloud-sdk:latest", + new[] { + "gcloud", + "beta", + "emulators", + "datastore", + "start", + // Firestore guarantees strong consistency now, so this + // should be reasonably safe. 
+ "--consistency=1.0", + "--host-port=0.0.0.0:9001", + "--no-store-on-disk" + }, + 9001 + ), + }; + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/ICloudFrameworkTestEnvironment.cs b/UET/Redpoint.CloudFramework.Tests.Shared/ICloudFrameworkTestEnvironment.cs new file mode 100644 index 00000000..f11c4d04 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/ICloudFrameworkTestEnvironment.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.Tests +{ + using System; + + public interface ICloudFrameworkTestEnvironment + { + IServiceProvider Services { get; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/NestedCloudFrameworkTestEnvironment.cs b/UET/Redpoint.CloudFramework.Tests.Shared/NestedCloudFrameworkTestEnvironment.cs new file mode 100644 index 00000000..5de661c0 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/NestedCloudFrameworkTestEnvironment.cs @@ -0,0 +1,14 @@ +namespace Redpoint.CloudFramework.Tests +{ + using System; + + internal class NestedCloudFrameworkTestEnvironment : ICloudFrameworkTestEnvironment + { + public NestedCloudFrameworkTestEnvironment(IServiceProvider serviceProvider) + { + Services = serviceProvider; + } + + public IServiceProvider Services { get; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/NullLogProgress.cs b/UET/Redpoint.CloudFramework.Tests.Shared/NullLogProgress.cs new file mode 100644 index 00000000..c1ca641c --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/NullLogProgress.cs @@ -0,0 +1,12 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Docker.DotNet.Models; + using System; + + internal class NullLogProgress : IProgress + { + public void Report(JSONMessage value) + { + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/RealFirestoreTestEnvironment.cs b/UET/Redpoint.CloudFramework.Tests.Shared/RealFirestoreTestEnvironment.cs new file mode 100644 index 00000000..22a14e64 --- /dev/null +++ 
b/UET/Redpoint.CloudFramework.Tests.Shared/RealFirestoreTestEnvironment.cs @@ -0,0 +1,66 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using Redpoint.CloudFramework.Counter; + using Redpoint.CloudFramework.Locking; + using Redpoint.CloudFramework.Repository.Datastore; + using Redpoint.CloudFramework.Repository; + using Redpoint.CloudFramework.Startup; + using Redpoint.CloudFramework.Tracing; + using StackExchange.Redis; + using System; + using Redpoint.CloudFramework.Prefix; + + public static class RealFirestoreTestEnvironment + { + public static ICloudFrameworkTestEnvironment CreateWithServices( + Action servicesFactory) + { + ArgumentNullException.ThrowIfNull(servicesFactory); + + var services = new ServiceCollection(); + + if (Environment.GetEnvironmentVariable("GITLAB_CI") == "true") + { + services.AddSingleton(sp => + { + return new ConnectionMultiplexerProxy( + $"redis:6379", + sp.GetRequiredService>()); + }); + } + else + { + Environment.SetEnvironmentVariable("REDIS_SERVER", "localhost:61000"); + } + + var hostEnvironment = new TestRealFirestoreHostEnvironment(); + services.AddSingleton(hostEnvironment); + services.AddSingleton(sp => + { + return new ConfigurationBuilder().Build(); + }); + new Configurator().Configure(hostEnvironment, services); + services.AddSingleton(); + + // Add namespaced services. + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddSingleton(); + services.AddSingleton(); + + services.AddHttpClient(); + + servicesFactory(services); + +#pragma warning disable IL3050 // Calling members annotated with 'RequiresDynamicCodeAttribute' may break functionality when AOT compiling. 
+ return new NestedCloudFrameworkTestEnvironment(services.BuildServiceProvider()); +#pragma warning restore IL3050 // Calling members annotated with 'RequiresDynamicCodeAttribute' may break functionality when AOT compiling. + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/Redpoint.CloudFramework.Tests.Shared.csproj b/UET/Redpoint.CloudFramework.Tests.Shared/Redpoint.CloudFramework.Tests.Shared.csproj new file mode 100644 index 00000000..0ef9ab97 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/Redpoint.CloudFramework.Tests.Shared.csproj @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/TestHostEnvironment.cs b/UET/Redpoint.CloudFramework.Tests.Shared/TestHostEnvironment.cs new file mode 100644 index 00000000..11eefa7a --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/TestHostEnvironment.cs @@ -0,0 +1,13 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Microsoft.Extensions.FileProviders; + using Microsoft.Extensions.Hosting; + + internal class TestHostEnvironment : IHostEnvironment + { + public string ApplicationName { get; set; } = null!; + public IFileProvider ContentRootFileProvider { get; set; } = null!; + public string ContentRootPath { get; set; } = null!; + public string EnvironmentName { get; set; } = Environments.Development; + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/TestRealFirestoreHostEnvironment.cs b/UET/Redpoint.CloudFramework.Tests.Shared/TestRealFirestoreHostEnvironment.cs new file mode 100644 index 00000000..3ca91193 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/TestRealFirestoreHostEnvironment.cs @@ -0,0 +1,13 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Microsoft.Extensions.FileProviders; + using Microsoft.Extensions.Hosting; + + internal class TestRealFirestoreHostEnvironment : IHostEnvironment + { + public string ApplicationName { get; set; } = null!; + public IFileProvider ContentRootFileProvider { get; set; } = 
null!; + public string ContentRootPath { get; set; } = null!; + public string EnvironmentName { get; set; } = Environments.Production; + } +} diff --git a/UET/Redpoint.CloudFramework.Tests.Shared/TestTenantService.cs b/UET/Redpoint.CloudFramework.Tests.Shared/TestTenantService.cs new file mode 100644 index 00000000..073b1ab5 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests.Shared/TestTenantService.cs @@ -0,0 +1,24 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Google.Cloud.Datastore.V1; + using System; + using System.Threading.Tasks; + + internal class TestTenantService : ICurrentTenantService + { + private class TestTenant : ICurrentTenant + { + public string DatastoreNamespace => "test"; + } + + public Task GetTenant() + { + return Task.FromResult((ICurrentTenant)new TestTenant()); + } + + public Task GetTenantDatastoreKeyFromNamespace(string @namespace) + { + throw new NotImplementedException(); + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/BadModel.cs b/UET/Redpoint.CloudFramework.Tests/BadModel.cs new file mode 100644 index 00000000..c155f662 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/BadModel.cs @@ -0,0 +1,11 @@ +using Redpoint.CloudFramework.Models; + +namespace Redpoint.CloudFramework.Tests +{ + [Kind("cf_badModel")] + public class BadModel : AttributedModel + { + [Type(FieldType.String), Indexed] + public object? 
badField { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/BatchingTests.cs b/UET/Redpoint.CloudFramework.Tests/BatchingTests.cs new file mode 100644 index 00000000..6c433a07 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/BatchingTests.cs @@ -0,0 +1,196 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Redpoint.Collections; + using Redpoint.CloudFramework.Collections.Batching; + using System.Collections.Generic; + using System.Security.Cryptography; + using System.Threading.Tasks; + using Xunit; + using Microsoft.Extensions.DependencyInjection; + using Redpoint.CloudFramework.Repository.Layers; + + [Collection("CloudFramework Test")] + public class BatchingTests + { + class SecondaryObject + { + } + + class TestObject + { + public required int Index { get; init; } + public required int KeyA { get; init; } + public required int KeyB { get; init; } + } + + class QueryableDictionary + { + private readonly Dictionary _data; + + public QueryableDictionary(Dictionary data) + { + _data = data; + } + + public async IAsyncEnumerable> LookupByKey(IAsyncEnumerable keys) + { + await Task.Yield(); + await foreach (var key in keys.ConfigureAwait(false)) + { + yield return new KeyValuePair(key, _data[key]); + } + } + } + + private readonly CloudFrameworkTestEnvironment _env; + + public const int DefaultDelayMs = 0; + + public BatchingTests(CloudFrameworkTestEnvironment env) + { + _env = env; + } + + [Fact] + public async Task BatchJoinByKeyLookupAssociatesCorrectly() + { + var secondaryA = new Dictionary(); + var secondaryB = new Dictionary(); + for (int i = 0; i < 100; i++) + { + secondaryA.Add(i, new SecondaryObject()); + secondaryB.Add(i, new SecondaryObject()); + } + + var primary = new List(); + for (int i = 0; i < 500; i++) + { + primary.Add(new TestObject + { + Index = i, + KeyA = RandomNumberGenerator.GetInt32(100), + KeyB = RandomNumberGenerator.GetInt32(100), + }); + } + + var queryableA = new QueryableDictionary(secondaryA); + var 
queryableB = new QueryableDictionary(secondaryB); + + var results = await primary + .BatchInto(100) + .ToAsyncEnumerable() + .AsBatchedAsyncEnumerable() + .JoinByDistinctKeyAwait( + p => p.KeyA, + queryableA.LookupByKey) + .JoinByDistinctKeyAwait( + p => p.KeyB, + queryableB.LookupByKey, + (existing, @new) => (a: existing, b: @new)) + .ToListAsync() + .ConfigureAwait(true); + + Assert.Equal(500, results.Count); + for (int i = 0; i < 500; i++) + { + Assert.NotNull(results[i].value); + Assert.NotNull(results[i].related.a); + Assert.NotNull(results[i].related.b); + Assert.Equal(i, results[i].value.Index); + Assert.Same(secondaryA[primary[i].KeyA], results[i].related.a); + Assert.Same(secondaryB[primary[i].KeyB], results[i].related.b); + } + } + + [Fact] + public async Task BatchQueryJoinOverDatastore() + { + var layer = _env.Services.GetRequiredService(); + + var secondary = await layer.CreateAsync( + string.Empty, + Enumerable.Range(0, 100) + .ToAsyncEnumerable() + .Select(x => new TestModel + { + forTest = "Secondary-BatchQueryJoinOverDatastore", + number1 = x, + }), + null, + null, + CancellationToken.None) + .ToListAsync().ConfigureAwait(true); + + var primary = await layer.CreateAsync( + string.Empty, + Enumerable.Range(0, 500) + .ToAsyncEnumerable() + .Select(x => + { + var idx = RandomNumberGenerator.GetInt32(secondary.Count); + return new TestModel + { + forTest = "Primary-BatchQueryJoinOverDatastore", + number1 = x, + number2 = idx, + keyValue = secondary[idx].Key, + }; + }), + null, + null, + CancellationToken.None) + .ToListAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var loadedModels = await layer + .LoadAsync( + string.Empty, + primary.Select(x => x.Key).ToAsyncEnumerable(), + null, + null, + CancellationToken.None) + .Select(x => x.Value) + .WhereNotNull() + .ToArrayAsync() + .ConfigureAwait(true); + Assert.Equal(500, loadedModels.Length); + }).ConfigureAwait(true); + + var results = 
await layer + .LoadAsync( + string.Empty, + primary.Select(x => x.Key!).ToAsyncEnumerable(), + null, + null, + CancellationToken.None) + .JoinByDistinctKeyAwait( + p => p.Value!.keyValue!, + (keys, ct) => layer.LoadAsync( + string.Empty, + keys, + null, + null, + ct)) + .ToListAsync() + .ConfigureAwait(true); + Assert.Equal(500, results.Count); + for (int i = 0; i < 500; i++) + { + Assert.NotNull(results[i].value.Key); + Assert.NotNull(results[i].value.Value); + Assert.NotNull(results[i].related); + Assert.Equal(i, results[i].value.Value!.number1); + Assert.Equal( + secondary[(int)results[i].value.Value!.number2!.Value].Key, + results[i].value.Value!.keyValue); + Assert.Equal( + secondary[(int)results[i].value.Value!.number2!.Value].Key, + results[i].related!.Key); + Assert.Equal( + secondary[(int)results[i].value.Value!.number2!.Value].number1, + results[i].related!.number1); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/ClassificationTests.cs b/UET/Redpoint.CloudFramework.Tests/ClassificationTests.cs new file mode 100644 index 00000000..dc04c6ad --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/ClassificationTests.cs @@ -0,0 +1,183 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Redpoint.Collections; + using System.Collections.Generic; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + using Xunit; + + public class ClassificationTests + { + private static readonly int[] _sourceArray = new[] { 4, 5, 6 }; + private static readonly int[] _sourceArray2 = new[] { + 11, + 22, + 31, + 42, + 91, + 52, + 61, + 72, + 81, + }; + + [Fact] + public void CreateClassifer() + { + var inputs = _sourceArray.ToAsyncEnumerable(); + + inputs.Classify(x => x >= 5 ? "high" : "low"); + } + + [Fact] + public void ConnectClassifer() + { + var inputs = _sourceArray.ToAsyncEnumerable(); + + inputs.Classify(x => x >= 5 ? 
"high" : "low") + .AndForClassification("low", input => input * 10) + .AndForClassification("high", input => input * 100); + } + + [Fact] + public async Task IterateClassifier() + { + var inputs = _sourceArray.ToAsyncEnumerable(); + + var results = await inputs.Classify(x => x >= 5 ? "high" : "low") + .AndForClassification("low", input => input * 10) + .AndForClassification("high", input => input * 100) + .ToListAsync().ConfigureAwait(true); + + Assert.Contains(40, results); + Assert.Contains(500, results); + Assert.Contains(600, results); + } + + private class ObservePulledValues : IAsyncEnumerable + { + private readonly IAsyncEnumerable _input; + + public ObservePulledValues(IAsyncEnumerable input) + { + _input = input; + } + + public List PulledValues { get; } = new List(); + + public async IAsyncEnumerator GetAsyncEnumerator(CancellationToken cancellationToken = default) + { + await foreach (var value in _input.WithCancellation(cancellationToken)) + { + PulledValues.Add(value); + yield return value; + } + } + } + + [Fact] + public async Task CancellationTokenBehavesAsExpected() + { + var inputs = new ObservePulledValues(_sourceArray2.ToAsyncEnumerable()); + + await foreach (var value in inputs.ConfigureAwait(false)) + { + if (value == 91) + { + break; + } + } + + Assert.Contains(11, inputs.PulledValues); + Assert.Contains(22, inputs.PulledValues); + Assert.Contains(31, inputs.PulledValues); + Assert.Contains(42, inputs.PulledValues); + Assert.Contains(91, inputs.PulledValues); + Assert.DoesNotContain(52, inputs.PulledValues); + Assert.DoesNotContain(61, inputs.PulledValues); + Assert.DoesNotContain(72, inputs.PulledValues); + Assert.DoesNotContain(81, inputs.PulledValues); + } + + [Fact] + public async Task ClassifierOnlyPullsAlmostNeededValues() + { + var inputs = new ObservePulledValues(_sourceArray2.ToAsyncEnumerable()); + + var iteratedValues = new List(); + + await foreach (var value in + inputs.Classify(x => (x % 2 == 0) ? 
"2" : "1") + .AndForClassification("1", input => input * 10) + .AndForClassification("2", input => input * 100).ConfigureAwait(false)) + { + iteratedValues.Add(value); + + if (value == 910) + { + break; + } + } + + Assert.Equal(3, iteratedValues.Count); + + Assert.Contains(110, iteratedValues); + Assert.Contains(310, iteratedValues); + Assert.Contains(910, iteratedValues); + + Assert.Contains(11, inputs.PulledValues); + Assert.Contains(22, inputs.PulledValues); + Assert.Contains(31, inputs.PulledValues); + Assert.Contains(42, inputs.PulledValues); + Assert.Contains(91, inputs.PulledValues); + Assert.DoesNotContain(52, inputs.PulledValues); + Assert.DoesNotContain(61, inputs.PulledValues); + Assert.DoesNotContain(72, inputs.PulledValues); + Assert.DoesNotContain(81, inputs.PulledValues); + } + + [Fact] + public async Task ClassifierReturnsOtherValuesWithSlowProcessor() + { + var inputs = new ObservePulledValues(_sourceArray2.ToAsyncEnumerable()); + + var iteratedValues = new List(); + + await foreach (var value in + inputs.Classify(x => (x % 2 == 0) ? 
"2" : "1") + .AndForClassificationAwait("1", async input => { await Task.Delay(10).ConfigureAwait(true); return input * 10; }) + .AndForClassification("2", input => input * 100).ConfigureAwait(false)) + { + iteratedValues.Add(value); + + if (value == 910) + { + break; + } + } + + Assert.Equal(7, iteratedValues.Count); + + Assert.Contains(2200, iteratedValues); + Assert.Contains(4200, iteratedValues); + Assert.Contains(5200, iteratedValues); + Assert.Contains(7200, iteratedValues); + Assert.Contains(110, iteratedValues); + Assert.Contains(310, iteratedValues); + Assert.Contains(910, iteratedValues); + Assert.DoesNotContain(610, iteratedValues); + Assert.DoesNotContain(810, iteratedValues); + + Assert.Contains(11, inputs.PulledValues); + Assert.Contains(22, inputs.PulledValues); + Assert.Contains(31, inputs.PulledValues); + Assert.Contains(42, inputs.PulledValues); + Assert.Contains(91, inputs.PulledValues); + Assert.Contains(52, inputs.PulledValues); + Assert.Contains(61, inputs.PulledValues); + Assert.Contains(72, inputs.PulledValues); + Assert.Contains(81, inputs.PulledValues); + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/CloudFrameworkCollectionFixture.cs b/UET/Redpoint.CloudFramework.Tests/CloudFrameworkCollectionFixture.cs new file mode 100644 index 00000000..e00ea8e5 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/CloudFrameworkCollectionFixture.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Xunit; + + [CollectionDefinition("CloudFramework Test")] + public class CloudFrameworkCollectionFixture : ICollectionFixture + { + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/DataProtectorTests.cs b/UET/Redpoint.CloudFramework.Tests/DataProtectorTests.cs new file mode 100644 index 00000000..5655d17a --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/DataProtectorTests.cs @@ -0,0 +1,60 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.Primitives; + 
using Redpoint.CloudFramework.DataProtection; + using System; + using System.Collections.Generic; + using System.Text; + using Xunit; + + public class DataProtectorTests + { + internal class MockConfiguration : IConfiguration + { + public string? this[string key] + { + get + { + switch (key) + { + case "CloudFramework:Security:AES:Key": + return "/kiievoYGVVUONHYJBwUhjjQjgwUkhRpGF6F/luR7YY="; + case "CloudFramework:Security:AES:IV": + return "keqOqvOgSbQU1/cPjFM9FA=="; + default: + return null; + } + } + set => throw new NotImplementedException(); + } + + public IEnumerable GetChildren() + { + throw new NotImplementedException(); + } + + public IChangeToken GetReloadToken() + { + throw new NotImplementedException(); + } + + public IConfigurationSection GetSection(string key) + { + throw new NotImplementedException(); + } + } + + [Fact] + public void TestDecryption() + { + var protector1 = new StaticDataProtector(null!, new MockConfiguration(), null!); + var protector2 = new StaticDataProtector(null!, new MockConfiguration(), null!); + + var originalValue = "Hello World"; + var encryptedValue = protector1.Protect(Encoding.ASCII.GetBytes(originalValue)); + var decryptedValue = Encoding.ASCII.GetString(protector2.Unprotect(encryptedValue)); + Assert.Equal(originalValue, decryptedValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/DatastoreRepositoryLayerTests.cs b/UET/Redpoint.CloudFramework.Tests/DatastoreRepositoryLayerTests.cs new file mode 100644 index 00000000..d313463f --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/DatastoreRepositoryLayerTests.cs @@ -0,0 +1,2599 @@ +using Google.Cloud.Datastore.V1; +using Google.Protobuf; +using Google.Protobuf.WellKnownTypes; +using Google.Type; +using Grpc.Core; +using Microsoft.Extensions.DependencyInjection; +using NodaTime; +using Redpoint.CloudFramework.Repository; +using Redpoint.CloudFramework.Repository.Layers; +using Xunit; +using Xunit.Sdk; +using static Google.Cloud.Datastore.V1.Key.Types; 
+using Value = Google.Cloud.Datastore.V1.Value; + +namespace Redpoint.CloudFramework.Tests +{ + [Collection("CloudFramework Test")] + public class DatastoreRepositoryLayerTests + { + private readonly CloudFrameworkTestEnvironment _env; + + public const int DefaultDelayMs = 0; + + public DatastoreRepositoryLayerTests(CloudFrameworkTestEnvironment env) + { + _env = env; + } + + internal static async Task HandleEventualConsistency(Func task) + { + for (int i = 0; i < 20; i++) + { + try + { + await task().ConfigureAwait(true); + return; + } + catch (XunitException) + { + await Task.Delay(100).ConfigureAwait(true); + } + } + + await task().ConfigureAwait(true); + } + + [Fact] + public async Task TestCreate() + { + var model = new TestModel + { + forTest = "TestCreate", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.NotNull(model.Key); + Assert.NotNull(returnedModel.Key); + Assert.Equal(model.Key, returnedModel.Key); + Assert.Equal(model, returnedModel); + } + + [Fact] + public async Task TestCreateFiresEntityModificationEvent() + { + var model = new TestModel + { + forTest = "TestCreateFiresEntityModificationEvent", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + Key[]? 
modifiedKeys = null; + layer.OnNonTransactionalEntitiesModified.Add((ev, cancellationToken) => + { + modifiedKeys = ev.Keys; + return Task.CompletedTask; + }); + + var returnedModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.NotNull(model.Key); + Assert.NotNull(returnedModel.Key); + Assert.Equal(model.Key, returnedModel.Key); + Assert.Equal(model, returnedModel); + Assert.NotNull(modifiedKeys); + Assert.Contains(model.Key, modifiedKeys); + } + + [Fact] + public async Task TestCreateNullThrowsException() + { + var layer = _env.Services.GetRequiredService(); + + await Assert.ThrowsAsync(async () => + { + await layer.CreateAsync(string.Empty, new TestModel[] { null! }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestUpsertNullThrowsException() + { + var layer = _env.Services.GetRequiredService(); + + await Assert.ThrowsAsync(async () => + { + await layer.UpsertAsync(string.Empty, new TestModel[] { null! }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestUpdateNullThrowsException() + { + var layer = _env.Services.GetRequiredService(); + + await Assert.ThrowsAsync(async () => + { + await layer.UpdateAsync(string.Empty, new TestModel[] { null! }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestDeleteNullThrowsException() + { + var layer = _env.Services.GetRequiredService(); + + await Assert.ThrowsAsync(async () => + { + await layer.DeleteAsync(string.Empty, new TestModel[] { null! 
}.ToAsyncEnumerable(), null, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestDeleteSameKeyDoesNotThrow() + { + var model = new TestModel + { + forTest = "TestDeleteSameKeyDoesNotThrow", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, returnedModel.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + // This should not throw. + await layer.DeleteAsync(string.Empty, new[] { model, model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ConfigureAwait(true); + } + + [Fact] + public void TestIncorrectEventAssignmentFiresException() + { + var layer = _env.Services.GetRequiredService(); + + Assert.Throws(() => + { + layer.OnNonTransactionalEntitiesModified.Add(null!); + }); + } + + [Fact] + public async Task TestCreateMultiple() + { + var models = new[] + { + new TestModel + { + forTest = "TestCreateMultiple", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + new TestModel + { + forTest = "TestCreateMultiple", + string1 = "test2", + number1 = 11, + number2 = 22, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + Assert.All(returnedModels, x => + { + Assert.NotNull(x.Key); + }); + Assert.Equal(models, returnedModels); + } + + [Fact] + public 
async Task TestCreateThenQuery() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var model = new TestModel + { + forTest = "TestCreateThenQuery", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = instant, + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestCreateThenQuery", + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, model.Key); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestCreateFailsWithDuplicate() + { + var firstModel = new TestModel + { + forTest = "TestCreateThenQuery", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { firstModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var secondModel = new TestModel + { + Key = firstModel.Key, + forTest = "TestCreateThenQuery", + string1 = "test1", + number1 = 11, + number2 = 22, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var ex = await Assert.ThrowsAsync(async () => + { + await layer.CreateAsync(string.Empty, new[] { secondModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + Assert.Equal(StatusCode.AlreadyExists, ex.StatusCode); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestUpsert() + { + var firstModel = new 
TestModel + { + forTest = "TestCreateThenQuery", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModel = await layer.UpsertAsync(string.Empty, new[] { firstModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.Equal(firstModel, returnedModel); + Assert.NotNull(firstModel.Key); + Assert.NotNull(returnedModel.Key); + } + + [Fact] + public async Task TestUpsertDuplicate() + { + var firstModel = new TestModel + { + forTest = "TestCreateThenQuery", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { firstModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var secondModel = new TestModel + { + Key = firstModel.Key, + forTest = "TestCreateThenQuery", + string1 = "test1", + number1 = 11, + number2 = 22, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + await layer.UpsertAsync(string.Empty, new[] { secondModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestUpsertDuplicateThenQuery() + { + var firstModel = new TestModel + { + forTest = "TestCreateThenQuery", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { firstModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + var secondInstant = SystemClock.Instance.GetCurrentInstant(); + + var secondModel = new TestModel + 
{ + Key = firstModel.Key, + forTest = "TestCreateThenQuery", + string1 = "test1", + number1 = 11, + number2 = 22, + timestamp = secondInstant, + }; + + await layer.UpsertAsync(string.Empty, new[] { secondModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var result = await layer.QueryAsync( + string.Empty, + x => + x.forTest == "TestCreateThenQuery" && + x.string1 == "test1" && + x.number1 == 11 && + x.number2 == 22 && + x.timestamp == secondInstant, + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, secondModel.Key); + }).ConfigureAwait(true); + } + + [Theory] + [InlineData(9, 2)] + [InlineData(10, 1)] + [InlineData(11, 0)] + [InlineData(12, 0)] + public async Task TestQueryGreaterThan(int threshold, int count) + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestModel + { + forTest = "TestQueryGreaterThan", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = "TestQueryGreaterThan", + string1 = "test2", + number1 = 11, + number2 = 22, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(count, await layer.QueryAsync( + string.Empty, + x => + x.forTest == "TestQueryGreaterThan" && + x.timestamp == instant && + x.number1 > threshold, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Theory] + [InlineData(9, 2)] + [InlineData(10, 2)] + [InlineData(11, 1)] + [InlineData(12, 0)] + public 
async Task TestQueryGreaterThanOrEqualTo(int threshold, int count) + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestModel + { + forTest = "TestQueryGreaterThan", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = "TestQueryGreaterThan", + string1 = "test2", + number1 = 11, + number2 = 22, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(count, await layer.QueryAsync( + string.Empty, + x => + x.forTest == "TestQueryGreaterThan" && + x.timestamp == instant && + x.number1 >= threshold, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Theory] + [InlineData(9, 0)] + [InlineData(10, 0)] + [InlineData(11, 1)] + [InlineData(12, 2)] + public async Task TestQueryLessThan(int threshold, int count) + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestModel + { + forTest = "TestQueryGreaterThan", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = "TestQueryGreaterThan", + string1 = "test2", + number1 = 11, + number2 = 22, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(count, await layer.QueryAsync( + string.Empty, + x => + x.forTest == "TestQueryGreaterThan" && + x.timestamp == instant && + x.number1 < threshold, + null, + null, + null, + 
null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Theory] + [InlineData(9, 0)] + [InlineData(10, 1)] + [InlineData(11, 2)] + [InlineData(12, 2)] + public async Task TestQueryLessThanOrEqualTo(int threshold, int count) + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestModel + { + forTest = "TestQueryGreaterThan", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = "TestQueryGreaterThan", + string1 = "test2", + number1 = 11, + number2 = 22, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(count, await layer.QueryAsync( + string.Empty, + x => + x.forTest == "TestQueryGreaterThan" && + x.timestamp == instant && + x.number1 <= threshold, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestQueryHasAncestor() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var layer = _env.Services.GetRequiredService(); + + var factory = await layer.GetKeyFactoryAsync(string.Empty, null, CancellationToken.None).ConfigureAwait(true); + var parentKey = await layer.AllocateKeyAsync(string.Empty, null, null, CancellationToken.None).ConfigureAwait(true); + var childKey = parentKey.WithElement(new TestModel().GetKind(), "child"); + + var models = new[] + { + new TestModel + { + Key = parentKey, + forTest = "TestQueryHasAncestor", + string1 = "parent", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + Key = childKey, + forTest = "TestQueryHasAncestor", + string1 = "child", + number1 = 11, + number2 = 22, + 
timestamp = instant, + } + }; + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(1, await layer.QueryAsync( + string.Empty, + x => + x.Key.HasAncestor(parentKey) && + x.forTest == "TestQueryHasAncestor" && + x.timestamp == instant && + x.number1 == 11 && + x.number2 == 22, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestQueryNull() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var model = new TestModel + { + forTest = "TestQueryNull", + string1 = null, + number1 = 10, + number2 = 20, + timestamp = instant, + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestQueryNull" && x.string1 == null, + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, model.Key); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestQueryEverything() + { + var layer = _env.Services.GetRequiredService(); + + // Just make sure this doesn't throw an exception; there's no state we can + // check against when doing an "everything" query. 
+ await layer.QueryAsync( + string.Empty, + x => true, + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + } + + [Fact] + public async Task TestInvalidQueriesAreCaught() + { + var layer = _env.Services.GetRequiredService(); + + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.Key == null, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.keyValue!.HasAncestor(null), + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => ((TestModel)null!).Key.HasAncestor(null), + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.keyValue!.IsRoot, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.keyValue!.Equals(null), + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => ((Key)null!).HasAncestor(null), + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.number1 != 20, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await 
layer.QueryAsync( + string.Empty, + x => ((TestModel)null!).number1 == 20, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => false, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => false, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.untracked == null, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.GetKind() == null, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.badField == new object(), + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + null!, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.string1 == null, + x => true, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync( + string.Empty, + x => x.string1 == null, + x => x.number1 > x.number2, + null, + null, + null, + 
CancellationToken.None).CountAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestQueryOrdering() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestModel + { + forTest = "TestQueryOrdering", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = "TestQueryOrdering", + string1 = "test2", + number1 = 10, + number2 = 21, + timestamp = instant, + }, + new TestModel + { + forTest = "TestQueryOrdering", + string1 = "test3", + number1 = 11, + number2 = 22, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var result = await layer.QueryAsync( + string.Empty, + x => + x.timestamp == instant && + x.forTest == "TestQueryOrdering", + x => x.number1 < x.number1 | x.number2 > x.number2, + 3, + null, + null, + CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + Assert.Equal(3, result.Length); + Assert.Equal("test2", result[0].string1); + Assert.Equal("test1", result[1].string1); + Assert.Equal("test3", result[2].string1); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestTransactionalQuery() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var layer = _env.Services.GetRequiredService(); + + var factory = await layer.GetKeyFactoryAsync(string.Empty, null, CancellationToken.None).ConfigureAwait(true); + var parentKey = await layer.AllocateKeyAsync(string.Empty, null, null, CancellationToken.None).ConfigureAwait(true); + var childKey = parentKey.WithElement(new PathElement { Kind = new TestModel().GetKind() }); + + var transaction = await layer.BeginTransactionAsync(string.Empty, 
Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + + var result = await layer.QueryAsync( + string.Empty, + x => + x.Key.HasAncestor(parentKey) && + x.timestamp == instant && + x.forTest == "TestTransactionalQuery", + null, + null, + transaction, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.Null(result); + + var model = new TestModel + { + Key = childKey, + forTest = "TestTransactionalQuery", + timestamp = instant, + }; + + var createdModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(createdModel); + Assert.Equal(model, createdModel); + Assert.NotNull(model.Key); + Assert.NotNull(createdModel.Key); + + await layer.CommitAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + } + + [Fact] + public async Task TestTransactionCreate() + { + var layer = _env.Services.GetRequiredService(); + + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + + var firstModel = new TestModel + { + forTest = "TestTransactionCreate", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var returnedModel = await layer.UpsertAsync(string.Empty, new[] { firstModel }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.Equal(firstModel, returnedModel); + Assert.NotNull(firstModel.Key); + Assert.NotNull(returnedModel.Key); + + await layer.CommitAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, firstModel.Key, null, null, 
CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestTransactionCreateWithRollback() + { + var layer = _env.Services.GetRequiredService(); + + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + + var firstModel = new TestModel + { + forTest = "TestTransactionCreateWithRollback", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var returnedModel = await layer.UpsertAsync(string.Empty, new[] { firstModel }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.Equal(firstModel, returnedModel); + Assert.NotNull(firstModel.Key); + Assert.NotNull(returnedModel.Key); + + await layer.RollbackAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Null(await layer.LoadAsync(string.Empty, firstModel.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + } + + [Fact] + public async Task TestTransactionUpsert() + { + var layer = _env.Services.GetRequiredService(); + + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + + var firstModel = new TestModel + { + forTest = "TestTransactionUpsert", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var returnedModel = await layer.UpsertAsync(string.Empty, new[] { firstModel }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.Equal(firstModel, returnedModel); + Assert.NotNull(firstModel.Key); + Assert.NotNull(returnedModel.Key); + + await layer.CommitAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + + await 
DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, firstModel.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Theory] + [InlineData(10, "a")] + [InlineData(20, "b")] + public async Task TestTransactionalUpdate(int value, string expectedValue) + { + var layer = _env.Services.GetRequiredService(); + + var firstModel = new TestModel + { + forTest = "TestTransactionalUpdate", + string1 = "", + number1 = value, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + await layer.CreateAsync(string.Empty, new[] { firstModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, firstModel.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + + var loadedModel = await layer.LoadAsync(string.Empty, firstModel.Key, transaction, null, CancellationToken.None).ConfigureAwait(true); + + loadedModel!.string1 = loadedModel.number1 == 10 ? 
"a" : "b"; + + await layer.UpdateAsync(string.Empty, new[] { loadedModel }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + var oldModel = await layer.LoadAsync(string.Empty, firstModel.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal(oldModel!.string1, string.Empty); + Assert.Equal(oldModel.number1, value); + + await layer.CommitAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var refreshedModel = await layer.LoadAsync(string.Empty, firstModel.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal(refreshedModel!.string1, expectedValue); + Assert.Equal(refreshedModel.number1, loadedModel.number1); + }).ConfigureAwait(true); + } + + [Theory] + [InlineData(10, "a")] + [InlineData(20, "b")] + public async Task TestTransactionalUpsert(int value, string expectedValue) + { + var layer = _env.Services.GetRequiredService(); + + var firstModel = new TestModel + { + forTest = "TestTransactionalUpdate", + string1 = "", + number1 = value, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + await layer.CreateAsync(string.Empty, new[] { firstModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, firstModel.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + + var loadedModel = await layer.LoadAsync(string.Empty, firstModel.Key, transaction, null, CancellationToken.None).ConfigureAwait(true); + + loadedModel!.string1 = 
loadedModel.number1 == 10 ? "a" : "b"; + + await layer.UpsertAsync(string.Empty, new[] { loadedModel }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + var oldModel = await layer.LoadAsync(string.Empty, firstModel.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal(oldModel!.string1, string.Empty); + Assert.Equal(oldModel.number1, value); + + await layer.CommitAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var refreshedModel = await layer.LoadAsync(string.Empty, firstModel.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal(refreshedModel!.string1, expectedValue); + Assert.Equal(refreshedModel.number1, loadedModel.number1); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestUpdate() + { + var model = new TestModel + { + forTest = "TestUpdate", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + model.string1 = "updated"; + + await layer.UpdateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var refreshedModel = await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal("updated", refreshedModel!.string1); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestUpdateThrowsIfEntityDoesNotExist() + { + var layer = _env.Services.GetRequiredService(); + + var model = new TestModel + { + Key = (await 
layer.GetKeyFactoryAsync(string.Empty, null, CancellationToken.None).ConfigureAwait(true)).CreateKey("nonexistant"), + forTest = "TestUpdate", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var ex = await Assert.ThrowsAsync(async () => + { + await layer.UpdateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + Assert.Equal(StatusCode.NotFound, ex.StatusCode); + } + + [Fact] + public async Task TestUpdateDoesNotUpdateCreationTime() + { + var model = new TestModel + { + forTest = "TestUpdate", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + var createdDate = model.dateCreatedUtc; + Assert.NotNull(createdDate); + + await Task.Delay(200).ConfigureAwait(true); + + model.string1 = "updated"; + + await layer.UpdateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var refreshedModel = await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal("updated", refreshedModel!.string1); + Assert.NotNull(createdDate); + + // We compare milliseconds because Datastore has less resolution than C#, so there is + // some loss at the tick level even under normal operation. We use the Task.Delay above + // to ensure that if there is misbehaviour, it will be caught. 
+ Assert.Equal(createdDate!.Value.ToUnixTimeMilliseconds(), refreshedModel.dateCreatedUtc!.Value.ToUnixTimeMilliseconds()); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestUpsertDoesNotUpdateCreationTime() + { + var model = new TestModel + { + forTest = "TestUpdate", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + var createdDate = model.dateCreatedUtc; + Assert.NotNull(createdDate); + + await Task.Delay(200).ConfigureAwait(true); + + model.string1 = "updated"; + + await layer.UpsertAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var refreshedModel = await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal("updated", refreshedModel!.string1); + Assert.NotNull(createdDate); + + // We compare milliseconds because Datastore has less resolution than C#, so there is + // some loss at the tick level even under normal operation. We use the Task.Delay above + // to ensure that if there is misbehaviour, it will be caught. 
+ Assert.Equal(createdDate!.Value.ToUnixTimeMilliseconds(), refreshedModel.dateCreatedUtc!.Value.ToUnixTimeMilliseconds()); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestCreateThenDelete() + { + var model = new TestModel + { + forTest = "TestCreate", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.NotNull(model.Key); + Assert.NotNull(returnedModel.Key); + Assert.Equal(model.Key, returnedModel.Key); + Assert.Equal(model, returnedModel); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + await layer.DeleteAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Null(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestTransactionalDelete() + { + var model = new TestModel + { + forTest = "TestCreate", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.NotNull(model.Key); + Assert.NotNull(returnedModel.Key); + Assert.Equal(model.Key, returnedModel.Key); + Assert.Equal(model, returnedModel); + + await 
DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + + await layer.DeleteAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).ConfigureAwait(true); + + Assert.NotNull(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + + await layer.CommitAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Null(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestLoadNullThrowsException() + { + var layer = _env.Services.GetRequiredService(); + + await Assert.ThrowsAsync(async () => + { + await layer.LoadAsync(string.Empty, (Key)null!, null, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true); + await Assert.ThrowsAsync(async () => + { + await layer.LoadAsync(string.Empty, (IAsyncEnumerable)null!, null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestLoadMultiple() + { + var models = new[] + { + new TestModel + { + forTest = "TestCreateMultiple", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + new TestModel + { + forTest = "TestCreateMultiple", + string1 = "test2", + number1 = 11, + number2 = 22, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + }; + + var layer = _env.Services.GetRequiredService(); + + var 
returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + Assert.All(returnedModels, x => + { + Assert.NotNull(x.Key); + }); + Assert.Equal(models, returnedModels); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var loadedModels = await layer.LoadAsync(string.Empty, returnedModels.Select(x => x.Key).ToAsyncEnumerable(), null, null, CancellationToken.None).Select(x => x.Value).ToArrayAsync().ConfigureAwait(true); + + Assert.Equal(models.Select(x => x.Key), loadedModels.Select(x => x!.Key)); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestLoadMultipleMissing() + { + var layer = _env.Services.GetRequiredService(); + + var keyFactory = await layer.GetKeyFactoryAsync(string.Empty, null, CancellationToken.None).ConfigureAwait(true); + + var models = new[] + { + new TestModel + { + Key = keyFactory.CreateKey("TestLoadMultipleMissing-1"), + forTest = "TestLoadMultipleMissing", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + new TestModel + { + Key = keyFactory.CreateKey("TestLoadMultipleMissing-3"), + forTest = "TestLoadMultipleMissing", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + }; + + var returnedModels = await layer.UpsertAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var loadedModels = await layer.LoadAsync(string.Empty, returnedModels.Select(x => x.Key).ToAsyncEnumerable(), null, null, CancellationToken.None).Select(x => x.Value).ToArrayAsync().ConfigureAwait(true); + Assert.Equal(2, loadedModels.Length); + }).ConfigureAwait(true); + + var nullLoadAttempt = await layer.LoadAsync( + string.Empty, + new[] + { + 
models[0].Key, + keyFactory.CreateKey("TestLoadMultipleMissing-2"), + models[1].Key, + keyFactory.CreateKey("TestLoadMultipleMissing-4"), + }.ToAsyncEnumerable(), + null, + null, + CancellationToken.None).ToDictionaryAsync(k => k.Key, v => v.Value).ConfigureAwait(true); + + Assert.Equal(4, nullLoadAttempt.Count); + Assert.Contains(keyFactory.CreateKey("TestLoadMultipleMissing-1"), nullLoadAttempt); + Assert.Contains(keyFactory.CreateKey("TestLoadMultipleMissing-2"), nullLoadAttempt); + Assert.Contains(keyFactory.CreateKey("TestLoadMultipleMissing-3"), nullLoadAttempt); + Assert.Contains(keyFactory.CreateKey("TestLoadMultipleMissing-4"), nullLoadAttempt); + Assert.NotNull(nullLoadAttempt[keyFactory.CreateKey("TestLoadMultipleMissing-1")]); + Assert.Null(nullLoadAttempt[keyFactory.CreateKey("TestLoadMultipleMissing-2")]); + Assert.NotNull(nullLoadAttempt[keyFactory.CreateKey("TestLoadMultipleMissing-3")]); + Assert.Null(nullLoadAttempt[keyFactory.CreateKey("TestLoadMultipleMissing-4")]); + } + + [Fact] + public async Task TestLoadMultipleDuplicate() + { + var models = new[] + { + new TestModel + { + forTest = "TestCreateMultiple", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + } + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + Assert.All(returnedModels, x => + { + Assert.NotNull(x.Key); + }); + Assert.Equal(models, returnedModels); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(1, await layer.LoadAsync(string.Empty, new Key[] { models[0].Key, new Key(models[0].Key) }.ToAsyncEnumerable(), null, null, CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestTransactionalLoadMultiple() + { + var models = new[] + { + 
new TestModel + { + forTest = "TestCreateMultiple", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + new TestModel + { + forTest = "TestCreateMultiple", + string1 = "test2", + number1 = 11, + number2 = 22, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + Assert.All(returnedModels, x => + { + Assert.NotNull(x.Key); + }); + Assert.Equal(models, returnedModels); + + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var loadedModels = await layer.LoadAsync(string.Empty, returnedModels.Select(x => x.Key).ToAsyncEnumerable(), transaction, null, CancellationToken.None).Select(x => x.Value).ToArrayAsync().ConfigureAwait(true); + + Assert.Equal(models.Select(x => x.Key), loadedModels.Select(x => x!.Key)); + }).ConfigureAwait(true); + + await layer.CommitAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + } + + [Fact] + public async Task TestLoadAcrossNamespaces() + { + var layer = _env.Services.GetRequiredService(); + + var models = new[] + { + new TestModel + { + Key = (await layer.GetKeyFactoryAsync("a", null, CancellationToken.None).ConfigureAwait(true)).CreateIncompleteKey(), + forTest = "TestLoadAcrossNamespaces", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + new TestModel + { + Key = (await layer.GetKeyFactoryAsync("b", null, CancellationToken.None).ConfigureAwait(true)).CreateIncompleteKey(), + forTest = "TestLoadAcrossNamespaces", + string1 = "test2", + number1 = 11, 
+ number2 = 22, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + }; + + await layer.CreateAsync("a", new[] { models[0] }.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + await layer.CreateAsync("b", new[] { models[1] }.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var loadedModels = await layer.LoadAcrossNamespacesAsync(models.Select(x => x.Key).ToAsyncEnumerable(), null, CancellationToken.None).Select(x => x.Value).ToArrayAsync().ConfigureAwait(true); + + Assert.Equal(models.Select(x => x.Key), loadedModels.Select(x => x!.Key)); + }).ConfigureAwait(true); + + Assert.Equal("keys", (await Assert.ThrowsAsync(async () => + { + await layer.LoadAcrossNamespacesAsync(null!, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("keys", (await Assert.ThrowsAsync(async () => + { + await layer.LoadAcrossNamespacesAsync(new Key[] { null! 
}.ToAsyncEnumerable(), null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + } + + [Theory] + [InlineData("", "a")] + [InlineData("a", "b")] + [InlineData("a", "")] + public async Task TestMismatchedNamespaceIsCaughtOnCreate(string nsInModel, string nsOnOp) + { + var layer = _env.Services.GetRequiredService(); + + var model = new TestModel + { + Key = (await layer.GetKeyFactoryAsync(nsInModel, null, CancellationToken.None).ConfigureAwait(true)).CreateIncompleteKey(), + forTest = "TestMismatchedNamespaceIsCaughtOnCreate", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + await Assert.ThrowsAsync(async () => + { + await layer.CreateAsync(nsOnOp, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Theory] + [InlineData("", "a")] + [InlineData("a", "b")] + [InlineData("a", "")] + public async Task TestMismatchedNamespaceIsCaughtOnUpdate(string nsInModel, string nsOnOp) + { + var layer = _env.Services.GetRequiredService(); + + var model = new TestModel + { + Key = (await layer.GetKeyFactoryAsync(nsInModel, null, CancellationToken.None).ConfigureAwait(true)).CreateIncompleteKey(), + forTest = "TestMismatchedNamespaceIsCaughtOnCreate", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + await Assert.ThrowsAsync(async () => + { + await layer.UpdateAsync(nsOnOp, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Theory] + [InlineData("", "a")] + [InlineData("a", "b")] + [InlineData("a", "")] + public async Task TestMismatchedNamespaceIsCaughtOnUpsert(string nsInModel, string nsOnOp) + { + var layer = _env.Services.GetRequiredService(); + + var model = new TestModel + { + Key = (await 
layer.GetKeyFactoryAsync(nsInModel, null, CancellationToken.None).ConfigureAwait(true)).CreateIncompleteKey(), + forTest = "TestMismatchedNamespaceIsCaughtOnCreate", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + await Assert.ThrowsAsync(async () => + { + await layer.UpsertAsync(nsOnOp, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Theory] + [InlineData("", "a")] + [InlineData("a", "b")] + [InlineData("a", "")] + public async Task TestMismatchedNamespaceIsCaughtOnDelete(string nsInModel, string nsOnOp) + { + var layer = _env.Services.GetRequiredService(); + + var model = new TestModel + { + Key = (await layer.GetKeyFactoryAsync(nsInModel, null, CancellationToken.None).ConfigureAwait(true)).CreateIncompleteKey(), + forTest = "TestMismatchedNamespaceIsCaughtOnCreate", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + await Assert.ThrowsAsync(async () => + { + await layer.DeleteAsync(nsOnOp, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestNullNamespaceThrowsException() + { + var layer = _env.Services.GetRequiredService(); + + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.QueryAsync(null!, x => true, null, null, null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.LoadAsync(null!, (Key)null!, null, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.LoadAsync(null!, (IAsyncEnumerable)null!, null, null, 
CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.CreateAsync(null!, Array.Empty().ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.UpsertAsync(null!, Array.Empty().ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.UpdateAsync(null!, Array.Empty().ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.DeleteAsync(null!, Array.Empty().ToAsyncEnumerable(), null, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.AllocateKeyAsync(null!, null, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.GetKeyFactoryAsync(null!, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.BeginTransactionAsync(null!, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await Assert.ThrowsAsync(async () => + { + await layer.RollbackAsync(null!, null!, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + Assert.Equal("namespace", (await 
Assert.ThrowsAsync(async () => + { + await layer.CommitAsync(null!, null!, null, CancellationToken.None).ConfigureAwait(true); + }).ConfigureAwait(true)).ParamName); + } + + [Fact] + public void TestDefaultedModelHasCorrectDefaultsWhenConstructed() + { + var model = new DefaultedModel(); + + Assert.Equal("test", model.myString); + Assert.True(model.myBool); + Assert.Equal(10, model.myInteger); + } + + [Fact] + public void TestDefaultedModelWithoutDefaultsOnValueTypesThrows() + { + Assert.Throws(() => + { + new DefaultedInvalidModel(); + }); + } + + [Fact] + public async Task TestDefaultedModelHasDefaultsWhenLoaded() + { + // To test this, we first create with DefaultedBypassModel, and then load with DefaultedModel + // to make sure it's defaulting on load. + + var model = new DefaultedBypassModel(); + + Assert.Null(model.myString); + Assert.Null(model.myBool); + Assert.Null(model.myInteger); + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var loadedModel = await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal("test", loadedModel!.myString); + Assert.True(loadedModel.myBool); + Assert.Equal(10, loadedModel.myInteger); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestDefaultedModelHasDefaultsWhenSaved() + { + // To test this, we first create with DefaultedBypassModel, and then load with DefaultedModel + // to make sure it's defaulting on load. 
+ + var model = new DefaultedBypassModel(); + + Assert.Null(model.myString); + Assert.Null(model.myBool); + Assert.Null(model.myInteger); + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + var loadedModel = await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal("test", loadedModel!.myString); + Assert.True(loadedModel.myBool); + Assert.Equal(10, loadedModel.myInteger); + + await layer.UpdateAsync(string.Empty, new[] { loadedModel }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + model = await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal("test", model!.myString); + Assert.True(model.myBool); + Assert.Equal(10, model.myInteger); + }).ConfigureAwait(true); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestPaginatedQueryWith4Entities() + { + const string name = "TestPaginatedQueryWith4Entities"; + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestModel + { + forTest = name, + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = name, + string1 = "test", + number1 = 11, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = name, + string1 = "test", + number1 = 12, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = name, + string1 = "test", + number1 = 13, + number2 = 20, + timestamp = instant, + } + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, 
models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + // Just wait until we can load everything. + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(4, await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + var set = await layer.QueryPaginatedAsync( + string.Empty, + null!, + 2, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + CancellationToken.None).ConfigureAwait(true); + Assert.Equal(2, set.Results.Count); + Assert.NotNull(set.NextCursor); + + var nextSet = await layer.QueryPaginatedAsync( + string.Empty, + set.NextCursor!, + 2, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + CancellationToken.None).ConfigureAwait(true); + Assert.Equal(2, nextSet.Results.Count); + Assert.NotNull(nextSet.NextCursor); + + var finalSet = await layer.QueryPaginatedAsync( + string.Empty, + nextSet.NextCursor!, + 2, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + CancellationToken.None).ConfigureAwait(true); + Assert.Empty(finalSet.Results); + Assert.Null(finalSet.NextCursor); + } + + [Fact] + public async Task TestPaginatedQueryWith5Entities() + { + const string name = "TestPaginatedQueryWith5Entities"; + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestModel + { + forTest = name, + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = name, + string1 = "test", + number1 = 11, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = name, + string1 = "test", + number1 = 12, + number2 = 20, + timestamp = instant, + }, + new TestModel + { + forTest = name, + string1 = "test", + number1 = 13, + number2 = 20, + timestamp = instant, + 
}, + new TestModel + { + forTest = name, + string1 = "test", + number1 = 14, + number2 = 20, + timestamp = instant, + } + }; + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + // Just wait until we can load everything. + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(5, await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + var set = await layer.QueryPaginatedAsync( + string.Empty, + null!, + 2, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + CancellationToken.None).ConfigureAwait(true); + Assert.Equal(2, set.Results.Count); + Assert.NotNull(set.NextCursor); + + var nextSet = await layer.QueryPaginatedAsync( + string.Empty, + set.NextCursor!, + 2, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + CancellationToken.None).ConfigureAwait(true); + Assert.Equal(2, nextSet.Results.Count); + Assert.NotNull(nextSet.NextCursor); + + var nextNextSet = await layer.QueryPaginatedAsync( + string.Empty, + nextSet.NextCursor!, + 2, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + CancellationToken.None).ConfigureAwait(true); + Assert.Single(nextNextSet.Results); + + if (nextNextSet.NextCursor == null) + { + // This is permitted and matches production (older Datastore emulators + // can return a non-null cursor here). 
+ } + else + { + var finalSet = await layer.QueryPaginatedAsync( + string.Empty, + nextNextSet.NextCursor, + 2, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + CancellationToken.None).ConfigureAwait(true); + Assert.Empty(finalSet.Results); + Assert.Null(finalSet.NextCursor); + } + } + + [Fact] + public async Task TestGeographicQueriesSparse() + { + const string name = "TestGeographicQueriesSparse"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new GeoSparseModel + { + forTest = name, + timestamp = instant, + descriptor = "melbourne", + location = new LatLng { Latitude = -37.8136, Longitude = 144.9631 }, + }, + new GeoSparseModel + { + forTest = name, + timestamp = instant, + descriptor = "geelong", + location = new LatLng { Latitude = -38.1499, Longitude = 144.3617 }, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal( + 2, + await layer.QueryAsync( + string.Empty, + x => x.forTest == name && x.timestamp == instant, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + const float targetDistance = 35.0f; + + // Within distance of both locations (just barely). + Assert.Equal(2, await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -37.91298645369961, Longitude = 144.61227791800124 }, targetDistance), + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + + // Outside radius of both locations (just barely). 
+ Assert.Equal(0, await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -37.88113253803819, Longitude = 144.5707632417446 }, targetDistance), + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + + // Within radius of Geelong. + var geelong = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -37.88412126495574, Longitude = 144.56687590591972 }, targetDistance), + null, + null, + null, + null, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + Assert.Single(geelong); + Assert.Equal("geelong", geelong[0].descriptor); + + // Within radius of Melbourne. + var melbourne = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -37.88131344781548, Longitude = 144.57575923614561 }, targetDistance), + null, + null, + null, + null, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + Assert.Single(melbourne); + Assert.Equal("melbourne", melbourne[0].descriptor); + } + + [Fact] + public async Task TestGeographicQueriesDense() + { + const string name = "TestGeographicQueriesDense"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new GeoDenseModel + { + forTest = name, + timestamp = instant, + descriptor = "melbourne", + location = new LatLng { Latitude = -37.8136, Longitude = 144.9631 }, + }, + new GeoDenseModel + { + forTest = name, + timestamp = instant, + descriptor = "geelong", + location = new LatLng { Latitude = -38.1499, Longitude = 144.3617 }, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await 
DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal( + 2, + await layer.QueryAsync( + string.Empty, + x => x.forTest == name && x.timestamp == instant, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + const float targetDistance = 35.0f; + + // Within distance of both locations (just barely). + Assert.Equal(2, await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -37.91298645369961, Longitude = 144.61227791800124 }, targetDistance), + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + + // Outside radius of both locations (just barely). + Assert.Equal(0, await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -37.88113253803819, Longitude = 144.5707632417446 }, targetDistance), + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + + // Within radius of Geelong. + var geelong = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -37.88412126495574, Longitude = 144.56687590591972 }, targetDistance), + null, + null, + null, + null, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + Assert.Single(geelong); + Assert.Equal("geelong", geelong[0].descriptor); + + // Within radius of Melbourne. 
+ var melbourne = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -37.88131344781548, Longitude = 144.57575923614561 }, targetDistance), + null, + null, + null, + null, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + Assert.Single(melbourne); + Assert.Equal("melbourne", melbourne[0].descriptor); + } + + [Fact] + public async Task TestGeographicQueriesOrderNearest() + { + const string name = "TestGeographicQueriesOrderNearest"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new GeoSparseModel + { + forTest = name, + timestamp = instant, + descriptor = "melbourne", + location = new LatLng { Latitude = -37.8136, Longitude = 144.9631 }, + }, + new GeoSparseModel + { + forTest = name, + timestamp = instant, + descriptor = "geelong", + location = new LatLng { Latitude = -38.1499, Longitude = 144.3617 }, + }, + new GeoSparseModel + { + forTest = name, + timestamp = instant, + descriptor = "ballarat", + location = new LatLng { Latitude = -37.5622, Longitude = 143.8503 }, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal( + 3, + await layer.QueryAsync( + string.Empty, + x => x.forTest == name && x.timestamp == instant, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + var results = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -38.250966756220556, Longitude = 144.58046654644946 }, 100.0f), + x => x.location!.Nearest(), + null, + null, + null, + 
CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Equal(3, results.Count); + Assert.Equal("geelong", results[0].descriptor); + Assert.Equal("melbourne", results[1].descriptor); + Assert.Equal("ballarat", results[2].descriptor); + } + + [Fact] + public async Task TestGeographicQueriesOrderFurthest() + { + const string name = "TestGeographicQueriesOrderFurthest"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new GeoSparseModel + { + forTest = name, + timestamp = instant, + descriptor = "melbourne", + location = new LatLng { Latitude = -37.8136, Longitude = 144.9631 }, + }, + new GeoSparseModel + { + forTest = name, + timestamp = instant, + descriptor = "geelong", + location = new LatLng { Latitude = -38.1499, Longitude = 144.3617 }, + }, + new GeoSparseModel + { + forTest = name, + timestamp = instant, + descriptor = "ballarat", + location = new LatLng { Latitude = -37.5622, Longitude = 143.8503 }, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal( + 3, + await layer.QueryAsync( + string.Empty, + x => x.forTest == name && x.timestamp == instant, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + var results = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && x.timestamp == instant && + x.location!.WithinKilometers(new LatLng { Latitude = -38.250966756220556, Longitude = 144.58046654644946 }, 100.0f), + x => x.location!.Furthest(), + null, + null, + null, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Equal(3, results.Count); + Assert.Equal("ballarat", results[0].descriptor); + Assert.Equal("melbourne", 
results[1].descriptor); + Assert.Equal("geelong", results[2].descriptor); + } + + [Fact] + public async Task TestCreateAndLoadWithEmbeddedEntity() + { + var layer = _env.Services.GetRequiredService(); + + var subentity1 = new Entity(); + subentity1.Key = null; + subentity1["a"] = "hello"; + + var subentity2 = new Entity(); + subentity2.Key = null; + subentity2["a"] = "world"; + + var subentity3 = new Entity(); + subentity3.Key = null; + subentity3["a"] = "blah"; + + var entity = new Entity(); + entity.Key = null; + entity["null"] = Value.ForNull(); + entity["string"] = "test"; + entity["integer"] = 5; + entity["double"] = 5.0; + entity["array"] = new[] + { + subentity1, + subentity2, + }; + entity["arrayString"] = new[] + { + "hello", + "world" + }; + entity["blob"] = ByteString.CopyFromUtf8("test"); + entity["entity"] = subentity3; + entity["geopoint"] = new LatLng { Latitude = 20, Longitude = 40 }; + entity["key"] = (await layer.GetKeyFactoryAsync(string.Empty, null, CancellationToken.None).ConfigureAwait(true)).CreateKey(1); + entity["timestamp"] = Timestamp.FromDateTimeOffset(DateTimeOffset.UtcNow); + + var model = new EmbeddedEntityModel + { + forTest = "TestCreateAndLoadWithEmbeddedEntity", + timestamp = SystemClock.Instance.GetCurrentInstant(), + entity = entity, + }; + + var returnedModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.NotNull(model.Key); + Assert.NotNull(returnedModel.Key); + Assert.Equal(model.Key, returnedModel.Key); + Assert.Equal(model, returnedModel); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, returnedModel.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + var loadedModel = await layer.LoadAsync(string.Empty, returnedModel.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + 
Assert.NotEqual(loadedModel, model); + Assert.NotNull(loadedModel!.timestamp); + Assert.NotNull(loadedModel.entity); + foreach (var property in entity.Properties) + { + DatastoreRepositoryLayerTests.AssertProperty(loadedModel.entity!, property.Key, property.Value); + } + } + + private static void AssertProperty(Entity entity, string name, Value compareWith) + { + Assert.True(entity.Properties.ContainsKey(name)); + Assert.NotNull(entity[name]); + if (compareWith.ValueTypeCase == Value.ValueTypeOneofCase.TimestampValue) + { + Assert.Equal(compareWith.ValueTypeCase, entity[name].ValueTypeCase); + Assert.Equal(compareWith.TimestampValue.Seconds, entity[name].TimestampValue.Seconds); + } + else + { + Assert.Equal(compareWith, entity[name]); + } + } + + [Fact] + public async Task TestCreateAndQueryWithEmbeddedEntity() + { + var layer = _env.Services.GetRequiredService(); + + var subentity1 = new Entity(); + subentity1.Key = null; + subentity1["a"] = "hello"; + + var subentity2 = new Entity(); + subentity2.Key = null; + subentity2["a"] = "world"; + + var subentity3 = new Entity(); + subentity3.Key = null; + subentity3["a"] = "blah"; + + var entity = new Entity(); + entity.Key = null; + entity["null"] = Value.ForNull(); + entity["string"] = "test"; + entity["integer"] = 5; + entity["double"] = 5.0; + entity["array"] = new[] + { + subentity1, + subentity2, + }; + entity["arrayString"] = new[] + { + "hello", + "world" + }; + entity["blob"] = ByteString.CopyFromUtf8("test"); + entity["entity"] = subentity3; + entity["geopoint"] = new LatLng { Latitude = 20, Longitude = 40 }; + entity["key"] = (await layer.GetKeyFactoryAsync(string.Empty, null, CancellationToken.None).ConfigureAwait(true)).CreateKey(1); + entity["timestamp"] = Timestamp.FromDateTimeOffset(DateTimeOffset.UtcNow); + + const string name = "TestCreateAndQueryWithEmbeddedEntity"; + var timestamp = SystemClock.Instance.GetCurrentInstant(); + + var model = new EmbeddedEntityModel + { + forTest = name, + timestamp = 
timestamp, + entity = entity, + }; + + var returnedModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.NotNull(model.Key); + Assert.NotNull(returnedModel.Key); + Assert.Equal(model.Key, returnedModel.Key); + Assert.Equal(model, returnedModel); + + await DatastoreRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, returnedModel.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + var loadedModel = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && + x.timestamp == timestamp && + x.entity!["string"].StringValue == "test", + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.NotNull(loadedModel); + + Assert.NotEqual(loadedModel, model); + Assert.NotNull(loadedModel.timestamp); + Assert.NotNull(loadedModel.entity); + foreach (var property in entity.Properties) + { + DatastoreRepositoryLayerTests.AssertProperty(loadedModel.entity!, property.Key, property.Value); + } + + loadedModel = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && + x.timestamp == timestamp && + x.entity!["string"].StringValue == "not_found", + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.Null(loadedModel); + + loadedModel = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && + x.timestamp == timestamp && + x.entity!["arrayString"].StringValue == "hello", + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.NotNull(loadedModel); + + Assert.NotEqual(loadedModel, model); + Assert.NotNull(loadedModel.timestamp); + Assert.NotNull(loadedModel.entity); + foreach (var property in entity.Properties) + { + DatastoreRepositoryLayerTests.AssertProperty(loadedModel.entity!, 
property.Key, property.Value); + } + + loadedModel = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && + x.timestamp == timestamp && + x.entity!["array"].EntityValue["a"].StringValue == "world", + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.NotNull(loadedModel); + + Assert.NotEqual(loadedModel, model); + Assert.NotNull(loadedModel.timestamp); + Assert.NotNull(loadedModel.entity); + foreach (var property in entity.Properties) + { + DatastoreRepositoryLayerTests.AssertProperty(loadedModel.entity!, property.Key, property.Value); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/DefaultedBypassModel.cs b/UET/Redpoint.CloudFramework.Tests/DefaultedBypassModel.cs new file mode 100644 index 00000000..ff5455b3 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/DefaultedBypassModel.cs @@ -0,0 +1,17 @@ +using Redpoint.CloudFramework.Models; + +namespace Redpoint.CloudFramework.Tests +{ + [Kind("cf_defaultedModel")] + public class DefaultedBypassModel : AttributedModel + { + [Type(FieldType.String), Indexed] + public string? myString { get; set; } + + [Type(FieldType.Boolean), Indexed] + public bool? myBool { get; set; } + + [Type(FieldType.Integer), Indexed] + public long? myInteger { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/DefaultedInvalidModel.cs b/UET/Redpoint.CloudFramework.Tests/DefaultedInvalidModel.cs new file mode 100644 index 00000000..7fe6ef0b --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/DefaultedInvalidModel.cs @@ -0,0 +1,17 @@ +using Redpoint.CloudFramework.Models; + +namespace Redpoint.CloudFramework.Tests +{ + [Kind("cf_defaultedModel")] + public class DefaultedInvalidModel : AttributedModel + { + [Type(FieldType.String), Indexed] + public string? 
myString { get; set; } + + [Type(FieldType.Boolean), Indexed] + public bool myBool { get; set; } + + [Type(FieldType.Integer), Indexed] + public long myInteger { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/DefaultedModel.cs b/UET/Redpoint.CloudFramework.Tests/DefaultedModel.cs new file mode 100644 index 00000000..587d7cbe --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/DefaultedModel.cs @@ -0,0 +1,21 @@ +using Redpoint.CloudFramework.Models; + +namespace Redpoint.CloudFramework.Tests +{ + [Kind("cf_defaultedModel")] + public class DefaultedModel : AttributedModel + { + // This model exists to ensure AttributedModel initializes + // properties to their default values. +#pragma warning disable CS8618 + [Type(FieldType.String), Indexed, Default("test")] + public string myString { get; set; } +#pragma warning restore CS8618 + + [Type(FieldType.Boolean), Indexed, Default(true)] + public bool myBool { get; set; } + + [Type(FieldType.Integer), Indexed, Default(10)] + public long myInteger { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/EmbeddedEntityModel.cs b/UET/Redpoint.CloudFramework.Tests/EmbeddedEntityModel.cs new file mode 100644 index 00000000..915cd671 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/EmbeddedEntityModel.cs @@ -0,0 +1,20 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Google.Cloud.Datastore.V1; + using NodaTime; + using Redpoint.CloudFramework.Models; + using System; + + [Kind("cf_embeddedEntityModel")] + public class EmbeddedEntityModel : AttributedModel + { + [Type(FieldType.String), Indexed] + public string? forTest { get; set; } + + [Type(FieldType.Timestamp), Indexed] + public Instant? timestamp { get; set; } + + [Type(FieldType.EmbeddedEntity), Indexed] + public Entity? 
entity { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/GeoDenseModel.cs b/UET/Redpoint.CloudFramework.Tests/GeoDenseModel.cs new file mode 100644 index 00000000..d0a6a88e --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/GeoDenseModel.cs @@ -0,0 +1,22 @@ +using Google.Type; +using NodaTime; +using Redpoint.CloudFramework.Models; + +namespace Redpoint.CloudFramework.Tests +{ + [Kind("cf_geoDenseModel")] + public class GeoDenseModel : AttributedModel + { + [Type(FieldType.String), Indexed] + public string? forTest { get; set; } + + [Type(FieldType.Timestamp), Indexed] + public Instant? timestamp { get; set; } + + [Type(FieldType.String), Indexed] + public string? descriptor { get; set; } + + [Type(FieldType.Geopoint), Geopoint(6), Indexed] + public LatLng? location { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/GeoSparseModel.cs b/UET/Redpoint.CloudFramework.Tests/GeoSparseModel.cs new file mode 100644 index 00000000..ed954bb4 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/GeoSparseModel.cs @@ -0,0 +1,22 @@ +using Google.Type; +using NodaTime; +using Redpoint.CloudFramework.Models; + +namespace Redpoint.CloudFramework.Tests +{ + [Kind("cf_geoSparseModel")] + public class GeoSparseModel : AttributedModel + { + [Type(FieldType.String), Indexed] + public string? forTest { get; set; } + + [Type(FieldType.Timestamp), Indexed] + public Instant? timestamp { get; set; } + + [Type(FieldType.String), Indexed] + public string? descriptor { get; set; } + + [Type(FieldType.Geopoint), Geopoint(1), Indexed] + public LatLng? 
location { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/GlobalRepositoryTests.cs b/UET/Redpoint.CloudFramework.Tests/GlobalRepositoryTests.cs new file mode 100644 index 00000000..9b62a678 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/GlobalRepositoryTests.cs @@ -0,0 +1,36 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Google.Cloud.Datastore.V1; + using Microsoft.Extensions.DependencyInjection; + using Redpoint.CloudFramework.Datastore; + using Redpoint.CloudFramework.Repository; + using System.Threading.Tasks; + using Xunit; + +#pragma warning disable CS0618 // Type or member is obsolete + + [Collection("CloudFramework Test")] + public class GlobalRepositoryTests + { + private readonly CloudFrameworkTestEnvironment _env; + + public GlobalRepositoryTests(CloudFrameworkTestEnvironment env) + { + _env = env; + } + + [Fact] + public async Task TestLegacyHasAncestorQueryDoesNotCrash() + { + var repository = _env.Services.GetRequiredService(); + + var key = await repository.CreateNamedKey(string.Empty, "blah").ConfigureAwait(true); + + var query = await repository.CreateQuery(string.Empty).ConfigureAwait(true); + query.Query.Filter = Filter.HasAncestor(key); + var results = await repository.RunUncachedQuery(string.Empty, query, readConsistency: ReadOptions.Types.ReadConsistency.Strong).ConfigureAwait(true); + } + } + +#pragma warning restore CS0618 // Type or member is obsolete +} diff --git a/UET/Redpoint.CloudFramework.Tests/RedisCacheRepositoryLayerTests.cs b/UET/Redpoint.CloudFramework.Tests/RedisCacheRepositoryLayerTests.cs new file mode 100644 index 00000000..56708daf --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/RedisCacheRepositoryLayerTests.cs @@ -0,0 +1,1875 @@ +using Google.Cloud.Datastore.V1; +using Google.Protobuf; +using Google.Protobuf.WellKnownTypes; +using Google.Type; +using Microsoft.Extensions.DependencyInjection; +using NodaTime; +using Redpoint.CloudFramework.Models; +using 
Redpoint.CloudFramework.Repository.Layers; +using Redpoint.CloudFramework.Repository.Metrics; +using StackExchange.Redis; +using System.Text; +using Xunit; +using Xunit.Sdk; +using static Google.Cloud.Datastore.V1.Key.Types; +using Value = Google.Cloud.Datastore.V1.Value; + +namespace Redpoint.CloudFramework.Tests +{ + [Collection("CloudFramework Test")] + public class RedisLayerRepositoryLayerTests + { + private readonly CloudFrameworkTestEnvironment _env; + + public const int DefaultDelayMs = 0; + + public RedisLayerRepositoryLayerTests(CloudFrameworkTestEnvironment env) + { + _env = env; + } + + private static async Task HandleEventualConsistency(Func task) + { + for (int i = 0; i < 20; i++) + { + try + { + await task().ConfigureAwait(true); + return; + } + catch (XunitException) + { + await Task.Delay(100).ConfigureAwait(true); + } + } + + await task().ConfigureAwait(true); + } + + [Kind("cf_TestLoadedEntityMatchesCreatedEntity")] + private class TestLoadedEntityMatchesCreatedEntity_Model : RedisTestModel { } + + [Kind("cf_TestLoadedEntityIsInCache")] + private class TestLoadedEntityIsInCache_Model : RedisTestModel { } + + [Kind("cf_TestMultipleEntityLoadWorks")] + private class TestMultipleEntityLoadWorks_Model : RedisTestModel { } + + [Kind("cf_TestMultipleEntityLoadWorksWithoutCacheClear")] + private class TestMultipleEntityLoadWorksWithoutCacheClear_Model : RedisTestModel { } + + [Kind("cf_TestUpdatedEntityIsNotInCache")] + private class TestUpdatedEntityIsNotInCache_Model : RedisTestModel { } + + [Kind("cf_TestUpsertedEntityIsNotInCache")] + private class TestUpsertedEntityIsNotInCache_Model : RedisTestModel { } + + [Kind("cf_TestDeletedEntityIsNotInCache")] + private class TestDeletedEntityIsNotInCache_Model : RedisTestModel { } + + [Kind("cf_TestCreateThenQuery")] + private class TestCreateThenQuery_Model : RedisTestModel { } + + [Kind("cf_TestCreateThenQueryThenUpdateThenQuery")] + private class TestCreateThenQueryThenUpdateThenQuery_Model : 
RedisTestModel { } + + [Kind("cf_TestReaderCountIsSetWhileReading")] + private class TestReaderCountIsSetWhileReading_Model : RedisTestModel { } + + [Kind("cf_TestTransactionalUpdateInvalidatesQuery")] + private class TestTransactionalUpdateInvalidatesQuery_Model : RedisTestModel { } + + [Kind("cf_TestCreateInvalidatesQuery")] + private class TestCreateInvalidatesQuery_Model : RedisTestModel { } + + [Kind("cf_TestUpdateWithNoOriginalDataDoesNotCrash")] + private class TestUpdateWithNoOriginalDataDoesNotCrash_Model : RedisTestModel { } + + [Kind("cf_TestUpdateInvalidatesRelevantQuery")] + private class TestUpdateInvalidatesRelevantQuery_Model : RedisTestModel { } + + [Kind("cf_TestUpdateDoesNotInvalidateIrrelevantQuery")] + private class TestUpdateDoesNotInvalidateIrrelevantQuery_Model : RedisTestModel { } + + [Kind("cf_TestTransactionalUpdateDoesNotInvalidateCacheUntilCommit")] + private class TestTransactionalUpdateDoesNotInvalidateCacheUntilCommit_Model : RedisTestModel { } + + [Kind("cf_TestTransactionalUpdateFromNull")] + private class TestTransactionalUpdateFromNull_Model : RedisTestModel { } + + [Kind("cf_TestNonTransactionalUpdateFromNull")] + private class TestNonTransactionalUpdateFromNull_Model : RedisTestModel { } + + [Kind("cf_TestQueryOrdering")] + private class TestQueryOrdering_Model : RedisTestModel { } + + [Kind("cf_TestQueryEverything")] + private class TestQueryEverything_Model : RedisTestModel { } + + [Kind("cf_TestDeletedEntityIsNotInCachedQueryEverything")] + private class TestDeletedEntityIsNotInCachedQueryEverything_Model : RedisTestModel { } + + [Fact] + public async Task TestLoadedEntityMatchesCreatedEntity() + { + var model = new TestLoadedEntityMatchesCreatedEntity_Model + { + forTest = "TestLoadedEntityMatchesCreatedEntity", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = 
_env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await directLayer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + Assert.NotNull(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + + var metrics = new RepositoryOperationMetrics(); + var loadedModel = await layer.LoadAsync(string.Empty, model.Key, null, metrics, CancellationToken.None).ConfigureAwait(true); + + Assert.Equal(loadedModel!.Key, model.Key); + Assert.Equal(loadedModel.forTest, model.forTest); + Assert.Equal(loadedModel.string1, model.string1); + Assert.Equal(loadedModel.number1, model.number1); + Assert.Equal(loadedModel.number2, model.number2); + // NOTE: We can't compare the timestamp field, since Datastore doesn't have + // as much resolution as C# or the Redis caching layer. + + Assert.True(metrics.CacheDidRead); + } + + private string SerializePathElement(PathElement pe) + { + var kind = pe.Kind.Contains('-', StringComparison.Ordinal) ? 
Convert.ToBase64String(Encoding.UTF8.GetBytes(pe.Kind)) : pe.Kind; + if (pe.IdTypeCase == PathElement.IdTypeOneofCase.None) + { + return $"{kind}-none"; + } + else if (pe.IdTypeCase == PathElement.IdTypeOneofCase.Id) + { + return $"{kind}-id-{pe.Id}"; + } + else if (pe.IdTypeCase == PathElement.IdTypeOneofCase.Name) + { + return $"{kind}-name-{Convert.ToBase64String(Encoding.UTF8.GetBytes(pe.Name))}"; + } + throw new NotImplementedException(); + } + + private string GetSimpleCacheKey(Key key) + { + ArgumentNullException.ThrowIfNull(key); + if (key.PartitionId == null) throw new ArgumentNullException("key.PartitionId"); + if (key.PartitionId.ProjectId == null) throw new ArgumentNullException("key.PartitionId.ProjectId"); + if (key.PartitionId.NamespaceId == null) throw new ArgumentNullException("key.PartitionId.NamespaceId"); + if (key.Path == null) throw new ArgumentNullException("key.Path"); + return $"KEY:{key.PartitionId.ProjectId}/{key.PartitionId.NamespaceId}:{string.Join(":", key.Path.Select(SerializePathElement))}"; + } + + [Fact] + public async Task TestLoadedEntityIsInCache() + { + var model = new TestLoadedEntityIsInCache_Model + { + forTest = "TestLoadedEntityIsInCache", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var redis = _env.Services.GetRequiredService().GetDatabase(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await directLayer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + Assert.NotNull(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); 
+ + var value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.True(value.HasValue); + + // The format of the cached value is not stable, so we don't test the contents. We just care that + // it's in the cache. + } + + [Fact] + public async Task TestMultipleEntityLoadWorks() + { + var models = new[] + { + new TestMultipleEntityLoadWorks_Model + { + forTest = "TestMultipleEntityLoadWorks", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + new TestMultipleEntityLoadWorks_Model + { + forTest = "TestMultipleEntityLoadWorks", + string1 = "test2", + number1 = 11, + number2 = 21, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var redis = _env.Services.GetRequiredService().GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToListAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await directLayer.LoadAsync(string.Empty, models[0].Key, null, null, CancellationToken.None).ConfigureAwait(true)); + Assert.NotNull(await directLayer.LoadAsync(string.Empty, models[1].Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + Assert.NotNull(await layer.LoadAsync(string.Empty, models[0].Key, null, null, CancellationToken.None).ConfigureAwait(true)); + Assert.NotNull(await layer.LoadAsync(string.Empty, models[1].Key, null, null, CancellationToken.None).ConfigureAwait(true)); + + await redis.KeyDeleteAsync(GetSimpleCacheKey(models[0].Key)).ConfigureAwait(true); + await redis.KeyDeleteAsync(GetSimpleCacheKey(models[1].Key)).ConfigureAwait(true); + + for (int i = 0; i < 2; i++) + { + var loadedModels = await layer.LoadAsync(string.Empty, models.Select(x => 
x.Key).ToAsyncEnumerable(), null, null, CancellationToken.None).ToDictionaryAsync(k => k.Key, v => v.Value).ConfigureAwait(true); + + Assert.True(loadedModels.ContainsKey(models[0].Key)); + Assert.True(loadedModels.ContainsKey(models[1].Key)); + + Assert.Equal(loadedModels[models[0].Key]!.Key, models[0].Key); + Assert.Equal(loadedModels[models[0].Key]!.forTest, models[0].forTest); + Assert.Equal(loadedModels[models[0].Key]!.string1, models[0].string1); + Assert.Equal(loadedModels[models[0].Key]!.number1, models[0].number1); + Assert.Equal(loadedModels[models[0].Key]!.number2, models[0].number2); + + Assert.Equal(loadedModels[models[1].Key]!.Key, models[1].Key); + Assert.Equal(loadedModels[models[1].Key]!.forTest, models[1].forTest); + Assert.Equal(loadedModels[models[1].Key]!.string1, models[1].string1); + Assert.Equal(loadedModels[models[1].Key]!.number1, models[1].number1); + Assert.Equal(loadedModels[models[1].Key]!.number2, models[1].number2); + + Assert.True((await redis.StringGetAsync(GetSimpleCacheKey(models[0].Key)).ConfigureAwait(true)).HasValue); + Assert.True((await redis.StringGetAsync(GetSimpleCacheKey(models[1].Key)).ConfigureAwait(true)).HasValue); + } + } + + [Fact] + public async Task TestMultipleEntityLoadWorksWithoutCacheClear() + { + var models = new[] + { + new TestMultipleEntityLoadWorksWithoutCacheClear_Model + { + forTest = "TestMultipleEntityLoadWorks", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + new TestMultipleEntityLoadWorksWithoutCacheClear_Model + { + forTest = "TestMultipleEntityLoadWorks", + string1 = "test2", + number1 = 11, + number2 = 21, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var redis = _env.Services.GetRequiredService().GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, 
CancellationToken.None).ToListAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await directLayer.LoadAsync(string.Empty, models[0].Key, null, null, CancellationToken.None).ConfigureAwait(true)); + Assert.NotNull(await directLayer.LoadAsync(string.Empty, models[1].Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + Assert.NotNull(await layer.LoadAsync(string.Empty, models[0].Key, null, null, CancellationToken.None).ConfigureAwait(true)); + Assert.NotNull(await layer.LoadAsync(string.Empty, models[1].Key, null, null, CancellationToken.None).ConfigureAwait(true)); + + await redis.KeyDeleteAsync(GetSimpleCacheKey(models[0].Key)).ConfigureAwait(true); + await redis.KeyDeleteAsync(GetSimpleCacheKey(models[1].Key)).ConfigureAwait(true); + + for (int i = 0; i < 2; i++) + { + var loadedModels = await layer.LoadAsync(string.Empty, models.Select(x => x.Key).ToAsyncEnumerable(), null, null, CancellationToken.None).ToDictionaryAsync(k => k.Key, v => v.Value).ConfigureAwait(true); + + Assert.True(loadedModels.ContainsKey(models[0].Key)); + Assert.True(loadedModels.ContainsKey(models[1].Key)); + + Assert.Equal(loadedModels[models[0].Key]!.Key, models[0].Key); + Assert.Equal(loadedModels[models[0].Key]!.forTest, models[0].forTest); + Assert.Equal(loadedModels[models[0].Key]!.string1, models[0].string1); + Assert.Equal(loadedModels[models[0].Key]!.number1, models[0].number1); + Assert.Equal(loadedModels[models[0].Key]!.number2, models[0].number2); + + Assert.Equal(loadedModels[models[1].Key]!.Key, models[1].Key); + Assert.Equal(loadedModels[models[1].Key]!.forTest, models[1].forTest); + Assert.Equal(loadedModels[models[1].Key]!.string1, models[1].string1); + Assert.Equal(loadedModels[models[1].Key]!.number1, models[1].number1); + Assert.Equal(loadedModels[models[1].Key]!.number2, models[1].number2); + + Assert.True((await 
redis.StringGetAsync(GetSimpleCacheKey(models[0].Key)).ConfigureAwait(true)).HasValue); + Assert.True((await redis.StringGetAsync(GetSimpleCacheKey(models[1].Key)).ConfigureAwait(true)).HasValue); + } + } + + [Fact] + public async Task TestUpdatedEntityIsNotInCache() + { + var model = new TestUpdatedEntityIsNotInCache_Model + { + forTest = "TestUpdatedEntityIsNotInCache", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var redis = _env.Services.GetRequiredService().GetDatabase(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + var value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.False(value.HasValue); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await directLayer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + Assert.NotNull(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + + value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.True(value.HasValue); + + model.string1 = "test2"; + await layer.UpdateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ToListAsync().ConfigureAwait(true); + + value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.False(value.HasValue); + } + + [Fact] + public async Task TestUpsertedEntityIsNotInCache() + { + var model = new TestUpsertedEntityIsNotInCache_Model + { + forTest = "TestUpsertedEntityIsNotInCache", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = 
SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var redis = _env.Services.GetRequiredService().GetDatabase(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + var value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.False(value.HasValue); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await directLayer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + Assert.NotNull(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + + value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.True(value.HasValue); + + model.string1 = "test2"; + await layer.UpsertAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ToListAsync().ConfigureAwait(true); + + value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.False(value.HasValue); + } + + [Fact] + public async Task TestDeletedEntityIsNotInCache() + { + var model = new TestDeletedEntityIsNotInCache_Model + { + forTest = "TestDeletedEntityIsNotInCache", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = SystemClock.Instance.GetCurrentInstant(), + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var redis = _env.Services.GetRequiredService().GetDatabase(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + var value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + 
Assert.False(value.HasValue); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await directLayer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + Assert.NotNull(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + + value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.True(value.HasValue); + + await layer.DeleteAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ConfigureAwait(true); + + value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.False(value.HasValue); + } + + [Fact] + public async Task TestCreateThenQuery() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var model = new TestCreateThenQuery_Model + { + forTest = "TestCreateThenQuery", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = instant, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(1, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestCreateThenQuery", + null, + 1, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestCreateThenQuery", + null, + 1, + null, + metrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, 
model.Key); + Assert.True(metrics.CacheDidWrite); + Assert.False(metrics.CacheDidRead); + } + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestCreateThenQuery", + null, + 1, + null, + metrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, model.Key); + Assert.False(metrics.CacheDidWrite); + Assert.True(metrics.CacheDidRead); + } + } + + [Fact] + public async Task TestCreateThenQueryThenUpdateThenQuery() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var model = new TestCreateThenQueryThenUpdateThenQuery_Model + { + forTest = "TestCreateThenQueryThenUpdateThenQuery", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = instant, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(1, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestCreateThenQueryThenUpdateThenQuery", + null, + 1, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestCreateThenQueryThenUpdateThenQuery", + null, + 1, + null, + metrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, model.Key); + Assert.True(metrics.CacheDidWrite); + Assert.False(metrics.CacheDidRead); + + 
Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{metrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{metrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{metrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{metrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{metrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{metrics.CacheHash} is persistent (should be TTL)"); + } + + var queryMetrics = new RepositoryOperationMetrics(); + { + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestCreateThenQueryThenUpdateThenQuery", + null, + 1, + null, + queryMetrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, model.Key); + Assert.False(queryMetrics.CacheDidWrite); + Assert.True(queryMetrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} does not exist"); + 
+ Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{queryMetrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{queryMetrics.CacheHash} is persistent (should be TTL)"); + } + + { + var metrics = new RepositoryOperationMetrics(); + model.string1 = "test2"; + await layer.UpdateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, metrics, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.True(metrics.CacheQueriesFlushed >= 1); + + Assert.False(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} exists"); + } + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestCreateThenQueryThenUpdateThenQuery", + null, + 1, + null, + metrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, model.Key); + Assert.True(metrics.CacheDidWrite); + Assert.False(metrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{metrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{metrics.CacheHash} exists"); + 
Assert.False(await cache.KeyExistsAsync($"QUERYWC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{metrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{metrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{metrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{metrics.CacheHash} is persistent (should be TTL)"); + } + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestCreateThenQueryThenUpdateThenQuery", + null, + 1, + null, + metrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, model.Key); + Assert.False(metrics.CacheDidWrite); + Assert.True(metrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{metrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{metrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{metrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{metrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{metrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{metrics.CacheHash} is persistent (should be 
TTL)"); + } + } + + [Fact] + public async Task TestReaderCountIsSetWhileReading() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestReaderCountIsSetWhileReading_Model + { + forTest = "TestReaderCountIsSetWhileReading", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestReaderCountIsSetWhileReading_Model + { + forTest = "TestReaderCountIsSetWhileReading", + string1 = "test2", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(2, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestReaderCountIsSetWhileReading", + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestReaderCountIsSetWhileReading", + null, + null, + null, + null, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + var metrics = new RepositoryOperationMetrics(); + var enumerator = layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == "TestReaderCountIsSetWhileReading", + null, + null, + null, + metrics, + CancellationToken.None).GetAsyncEnumerator(); + + Assert.True((await cache.StringGetAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true)).IsNull); + while (await enumerator.MoveNextAsync().ConfigureAwait(true)) + { + if (enumerator.Current != null) + { + Assert.Equal("1", await 
cache.StringGetAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true)); + } + } + await enumerator.DisposeAsync().ConfigureAwait(true); + Assert.True((await cache.StringGetAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true)).IsNull); + } + + [Fact] + public async Task TestTransactionalUpdateInvalidatesQuery() + { + const string name = "TestTransactionalUpdateInvalidatesQuery"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestTransactionalUpdateInvalidatesQuery_Model + { + forTest = name, + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(1, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + var metrics = new RepositoryOperationMetrics(); + var entity = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + metrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.True(metrics.CacheDidRead); + + var updateMetrics = new RepositoryOperationMetrics(); + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + 
models[0].string1 = "test2"; + await layer.UpdateAsync(string.Empty, models.ToAsyncEnumerable(), transaction, null, CancellationToken.None).ToListAsync().ConfigureAwait(true); + await layer.CommitAsync(string.Empty, transaction, updateMetrics, CancellationToken.None).ConfigureAwait(true); + + Assert.True(updateMetrics.CacheQueriesFlushed > 1); + + Assert.False(await cache.KeyExistsAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{metrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{metrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{metrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{metrics.CacheHash} exists"); + + metrics = new RepositoryOperationMetrics(); + entity = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + metrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(entity); + Assert.False(metrics.CacheDidRead); + Assert.True(metrics.CacheDidWrite); + Assert.Equal("test2", entity.string1); + } + + [Fact] + public async Task TestCreateInvalidatesQuery() + { + const string name = "TestCreateInvalidatesQuery"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var model = new TestCreateInvalidatesQuery_Model + { + forTest = name, + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = instant, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await 
RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(1, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + 1, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + 1, + null, + metrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, model.Key); + Assert.True(metrics.CacheDidWrite); + Assert.False(metrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{metrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{metrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{metrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{metrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{metrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{metrics.CacheHash} is persistent (should be TTL)"); + } + + var queryMetrics = new RepositoryOperationMetrics(); + { + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + 1, + null, + queryMetrics, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + Assert.NotNull(result); + Assert.Equal(result.Key, model.Key); + 
Assert.False(queryMetrics.CacheDidWrite); + Assert.True(queryMetrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{queryMetrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{queryMetrics.CacheHash} is persistent (should be TTL)"); + } + + { + var newModel = new TestCreateInvalidatesQuery_Model + { + forTest = name, + string1 = "test2", + number1 = 10, + number2 = 20, + timestamp = instant, + }; + + var metrics = new RepositoryOperationMetrics(); + await layer.CreateAsync(string.Empty, new[] { newModel }.ToAsyncEnumerable(), null, metrics, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.False(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), 
$"QUERYDATA:{queryMetrics.CacheHash} exists"); + } + } + + [Fact] + public async Task TestUpdateWithNoOriginalDataDoesNotCrash() + { + const string name = "TestUpdateWithNoOriginalDataDoesNotCrash"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestUpdateWithNoOriginalDataDoesNotCrash_Model + { + forTest = name, + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestUpdateWithNoOriginalDataDoesNotCrash_Model + { + forTest = name, + string1 = "test2", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(2, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name && x.string1 == "test1", + null, + null, + null, + metrics, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Single(result); + Assert.True(metrics.CacheDidWrite); + Assert.False(metrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{metrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{metrics.CacheHash} exists"); + Assert.False(await 
cache.KeyExistsAsync($"QUERYWC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{metrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{metrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{metrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{metrics.CacheHash} is persistent (should be TTL)"); + } + + var queryMetrics = new RepositoryOperationMetrics(); + { + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name && x.string1 == "test1", + null, + null, + null, + queryMetrics, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Single(result); + Assert.False(queryMetrics.CacheDidWrite); + Assert.True(queryMetrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{queryMetrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{queryMetrics.CacheHash} is persistent (should 
be TTL)"); + } + + { + models[0].string1 = "test3"; + + var metrics = new RepositoryOperationMetrics(); + await layer.UpdateAsync(string.Empty, new[] { models[0] }.ToAsyncEnumerable(), null, metrics, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.False(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} exists"); + } + } + + [Fact] + public async Task TestUpdateInvalidatesRelevantQuery() + { + const string name = "TestUpdateInvalidatesRelevantQuery"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestUpdateInvalidatesRelevantQuery_Model + { + forTest = name, + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestUpdateInvalidatesRelevantQuery_Model + { + forTest = name, + string1 = "test2", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(2, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + 
}).ConfigureAwait(true); + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name && x.string1 == "test1", + null, + null, + null, + metrics, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Single(result); + Assert.True(metrics.CacheDidWrite); + Assert.False(metrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{metrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{metrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{metrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{metrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{metrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{metrics.CacheHash} is persistent (should be TTL)"); + } + + var queryMetrics = new RepositoryOperationMetrics(); + { + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name && x.string1 == "test1", + null, + null, + null, + queryMetrics, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Single(result); + Assert.False(queryMetrics.CacheDidWrite); + Assert.True(queryMetrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} does not exist"); + Assert.False(await 
cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{queryMetrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{queryMetrics.CacheHash} is persistent (should be TTL)"); + + models[0] = result[0]; + } + + { + models[0].string1 = "test3"; + + var metrics = new RepositoryOperationMetrics(); + await layer.UpdateAsync(string.Empty, new[] { models[0] }.ToAsyncEnumerable(), null, metrics, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.False(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} exists"); + } + } + + [Fact] + public async Task TestUpdateDoesNotInvalidateIrrelevantQuery() + { + const string name = "TestUpdateDoesNotInvalidateIrrelevantQuery"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestUpdateDoesNotInvalidateIrrelevantQuery_Model + { + forTest = name, + 
string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestUpdateDoesNotInvalidateIrrelevantQuery_Model + { + forTest = name, + string1 = "test2", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(2, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name && x.string1 == "test1", + null, + null, + null, + metrics, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Single(result); + Assert.True(metrics.CacheDidWrite); + Assert.False(metrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{metrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{metrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{metrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{metrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{metrics.CacheHash} is persistent 
(should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{metrics.CacheHash} is persistent (should be TTL)"); + } + + var queryMetrics = new RepositoryOperationMetrics(); + { + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name && x.string1 == "test1", + null, + null, + null, + queryMetrics, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Single(result); + Assert.False(queryMetrics.CacheDidWrite); + Assert.True(queryMetrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{queryMetrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{queryMetrics.CacheHash} is persistent (should be TTL)"); + } + + models[1] = (await directLayer.LoadAsync(string.Empty, models[1].Key, null, null, CancellationToken.None).ConfigureAwait(true))!; + + { + models[1].number1 = 200; + + var metrics = new RepositoryOperationMetrics(); + await layer.UpdateAsync(string.Empty, new[] { models[1] }.ToAsyncEnumerable(), null, metrics, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.True(await 
cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} does not exist"); + } + } + + [Fact] + public async Task TestTransactionalUpdateDoesNotInvalidateCacheUntilCommit() + { + const string name = "TestTransactionalUpdateDoesNotInvalidateCacheUntilCommit"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestTransactionalUpdateDoesNotInvalidateCacheUntilCommit_Model + { + forTest = name, + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestTransactionalUpdateDoesNotInvalidateCacheUntilCommit_Model + { + forTest = name, + string1 = "test2", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(2, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + { + var metrics = new RepositoryOperationMetrics(); + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == 
name && x.string1 == "test1", + null, + null, + null, + metrics, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Single(result); + Assert.True(metrics.CacheDidWrite); + Assert.False(metrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{metrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{metrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{metrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{metrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{metrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{metrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{metrics.CacheHash} is persistent (should be TTL)"); + } + + var queryMetrics = new RepositoryOperationMetrics(); + { + var result = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name && x.string1 == "test1", + null, + null, + null, + queryMetrics, + CancellationToken.None).ToListAsync().ConfigureAwait(true); + + Assert.Single(result); + Assert.False(queryMetrics.CacheDidWrite); + Assert.True(queryMetrics.CacheDidRead); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), 
$"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} does not exist"); + + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYCACHE:{queryMetrics.CacheHash} is persistent (should be TTL)"); + Assert.False(await cache.KeyTimeToLiveAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true) == null, $"QUERYDATA:{queryMetrics.CacheHash} is persistent (should be TTL)"); + + models[0] = result[0]; + } + + { + models[0].string1 = "test3"; + + var metrics = new RepositoryOperationMetrics(); + + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + await layer.UpdateAsync(string.Empty, new[] { models[0] }.ToAsyncEnumerable(), transaction, metrics, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.True(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} does not exist"); + Assert.False(await cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.True(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} does not exist"); + + await layer.CommitAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + + Assert.False(await cache.KeyExistsAsync($"QUERYCACHE:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYCACHE:{queryMetrics.CacheHash} exists"); + Assert.False(await 
cache.KeyExistsAsync($"QUERYRC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYRC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYWC:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYWC:{queryMetrics.CacheHash} exists"); + Assert.False(await cache.KeyExistsAsync($"QUERYDATA:{queryMetrics.CacheHash}").ConfigureAwait(true), $"QUERYDATA:{queryMetrics.CacheHash} exists"); + } + } + + [Fact] + public async Task TestCreateAndLoadWithEmbeddedEntity() + { + var layer = _env.Services.GetRequiredService(); + + var subentity1 = new Entity(); + subentity1.Key = null; + subentity1["a"] = "hello"; + + var subentity2 = new Entity(); + subentity2.Key = null; + subentity2["a"] = "world"; + + var subentity3 = new Entity(); + subentity3.Key = null; + subentity3["a"] = "blah"; + + var entity = new Entity(); + entity.Key = null; + entity["null"] = Value.ForNull(); + entity["string"] = "test"; + entity["integer"] = 5; + entity["double"] = 5.0; + entity["array"] = new[] + { + subentity1, + subentity2, + }; + entity["arrayString"] = new[] + { + "hello", + "world" + }; + entity["blob"] = ByteString.CopyFromUtf8("test"); + entity["entity"] = subentity3; + entity["geopoint"] = new LatLng { Latitude = 20, Longitude = 40 }; + entity["key"] = (await layer.GetKeyFactoryAsync(string.Empty, null, CancellationToken.None).ConfigureAwait(true)).CreateKey(1); + entity["timestamp"] = Timestamp.FromDateTimeOffset(DateTimeOffset.UtcNow); + + var model = new EmbeddedEntityModel + { + forTest = "TestCreateAndLoadWithEmbeddedEntityRedis", + timestamp = SystemClock.Instance.GetCurrentInstant(), + entity = entity, + }; + + var returnedModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.NotNull(model.Key); + Assert.NotNull(returnedModel.Key); + Assert.Equal(model.Key, returnedModel.Key); + Assert.Equal(model, returnedModel); + + await 
RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, returnedModel.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + var loadedModel = await layer.LoadAsync(string.Empty, returnedModel.Key, null, null, CancellationToken.None).ConfigureAwait(true); + + Assert.NotEqual(loadedModel, model); + Assert.NotNull(loadedModel!.timestamp); + Assert.NotNull(loadedModel.entity); + foreach (var property in entity.Properties) + { + RedisLayerRepositoryLayerTests.AssertProperty(loadedModel.entity!, property.Key, property.Value); + } + } + + private static void AssertProperty(Entity entity, string name, Value compareWith) + { + Assert.True(entity.Properties.ContainsKey(name)); + Assert.NotNull(entity[name]); + if (compareWith.ValueTypeCase == Value.ValueTypeOneofCase.TimestampValue) + { + Assert.Equal(compareWith.ValueTypeCase, entity[name].ValueTypeCase); + Assert.Equal(compareWith.TimestampValue.Seconds, entity[name].TimestampValue.Seconds); + } + else + { + Assert.Equal(compareWith, entity[name]); + } + } + + [Fact] + public async Task TestCreateAndQueryWithEmbeddedEntity() + { + var layer = _env.Services.GetRequiredService(); + + var subentity1 = new Entity(); + subentity1.Key = null; + subentity1["a"] = "hello"; + + var subentity2 = new Entity(); + subentity2.Key = null; + subentity2["a"] = "world"; + + var subentity3 = new Entity(); + subentity3.Key = null; + subentity3["a"] = "blah"; + + var entity = new Entity(); + entity.Key = null; + entity["null"] = Value.ForNull(); + entity["string"] = "test"; + entity["integer"] = 5; + entity["double"] = 5.0; + entity["array"] = new[] + { + subentity1, + subentity2, + }; + entity["arrayString"] = new[] + { + "hello", + "world" + }; + entity["blob"] = ByteString.CopyFromUtf8("test"); + entity["entity"] = subentity3; + entity["geopoint"] = new LatLng { Latitude = 20, Longitude = 40 }; + entity["key"] = (await 
layer.GetKeyFactoryAsync(string.Empty, null, CancellationToken.None).ConfigureAwait(true)).CreateKey(1); + entity["timestamp"] = Timestamp.FromDateTimeOffset(DateTimeOffset.UtcNow); + + const string name = "TestCreateAndQueryWithEmbeddedEntityRedis"; + var timestamp = SystemClock.Instance.GetCurrentInstant(); + + var model = new EmbeddedEntityModel + { + forTest = name, + timestamp = timestamp, + entity = entity, + }; + + var returnedModel = await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.NotNull(model.Key); + Assert.NotNull(returnedModel.Key); + Assert.Equal(model.Key, returnedModel.Key); + Assert.Equal(model, returnedModel); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, returnedModel.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + var loadedModel = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && + x.timestamp == timestamp && + x.entity!["string"].StringValue == "test", + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.NotNull(loadedModel); + + Assert.NotEqual(loadedModel, model); + Assert.NotNull(loadedModel!.timestamp); + Assert.NotNull(loadedModel.entity); + foreach (var property in entity.Properties) + { + RedisLayerRepositoryLayerTests.AssertProperty(loadedModel.entity!, property.Key, property.Value); + } + + loadedModel = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && + x.timestamp == timestamp && + x.entity!["string"].StringValue == "not_found", + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.Null(loadedModel); + + loadedModel = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && + x.timestamp == timestamp && + 
x.entity!["arrayString"].StringValue == "hello", + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.NotNull(loadedModel); + + Assert.NotEqual(loadedModel, model); + Assert.NotNull(loadedModel.timestamp); + Assert.NotNull(loadedModel.entity); + foreach (var property in entity.Properties) + { + RedisLayerRepositoryLayerTests.AssertProperty(loadedModel.entity!, property.Key, property.Value); + } + + loadedModel = await layer.QueryAsync( + string.Empty, + x => + x.forTest == name && + x.timestamp == timestamp && + x.entity!["array"].EntityValue["a"].StringValue == "world", + null, + 1, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.NotNull(loadedModel); + + Assert.NotEqual(loadedModel, model); + Assert.NotNull(loadedModel.timestamp); + Assert.NotNull(loadedModel.entity); + foreach (var property in entity.Properties) + { + RedisLayerRepositoryLayerTests.AssertProperty(loadedModel.entity!, property.Key, property.Value); + } + } + + [Fact] + public async Task TestTransactionalUpdateFromNull() + { + const string name = "TestTransactionalUpdateFromNull"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestTransactionalUpdateFromNull_Model + { + forTest = name, + string1 = null, + number1 = 10, + number2 = 20, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(1, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + 
CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + var entity = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + + var transaction = await layer.BeginTransactionAsync(string.Empty, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(true); + Assert.NotNull(entity); + entity.string1 = "test2"; + await layer.UpdateAsync(string.Empty, new[] { entity }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).ToListAsync().ConfigureAwait(true); + await layer.CommitAsync(string.Empty, transaction, null, CancellationToken.None).ConfigureAwait(true); + } + + [Fact] + public async Task TestNonTransactionalUpdateFromNull() + { + const string name = "TestNonTransactionalUpdateFromNull"; + + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestNonTransactionalUpdateFromNull_Model + { + forTest = name, + string1 = null, + number1 = 10, + number2 = 20, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var cache = (_env.Services.GetRequiredService()).GetDatabase(); + + await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.Equal(1, await directLayer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + CancellationToken.None).CountAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + var entity = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.forTest == name, + null, + null, + null, + null, + 
CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.NotNull(entity); + + entity.string1 = "test2"; + await layer.UpdateAsync(string.Empty, new[] { entity }.ToAsyncEnumerable(), null, null, CancellationToken.None).ToListAsync().ConfigureAwait(true); + } + + [Fact] + public async Task TestQueryOrdering() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestQueryOrdering_Model + { + forTest = "TestQueryOrdering", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestQueryOrdering_Model + { + forTest = "TestQueryOrdering", + string1 = "test2", + number1 = 10, + number2 = 21, + timestamp = instant, + }, + new TestQueryOrdering_Model + { + forTest = "TestQueryOrdering", + string1 = "test3", + number1 = 11, + number2 = 22, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + var result = await layer.QueryAsync( + string.Empty, + x => + x.timestamp == instant && + x.forTest == "TestQueryOrdering", + x => x.number1 < x.number1 | x.number2 > x.number2, + 3, + null, + null, + CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + Assert.Equal(3, result.Length); + Assert.Equal("test2", result[0].string1); + Assert.Equal("test1", result[1].string1); + Assert.Equal("test3", result[2].string1); + }).ConfigureAwait(true); + } + + [Fact] + public async Task TestQueryEverything() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var models = new[] + { + new TestQueryEverything_Model + { + forTest = "TestQueryEverything", + string1 = "test1", + number1 = 10, + number2 = 20, + timestamp = instant, + }, + new TestQueryEverything_Model + { + forTest = "TestQueryEverything", + 
string1 = "test2", + number1 = 10, + number2 = 21, + timestamp = instant, + }, + new TestQueryEverything_Model + { + forTest = "TestQueryEverything", + string1 = "test3", + number1 = 11, + number2 = 22, + timestamp = instant, + }, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + + var returnedModels = await layer.CreateAsync(string.Empty, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + var result = await directLayer.QueryAsync( + string.Empty, + x => true, + null, + null, + null, + null, + CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + Assert.Equal(3, result.Count(x => x.forTest == "TestQueryEverything" && Math.Abs((x.timestamp - instant)?.TotalSeconds ?? double.MaxValue) < 0.5)); + }).ConfigureAwait(true); + + var result = await layer.QueryAsync( + string.Empty, + x => true, + null, + null, + null, + null, + CancellationToken.None).ToArrayAsync().ConfigureAwait(true); + Assert.Equal(3, result.Count(x => x.forTest == "TestQueryEverything" && Math.Abs((x.timestamp - instant)?.TotalSeconds ?? 
double.MaxValue) < 0.5)); + } + + [Fact] + public async Task TestDeletedEntityIsNotInCachedQueryEverything() + { + var instant = SystemClock.Instance.GetCurrentInstant(); + + var model = new TestDeletedEntityIsNotInCachedQueryEverything_Model + { + forTest = "TestDeletedEntityIsNotInCachedQueryEverything", + string1 = "test", + number1 = 10, + number2 = 20, + timestamp = instant, + }; + + var layer = _env.Services.GetRequiredService(); + var directLayer = _env.Services.GetRequiredService(); + var redis = _env.Services.GetRequiredService().GetDatabase(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + var value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.False(value.HasValue); + + await RedisLayerRepositoryLayerTests.HandleEventualConsistency(async () => + { + Assert.NotNull(await directLayer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + + var countedEntities = await layer.QueryAsync( + string.Empty, + x => true, + null, + null, + null, + null, + CancellationToken.None) + .CountAsync(x => x.forTest == "TestDeletedEntityIsNotInCachedQueryEverything" && Math.Abs((x.timestamp - instant)?.TotalSeconds ?? 
double.MaxValue) < 0.5).ConfigureAwait(true); + Assert.Equal(1, countedEntities); + + Assert.NotNull(await layer.LoadAsync(string.Empty, model.Key, null, null, CancellationToken.None).ConfigureAwait(true)); + + value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.True(value.HasValue); + + await layer.DeleteAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).ConfigureAwait(true); + + value = await redis.StringGetAsync(GetSimpleCacheKey(model.Key)).ConfigureAwait(true); + Assert.False(value.HasValue); + + countedEntities = await layer.QueryAsync( + string.Empty, + x => true, + null, + null, + null, + null, + CancellationToken.None) + .CountAsync(x => x.forTest == "TestDeletedEntityIsNotInCachedQueryEverything" && (x.timestamp - instant)?.TotalSeconds < 0.5).ConfigureAwait(true); + Assert.Equal(0, countedEntities); + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/RedisTestModel.cs b/UET/Redpoint.CloudFramework.Tests/RedisTestModel.cs new file mode 100644 index 00000000..ab169b1f --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/RedisTestModel.cs @@ -0,0 +1,39 @@ +using Google.Cloud.Datastore.V1; +using NodaTime; +using Redpoint.CloudFramework.Models; + +namespace Redpoint.CloudFramework.Tests +{ + [Kind("cf_redisTest")] + public class RedisTestModel : AttributedModel + { + [Type(FieldType.String), Indexed] + public string? forTest { get; set; } + + [Type(FieldType.String), Indexed] + public string? string1 { get; set; } + + [Type(FieldType.Integer), Indexed] + public long? number1 { get; set; } + + [Type(FieldType.Integer), Indexed] + public long? number2 { get; set; } + + [Type(FieldType.Timestamp), Indexed] + public Instant? timestamp { get; set; } + + [Type(FieldType.Key)] + public Key? keyValue { get; set; } + + public TestModel? untracked { get; set; } + + [Type(FieldType.String), Indexed] + protected string? 
protectedString1 { get; set; } + + [Type(FieldType.String), Indexed] + private string? privateString1 { get; set; } + + [Type(FieldType.String), Indexed] + internal string? internalString1 { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/Redpoint.CloudFramework.Tests.csproj b/UET/Redpoint.CloudFramework.Tests/Redpoint.CloudFramework.Tests.csproj new file mode 100644 index 00000000..c934857b --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/Redpoint.CloudFramework.Tests.csproj @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/UET/Redpoint.CloudFramework.Tests/SecretManagerTests.cs b/UET/Redpoint.CloudFramework.Tests/SecretManagerTests.cs new file mode 100644 index 00000000..524d5096 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/SecretManagerTests.cs @@ -0,0 +1,352 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Microsoft.Extensions.DependencyInjection; + using System; + using System.Threading.Tasks; + using Xunit; + using Redpoint.CloudFramework.Configuration; + using Redpoint.CloudFramework.GoogleInfrastructure; + using Redpoint.CloudFramework.Tracing; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.FileProviders; + using Google.Cloud.SecretManager.V1; + using Grpc.Core; + using Redpoint.CloudFramework.Startup; + using Google.Apis.Auth.OAuth2.Responses; + using Redpoint.Concurrency; + using Google.Protobuf; + using System.Diagnostics.CodeAnalysis; + using Microsoft.Extensions.Configuration; + using System.Runtime.InteropServices; + using System.Reflection; + + public class SecretManagerTests + { + private const int _pubSubWaitMilliseconds = 500; + private const int _pubSubWaitIteration = 120000 / _pubSubWaitMilliseconds; + + private class DummyHostEnvironment : IHostEnvironment + { + public string ApplicationName { get => throw new NotImplementedException(); set => throw new NotImplementedException(); } + public IFileProvider ContentRootFileProvider { get => throw new NotImplementedException(); set => throw new 
NotImplementedException(); } + public string ContentRootPath { get => throw new NotImplementedException(); set => throw new NotImplementedException(); } + public string EnvironmentName { get => "Production"; set => throw new NotImplementedException(); } + } + + private class RandomSecretManagerNotificationSuffixProvider : ISecretManagerNotificationSuffixProvider + { + [SuppressMessage("Security", "CA5394:Do not use insecure randomness", Justification = "Not used for security.")] + public RandomSecretManagerNotificationSuffixProvider() + { + Suffix = $"automation-{Random.Shared.NextInt64()}"; + } + + public string Suffix { get; private init; } + } + + private class IntegrationGoogleProjectIdProvider : IGoogleProjectIdProvider + { + public string ProjectId => "cloud-framework-unit-tests"; + } + + private static ServiceProvider CreateServiceProvider(bool isolatedNotificationManager = true) + { + var services = new ServiceCollection(); + services.AddSingleton(); + services.AddLogging(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSecretManagerConfiguration(true, "test-secret", isolatedNotificationManager); + services.AddHostedService(); + + var serviceProvider = services.BuildServiceProvider(); + + // Check that the execution environment has access to the test project, otherwise skip the test. 
+ var googleServices = serviceProvider.GetRequiredService(); + try + { + var secretManager = googleServices.Build( + SecretManagerServiceClient.DefaultEndpoint, + SecretManagerServiceClient.DefaultScopes); + secretManager.ListSecrets(new ListSecretsRequest()); + } + catch (RpcException ex) when ( + ex.StatusCode == StatusCode.Unauthenticated || + (ex.StatusCode == StatusCode.Internal && ex.InnerException is TokenResponseException ter && ter.Error.Error == "invalid_grant")) + { + throw new SkipException("The execution environment does not have application default credentials to access the Google Cloud test project."); + } + catch (InvalidOperationException ex) when (ex.Message.Contains("GOOGLE_APPLICATION_CREDENTIALS", StringComparison.Ordinal)) + { + throw new SkipException("The execution environment does not have application default credentials to access the Google Cloud test project."); + } + catch (TargetInvocationException tex) when ( + tex.InnerException != null && + tex.InnerException.Message.Contains("GOOGLE_APPLICATION_CREDENTIALS", StringComparison.Ordinal)) + { + throw new SkipException("The execution environment does not have application default credentials to access the Google Cloud test project."); + } + return serviceProvider; + } + + [SkippableFact] + public void ConfigurationSourceBehaviour() + { + var sp = CreateServiceProvider(); + + var csb = sp.GetRequiredService(); + Assert.True(csb.RequireSuccessfulLoad); + } + + [SkippableFact] + public void TryGetSecret() + { + var sp = CreateServiceProvider(); + + var secretAccess = sp.GetRequiredService(); + Assert.NotNull(secretAccess.TryGetSecret("test-secret")); + } + + [SkippableFact] + public void TryGetLatestSecretVersion() + { + var sp = CreateServiceProvider(); + + var secretAccess = sp.GetRequiredService(); + var secret = secretAccess.TryGetSecret("test-secret"); + Assert.NotNull(secret); + var secretVersion = secretAccess.TryGetLatestSecretVersion(secret); + Assert.NotNull(secretVersion); + 
Assert.Equal(SecretVersion.Types.State.Enabled, secretVersion.State); + } + + [SkippableFact] + public void TryAccessSecretVersion() + { + var sp = CreateServiceProvider(); + + var secretAccess = sp.GetRequiredService(); + var secret = secretAccess.TryGetSecret("test-secret"); + Assert.NotNull(secret); + var secretVersion = secretAccess.TryGetLatestSecretVersion(secret); + Assert.NotNull(secretVersion); + Assert.Equal(SecretVersion.Types.State.Enabled, secretVersion.State); + var accessed = secretAccess.TryAccessSecretVersion(secretVersion); + Assert.NotNull(accessed); + } + + [SkippableFact] + public async Task TryGetLatestSecretVersionAsync() + { + var sp = CreateServiceProvider(); + + var secretAccess = sp.GetRequiredService(); + var secret = secretAccess.TryGetSecret("test-secret"); + Assert.NotNull(secret); + var secretVersion = await secretAccess.TryGetLatestSecretVersionAsync(secret).ConfigureAwait(false); + Assert.NotNull(secretVersion); + Assert.Equal(SecretVersion.Types.State.Enabled, secretVersion.State); + } + + [SkippableFact] + public async Task TryAccessSecretVersionAsync() + { + var sp = CreateServiceProvider(); + + var secretAccess = sp.GetRequiredService(); + var secret = secretAccess.TryGetSecret("test-secret"); + Assert.NotNull(secret); + var secretVersion = await secretAccess.TryGetLatestSecretVersionAsync(secret).ConfigureAwait(false); + Assert.NotNull(secretVersion); + Assert.Equal(SecretVersion.Types.State.Enabled, secretVersion.State); + var accessed = await secretAccess.TryAccessSecretVersionAsync(secretVersion).ConfigureAwait(false); + Assert.NotNull(accessed); + } + + [SkippableFact] + public async Task Subscribe() + { + await using (CreateServiceProvider().AsAsyncDisposable(out var sp).ConfigureAwait(false)) + { + var secretAccess = sp.GetRequiredService(); + var secret = secretAccess.TryGetSecret("test-secret"); + Assert.NotNull(secret); + + var secretNotifications = sp.GetRequiredService(); + await 
secretNotifications.SubscribeAsync(secret).ConfigureAwait(false); + + var notified = false; + secretNotifications.OnSecretUpdated.Add((secret, _) => + { + notified = true; + return Task.FromResult(Google.Cloud.PubSub.V1.SubscriberClient.Reply.Ack); + }); + + // Add a new secret version. + var newVersion = await secretAccess.SecretClient.AddSecretVersionAsync(new AddSecretVersionRequest + { + ParentAsSecretName = secret.SecretName, + Payload = new SecretPayload + { + Data = ByteString.CopyFromUtf8( + """ + { + "Hello": "World2", + } + """), + } + }).ConfigureAwait(false); + + // Destroy all old versions. + await foreach (var version in secretAccess.SecretClient.ListSecretVersionsAsync(new ListSecretVersionsRequest + { + Filter = "state:(ENABLED)", + ParentAsSecretName = secret.SecretName, + }).ConfigureAwait(false)) + { + if (version.SecretVersionName != newVersion.SecretVersionName) + { + await secretAccess.SecretClient.DestroySecretVersionAsync(new DestroySecretVersionRequest + { + SecretVersionName = version.SecretVersionName, + }).ConfigureAwait(false); + } + } + + // Give Pub/Sub some time to notify us. 
+ for (int i = 0; i < _pubSubWaitIteration; i++) + { + if (notified) + { + break; + } + await Task.Delay(_pubSubWaitMilliseconds).ConfigureAwait(false); + } + Assert.True(notified); + } + } + + [SkippableFact] + [SuppressMessage("Security", "CA5394:Do not use insecure randomness", Justification = "Not used for security.")] + public async Task AutoRefreshingSecret() + { + await using (CreateServiceProvider().AsAsyncDisposable(out var sp).ConfigureAwait(false)) + { + var autoRefreshingFactory = sp.GetRequiredService(); + + await using (autoRefreshingFactory.Create("test-secret", true).AsAsyncDisposable(out var refreshingSecret).ConfigureAwait(false)) + { + Assert.NotNull(refreshingSecret); + Assert.IsType(refreshingSecret); + + var notified = false; + refreshingSecret.OnRefreshed = () => + { + notified = true; + }; + + var secretAccess = sp.GetRequiredService(); + var secret = secretAccess.TryGetSecret("test-secret"); + Assert.NotNull(secret); + + var generatedValue = $"{Random.Shared.NextInt64()}"; + + // Add a new secret version. + var newVersion = await secretAccess.SecretClient.AddSecretVersionAsync(new AddSecretVersionRequest + { + ParentAsSecretName = secret.SecretName, + Payload = new SecretPayload + { + Data = ByteString.CopyFromUtf8( + $$""" + { + "Hello": "World2", + "Test": "{{generatedValue}}", + } + """), + } + }).ConfigureAwait(false); + + // Destroy all old versions. + await foreach (var version in secretAccess.SecretClient.ListSecretVersionsAsync(new ListSecretVersionsRequest + { + Filter = "state:(ENABLED)", + ParentAsSecretName = secret.SecretName, + }).ConfigureAwait(false)) + { + if (version.SecretVersionName != newVersion.SecretVersionName) + { + await secretAccess.SecretClient.DestroySecretVersionAsync(new DestroySecretVersionRequest + { + SecretVersionName = version.SecretVersionName, + }).ConfigureAwait(false); + } + } + + // Wait for the auto-refreshing secret to be notified. 
+ for (int i = 0; i < _pubSubWaitIteration; i++) + { + if (notified) + { + break; + } + await Task.Delay(_pubSubWaitMilliseconds).ConfigureAwait(false); + } + Assert.True(notified); + + // Ensure the auto-refreshing secret has the new value. + Assert.Equal(generatedValue, refreshingSecret.Data["Test"]); + } + } + } + + [SkippableFact] + public async Task SecretManagerConfigurationProvider() + { + await using (CreateServiceProvider().AsAsyncDisposable(out var sp).ConfigureAwait(false)) + { + var configurationSource = sp.GetRequiredService(); + var configurationProvider = configurationSource.Build(null! /* Not required for our implementation. */); + Assert.NotNull(configurationProvider); + + configurationProvider.Load(); + + var secretManagerConfigurationProvider = Assert.IsType(configurationProvider); + Assert.NotNull(secretManagerConfigurationProvider._autoRefreshingSecret); + Assert.IsType(secretManagerConfigurationProvider._autoRefreshingSecret); + + Assert.True(configurationProvider.TryGet("Hello", out var value)); + Assert.Equal("World2", value); + + await sp.GetRequiredService().UnsubscribeAllAsync().ConfigureAwait(false); + } + } + + [SkippableFact] + public async Task SecretManagerSubscriptionCleanupHostedService() + { + await using (CreateServiceProvider().AsAsyncDisposable(out var sp).ConfigureAwait(false)) + { + var hostedService = sp.GetServices() + .OfType() + .FirstOrDefault(); + Assert.NotNull(hostedService); + + await hostedService.StartAsync(CancellationToken.None).ConfigureAwait(false); + await hostedService.StopAsync(CancellationToken.None).ConfigureAwait(false); + } + } + + [SkippableFact] + public async Task ResolveNonIsolatedManager() + { + await using (CreateServiceProvider(false).AsAsyncDisposable(out var sp).ConfigureAwait(false)) + { + sp.GetRequiredService(); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/SelectFastTests.cs b/UET/Redpoint.CloudFramework.Tests/SelectFastTests.cs new file mode 100644 index 00000000..e4cdbb0a 
--- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/SelectFastTests.cs @@ -0,0 +1,26 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Redpoint.Collections; + using System.Linq; + using System.Threading.Tasks; + using Xunit; + + public class SelectFastTests + { + [Fact] + public async Task SelectFast() + { + var inputs = new[] + { + 1, 2, 3, 4, 5, 6 + }; + + await inputs.ToAsyncEnumerable().SelectFastAwait(async input => + { + Assert.NotEqual(0, input); + await Task.Delay(input * 10).ConfigureAwait(true); + return input; + }).ToListAsync().ConfigureAwait(true); + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/ShardedCounterTests.cs b/UET/Redpoint.CloudFramework.Tests/ShardedCounterTests.cs new file mode 100644 index 00000000..6bf5e168 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/ShardedCounterTests.cs @@ -0,0 +1,86 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Grpc.Core; + using Microsoft.Extensions.DependencyInjection; + using Redpoint.CloudFramework.Counter; + using Redpoint.CloudFramework.Repository; + using System; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + using Xunit; + + [Collection("CloudFramework Test")] + public class ShardedCounterTests + { + private readonly CloudFrameworkTestEnvironment _env; + + public const int DefaultDelayMs = 0; + + public ShardedCounterTests(CloudFrameworkTestEnvironment env) + { + _env = env; + } + + class CounterContainer + { + public long Value { get; set; } + } + + [Fact] + public async Task TestShardedCounterBehavesCorrectlyUnderHighConcurrency() + { +#pragma warning disable CA5394 + var shardedCounters = _env.Services.GetRequiredService(); + var semaphore = new SemaphoreSlim(1); + + var counterContainer = new CounterContainer + { + Value = await shardedCounters.GetAsync("test-sharded-counter").ConfigureAwait(true) + }; + await Parallel.ForEachAsync(AsyncEnumerable.Range(0, 16), async (idx, ct) => + { + for (int i = 0; i < 20; i++) + { + var adjustAmount 
= Random.Shared.Next(-10, 10); + await semaphore.WaitAsync(ct).ConfigureAwait(true); + try + { + counterContainer.Value += adjustAmount; + } + finally + { + semaphore.Release(); + } + while (true) + { + try + { + await shardedCounters.AdjustAsync("test-sharded-counter", adjustAmount).ConfigureAwait(true); + break; + } + catch (RpcException ex) when (ex.IsContentionException()) + { + await Task.Delay(Random.Shared.Next(0, 5) * 200, ct).ConfigureAwait(true); + continue; + } + } + } + }).ConfigureAwait(true); + + // Wait for Datastore to settle. + for (int i = 0; i < 30; i++) + { + var value = await shardedCounters.GetAsync("test-sharded-counter").ConfigureAwait(true); + if (counterContainer.Value == value) + { + Assert.True(true); + return; + } + await Task.Delay(1000).ConfigureAwait(true); + } + Assert.Equal(counterContainer.Value, await shardedCounters.GetAsync("test-sharded-counter").ConfigureAwait(true)); +#pragma warning restore CA5394 + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/StartContainersForRancher.bat b/UET/Redpoint.CloudFramework.Tests/StartContainersForRancher.bat new file mode 100644 index 00000000..dd20f5a6 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/StartContainersForRancher.bat @@ -0,0 +1,3 @@ +docker run --rm -d --name rcftest-datastore -p 61002:9000 gcr.io/google.com/cloudsdktool/cloud-sdk:latest gcloud beta emulators datastore start --host-port=0.0.0.0:9000 --no-store-on-disk --project=local-dev --consistency=1.0 +docker run --rm -d --name rcftest-pubsub -p 61001:9000 gcr.io/google.com/cloudsdktool/cloud-sdk:latest gcloud beta emulators pubsub start --host-port=0.0.0.0:9000 +docker run --rm -d --name rcftest-redis -p 61000:6379 redis:6.0.10 \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework.Tests/StringEnumRepositoryLayerTests.cs b/UET/Redpoint.CloudFramework.Tests/StringEnumRepositoryLayerTests.cs new file mode 100644 index 00000000..5611cc0b --- /dev/null +++ 
b/UET/Redpoint.CloudFramework.Tests/StringEnumRepositoryLayerTests.cs @@ -0,0 +1,224 @@ +namespace Redpoint.CloudFramework.Tests +{ + using Google.Cloud.Datastore.V1; + using Microsoft.Extensions.DependencyInjection; + using NodaTime; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Layers; + using Redpoint.StringEnum; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading.Tasks; + using Xunit; + using Xunit.Sdk; + + internal class TestStringEnum : StringEnum + { + public static readonly StringEnumValue A = Create("a"); + + public static readonly StringEnumValue B = Create("b"); + + public static readonly StringEnumValue C = Create("c"); + } + + [Kind("testString")] + internal class TestStringModel : AttributedModel + { + [Type(FieldType.String), Indexed, Default("a")] + public StringEnumValue enumValue { get; set; } = TestStringEnum.A; + +#pragma warning disable CA1861 // Avoid constant arrays as arguments + [Type(FieldType.StringArray), Indexed, Default(new[] { "a" })] +#pragma warning restore CA1861 // Avoid constant arrays as arguments + public IReadOnlyList> enumArrayValue { get; set; } = new[] { TestStringEnum.A }; + +#pragma warning disable CA1861 // Avoid constant arrays as arguments + [Type(FieldType.StringArray), Indexed, Default(new[] { "a" })] +#pragma warning restore CA1861 // Avoid constant arrays as arguments + public IReadOnlySet> enumSetValue { get; set; } = new HashSet>(new[] { TestStringEnum.A }); + + [Type(FieldType.Timestamp), Indexed] + public Instant? 
timestamp { get; set; } + } + + [Collection("CloudFramework Test")] + public class StringEnumRepositoryLayerTests + { + private readonly CloudFrameworkTestEnvironment _env; + + public StringEnumRepositoryLayerTests(CloudFrameworkTestEnvironment env) + { + _env = env; + } + + private static async Task HandleEventualConsistency(Func task) + { + for (int i = 0; i < 20; i++) + { + try + { + await task().ConfigureAwait(true); + return; + } + catch (XunitException) + { + await Task.Delay(100).ConfigureAwait(true); + } + } + + await task().ConfigureAwait(true); + } + + [Fact] + public async Task TestCreate() + { + var model = new TestStringModel(); + + Assert.Equal(TestStringEnum.A, model.enumValue); + + var layer = _env.Services.GetRequiredService(); + + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + + Assert.NotNull(model.Key); + Assert.Equal(TestStringEnum.A, model.enumValue); + } + + [Fact] + public async Task TestCreateAndLoad() + { + var layer = _env.Services.GetRequiredService(); + + Key modelKey; + { + var instant = SystemClock.Instance.GetCurrentInstant(); + var model = new TestStringModel + { + enumValue = TestStringEnum.B, + enumArrayValue = new[] { TestStringEnum.B, TestStringEnum.C }, + enumSetValue = new HashSet> { TestStringEnum.B, TestStringEnum.C }, + timestamp = instant, + }; + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + modelKey = model.Key; + } + + { + await HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.LoadAsync(string.Empty, modelKey, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + var model = await layer.LoadAsync(string.Empty, modelKey, null, null, CancellationToken.None).ConfigureAwait(true); + Assert.NotNull(model); + Assert.Equal(TestStringEnum.B, model.enumValue); + 
Assert.Equal(new[] { TestStringEnum.B, TestStringEnum.C }, model.enumArrayValue); + Assert.Equal(new[] { TestStringEnum.B, TestStringEnum.C }, model.enumSetValue); + } + } + + [Fact] + public async Task TestQueryOnEnumValue() + { + var layer = _env.Services.GetRequiredService(); + + var instant = SystemClock.Instance.GetCurrentInstant(); + { + var model = new TestStringModel + { + enumValue = TestStringEnum.B, + timestamp = instant, + }; + await layer.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + } + + { + await HandleEventualConsistency(async () => + { + Assert.NotNull(await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.enumValue == TestStringEnum.B, + null, + null, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true)); + }).ConfigureAwait(true); + + var model = await layer.QueryAsync( + string.Empty, + x => x.timestamp == instant && x.enumValue == TestStringEnum.B, + null, + null, + null, + null, + CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(true); + Assert.NotNull(model); + } + } + + [Fact] + public async Task TestLoadIntoCache() + { + var datastore = _env.Services.GetRequiredService(); + var redisCache = _env.Services.GetRequiredService(); + + Key modelKey; + { + var instant = SystemClock.Instance.GetCurrentInstant(); + var model = new TestStringModel + { + enumValue = TestStringEnum.B, + enumArrayValue = new[] { TestStringEnum.B, TestStringEnum.C }, + timestamp = instant, + }; + await datastore.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + modelKey = model.Key; + } + + { + await HandleEventualConsistency(async () => + { + Assert.NotNull(await datastore.LoadAsync(string.Empty, modelKey, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + var model = await 
redisCache.LoadAsync(string.Empty, modelKey, null, null, CancellationToken.None).ConfigureAwait(true); + Assert.NotNull(model); + Assert.Equal(TestStringEnum.B, model.enumValue); + Assert.Equal(new[] { TestStringEnum.B, TestStringEnum.C }, model.enumArrayValue); + } + } + + [Fact] + public async Task TestRoundtripCache() + { + var datastore = _env.Services.GetRequiredService(); + var redisCache = _env.Services.GetRequiredService(); + + Key modelKey; + { + var instant = SystemClock.Instance.GetCurrentInstant(); + var model = new TestStringModel + { + enumValue = TestStringEnum.B, + enumArrayValue = new[] { TestStringEnum.B, TestStringEnum.C }, + enumSetValue = new HashSet> { TestStringEnum.B, TestStringEnum.C }, + timestamp = instant, + }; + await redisCache.CreateAsync(string.Empty, new[] { model }.ToAsyncEnumerable(), null, null, CancellationToken.None).FirstAsync().ConfigureAwait(true); + modelKey = model.Key; + } + + { + await HandleEventualConsistency(async () => + { + Assert.NotNull(await datastore.LoadAsync(string.Empty, modelKey, null, null, CancellationToken.None).ConfigureAwait(true)); + }).ConfigureAwait(true); + var model = await redisCache.LoadAsync(string.Empty, modelKey, null, null, CancellationToken.None).ConfigureAwait(true); + Assert.NotNull(model); + Assert.Equal(TestStringEnum.B, model.enumValue); + Assert.Equal(new[] { TestStringEnum.B, TestStringEnum.C }, model.enumArrayValue); + Assert.Equal(new[] { TestStringEnum.B, TestStringEnum.C }, model.enumSetValue); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/TestModel.cs b/UET/Redpoint.CloudFramework.Tests/TestModel.cs new file mode 100644 index 00000000..9265839c --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/TestModel.cs @@ -0,0 +1,46 @@ +using Google.Cloud.Datastore.V1; +using NodaTime; +using Redpoint.CloudFramework.Models; + +namespace Redpoint.CloudFramework.Tests +{ + // This ensures the tests pass when using properties on base classes. 
+ + [Kind("cf_testModel")] + public class TestModel : TestBaseModel + { + } + + [Kind("cf_testBaseModel")] + public class TestBaseModel : AttributedModel + { + [Type(FieldType.String), Indexed] + public string? forTest { get; set; } + + [Type(FieldType.String), Indexed] + public string? string1 { get; set; } + + [Type(FieldType.Integer), Indexed] + public long? number1 { get; set; } + + [Type(FieldType.Integer), Indexed] + public long? number2 { get; set; } + + [Type(FieldType.Timestamp), Indexed] + public Instant? timestamp { get; set; } + + [Type(FieldType.Key)] + public Key? keyValue { get; set; } + + public TestModel? untracked { get; set; } + + [Type(FieldType.String), Indexed] + protected string? protectedString1 { get; set; } + + [Type(FieldType.String), Indexed] + private string? privateString1 { get; set; } + + [Type(FieldType.String), Indexed] + internal string? internalString1 { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework.Tests/containers/datastore.Dockerfile b/UET/Redpoint.CloudFramework.Tests/containers/datastore.Dockerfile new file mode 100644 index 00000000..71fb1e30 --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/containers/datastore.Dockerfile @@ -0,0 +1,12 @@ +FROM gcr.io/google.com/cloudsdktool/cloud-sdk:latest + +EXPOSE 9001 +ENV CLOUDSDK_CORE_PROJECT local-dev + +# DO NOT UNDER ANY CIRCUMSTANCES ADD THE --use-firestore-in-datastore-mode FLAG +# +# This flag does not properly emulate "Firestore in Datastore mode" and *actively* breaks +# transaction integrity in the emulator, leading to transaction commits going through +# when they should fail with contention errors. 
+# +ENTRYPOINT [ "gcloud", "beta", "emulators", "datastore", "start", "--host-port=0.0.0.0:9001", "--no-store-on-disk", "--consistency=1.0" ] \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework.Tests/containers/pubsub.Dockerfile b/UET/Redpoint.CloudFramework.Tests/containers/pubsub.Dockerfile new file mode 100644 index 00000000..78dc0d6e --- /dev/null +++ b/UET/Redpoint.CloudFramework.Tests/containers/pubsub.Dockerfile @@ -0,0 +1,6 @@ +FROM gcr.io/google.com/cloudsdktool/cloud-sdk:latest + +EXPOSE 9000 +ENV CLOUDSDK_CORE_PROJECT local-dev + +ENTRYPOINT [ "gcloud", "beta", "emulators", "pubsub", "start", "--host-port=0.0.0.0:9000" ] \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework/BigQuery/BigQueryResultsInfo.cs b/UET/Redpoint.CloudFramework/BigQuery/BigQueryResultsInfo.cs new file mode 100644 index 00000000..1be7043f --- /dev/null +++ b/UET/Redpoint.CloudFramework/BigQuery/BigQueryResultsInfo.cs @@ -0,0 +1,11 @@ +namespace Redpoint.CloudFramework.BigQuery +{ + using Google.Cloud.BigQuery.V2; + + public class BigQueryResultsInfo + { + public required BigQueryResults Results { get; init; } + + public required BigQueryJob Job { get; init; } + } +} diff --git a/UET/Redpoint.CloudFramework/BigQuery/DefaultBigQuery.cs b/UET/Redpoint.CloudFramework/BigQuery/DefaultBigQuery.cs new file mode 100644 index 00000000..7df71b8b --- /dev/null +++ b/UET/Redpoint.CloudFramework/BigQuery/DefaultBigQuery.cs @@ -0,0 +1,374 @@ +namespace Redpoint.CloudFramework.BigQuery +{ + using Google; + using Google.Apis.Bigquery.v2; + using Google.Apis.Bigquery.v2.Data; + using Google.Cloud.BigQuery.V2; + using Google.Cloud.Datastore.V1; + using Microsoft.Extensions.Caching.Memory; + using Redpoint.CloudFramework.GoogleInfrastructure; + using Redpoint.CloudFramework.Prefix; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Text; + using System.Threading.Tasks; + + public class DefaultBigQuery : IBigQuery + { + private 
readonly IMemoryCache _memoryCache; + private readonly IGlobalPrefix _globalPrefix; + private readonly BigQueryClient _client; + private readonly Dictionary _knownSchemata; + private readonly Dictionary _schemataHasExpiry; + private static readonly object _bigQuerySchemaLock = new object(); + + private MemoryCacheEntryOptions _memoryCacheOptions = + new MemoryCacheEntryOptions() + .SetAbsoluteExpiration(TimeSpan.FromSeconds(30)); + + public DefaultBigQuery( + IMemoryCache memoryCache, + IGlobalPrefix globalPrefix, + IGoogleServices googleServices) + { + _memoryCache = memoryCache; + _globalPrefix = globalPrefix; + + ArgumentNullException.ThrowIfNull(googleServices); + + _client = googleServices.BuildRest( + new[] + { + BigqueryService.Scope.Bigquery, + BigqueryService.Scope.BigqueryInsertdata, + BigqueryService.Scope.DevstorageFullControl, + BigqueryService.Scope.CloudPlatform + }); + _knownSchemata = new Dictionary(); + _schemataHasExpiry = new Dictionary(); + } + + public Key PublicDatasetKey + { + get + { + var k = new Key(); + k.Path.Add(new Key.Types.PathElement("PublicDataset", "public")); + return k; + } + } + + public BigQueryClient GetBigQueryClient() + { + return _client; + } + + public void DeclareSchemaForTable(string table, int tableVersion, bool hasAutomaticExpiration, TableSchema schema) + { + lock (_bigQuerySchemaLock) + { + _knownSchemata[table + "_v" + tableVersion] = schema; + _schemataHasExpiry[table + "_v" + tableVersion] = hasAutomaticExpiration; + } + } + + public string GetDatasetNameForProject(Key projectKey) + { + ArgumentNullException.ThrowIfNull(projectKey); + + if (projectKey.Equals(PublicDatasetKey)) + { + return "public"; + } + + return _globalPrefix.Create(projectKey).Replace('-', '_'); + } + + public string GetTableNameFromTableAndVersion(string table, int tableVersion) + { + return table + "_v" + tableVersion; + } + + public async Task GetWritableTableForProject(Key projectKey, string table, int tableVersion) + { + 
ArgumentNullException.ThrowIfNull(projectKey); + + if (projectKey.Equals(PublicDatasetKey)) + { + return await GetWritableTableForProject("public", table, tableVersion).ConfigureAwait(false); + } + + return await GetWritableTableForProject(_globalPrefix.Create(projectKey), table, tableVersion).ConfigureAwait(false); + } + + public async Task GetWritableTableForProject(string projectId, string table, int tableVersion) + { + ArgumentException.ThrowIfNullOrEmpty(projectId); + + var datasetName = projectId.Replace('-', '_'); + var tableName = table + "_v" + tableVersion; + var cacheName = datasetName + "." + tableName; + if (!_memoryCache.TryGetValue(cacheName, out BigQueryTable? tableRef)) + { + var dataset = await _client.GetOrCreateDatasetAsync(datasetName).ConfigureAwait(false); + var schema = _knownSchemata[tableName]; + try + { + var tableToCreate = new Table + { + TimePartitioning = TimePartition.CreateDailyPartitioning(_schemataHasExpiry[tableName] ? TimeSpan.FromDays(30) : (TimeSpan?)null), + Schema = schema + }; + tableRef = await _client.GetOrCreateTableAsync( + datasetName, + tableName, + tableToCreate).ConfigureAwait(false); + } + catch (GoogleApiException gae) when (gae.Message.Contains("Already Exists: Table", StringComparison.InvariantCultureIgnoreCase)) + { + // Race condition with another thread creating the exact same table - retry with a get request. 
+ tableRef = await _client.GetTableAsync( + datasetName, + tableName).ConfigureAwait(false); + } + + tableRef = await CheckTableRefMatchesSchemaAndUpdateIfNecessary(datasetName, tableName, tableRef, schema).ConfigureAwait(false); + + _memoryCache.Set(cacheName, tableRef, _memoryCacheOptions); + } + + return tableRef!; + } + + private async Task CheckTableRefMatchesSchemaAndUpdateIfNecessary(string datasetName, string tableName, BigQueryTable tableRef, TableSchema schema) + { + if (!FieldListIsEqual(tableRef.Schema.Fields, schema.Fields)) + { + tableRef.Resource.Schema = schema; + + return await _client.PatchTableAsync( + datasetName, + tableName, + tableRef.Resource, + new PatchTableOptions + { + }).ConfigureAwait(false); + } + + return tableRef; + } + + private static bool FieldListIsEqual(IList old, IList @new) + { + if (old == null && @new == null) + { + return true; + } + if (old == null || @new == null) + { + return false; + } + if (old.Count != @new.Count) + { + return false; + } + + var oldByKv = old.ToDictionary(k => k.Name, v => v); + var newByKv = @new.ToDictionary(k => k.Name, v => v); + + foreach (var kv in oldByKv) + { + if (!newByKv.ContainsKey(kv.Key)) + { + return false; + } + } + + foreach (var kv in newByKv) + { + if (!oldByKv.TryGetValue(kv.Key, out TableFieldSchema? 
value)) + { + return false; + } + if (value.Name != kv.Value.Name) + { + return false; + } + if (value.Description != kv.Value.Description) + { + return false; + } + if (value.Mode != kv.Value.Mode) + { + return false; + } + if (!FieldListIsEqual(value.Fields, kv.Value.Fields)) + { + return false; + } + } + + return true; + } + + public async Task GetReadableTableForProject(Key projectKey, string table, int tableVersion) + { + ArgumentNullException.ThrowIfNull(projectKey); + + if (projectKey.Equals(PublicDatasetKey)) + { + return await GetReadableTableForProject("public", table, tableVersion).ConfigureAwait(false); + } + + return await GetReadableTableForProject(_globalPrefix.Create(projectKey), table, tableVersion).ConfigureAwait(false); + } + + public async Task GetReadableTableForProject(string projectId, string table, int tableVersion) + { + ArgumentException.ThrowIfNullOrEmpty(projectId); + + var datasetName = projectId.Replace('-', '_'); + var tableName = table + "_v" + tableVersion; + var cacheName = datasetName + "." + tableName; + if (!_memoryCache.TryGetValue(cacheName, out BigQueryTable? tableRef)) + { + try + { + tableRef = await _client.GetTableAsync(datasetName, tableName).ConfigureAwait(false); + } + catch (Exception) + { + // Table might not exist, return null. 
+ return null; + } + + var schema = _knownSchemata[tableName]; + tableRef = await CheckTableRefMatchesSchemaAndUpdateIfNecessary(datasetName, tableName, tableRef, schema).ConfigureAwait(false); + + _memoryCache.Set(cacheName, tableRef, _memoryCacheOptions); + } + + return tableRef; + } + + public Task ExecuteLegacyQuery(Key projectKey, string query) + { + return ExecuteLegacyQuery(projectKey, query, false); + } + + public async Task ExecuteLegacyQuery(Key projectKey, string query, bool disableCache) + { + var results = await _client.ExecuteQueryAsync(query, null, new QueryOptions + { + UseLegacySql = true, + UseQueryCache = !disableCache, + }).ConfigureAwait(false); + + var jobInfo = await _client.GetJobAsync(results.JobReference.JobId).ConfigureAwait(false); + + results.ThrowOnAnyError(); + + return new BigQueryResultsInfo + { + Results = results, + Job = jobInfo, + }; + } + + public Task ExecuteStandardQuery(Key projectKey, string query, params BigQueryParameter[] parameters) + { + return ExecuteStandardQuery(projectKey, query, false, parameters); + } + + public async Task ExecuteStandardQuery(Key projectKey, string query, bool disableCache, params BigQueryParameter[] parameters) + { + var results = await _client.ExecuteQueryAsync(query, parameters, new QueryOptions + { + UseLegacySql = false, + UseQueryCache = !disableCache, + }).ConfigureAwait(false); + + var jobInfo = await _client.GetJobAsync(results.JobReference.JobId).ConfigureAwait(false); + + results.ThrowOnAnyError(); + + return new BigQueryResultsInfo + { + Results = results, + Job = jobInfo, + }; + } + + public string EscapeLegacyString(string str) + { + return Encoding.ASCII.GetBytes(str).Select(x => "\\x" + BitConverter.ToString(new[] { x }).ToLowerInvariant()).Aggregate((a, b) => a + b); + } + + public async Task DeleteTableForProject(Key projectKey, string table, int tableVersion) + { + ArgumentNullException.ThrowIfNull(projectKey); + + var projectId = projectKey.Equals(PublicDatasetKey) ? 
"public" : _globalPrefix.Create(projectKey); + var datasetName = projectId.Replace('-', '_'); + var tableName = table + "_v" + tableVersion; + var cacheName = datasetName + "." + tableName; + if (!_memoryCache.TryGetValue(cacheName, out BigQueryTable? tableRef)) + { + try + { + tableRef = await _client.GetTableAsync(datasetName, tableName).ConfigureAwait(false); + } + catch (Exception) + { + // Table might not exist, ignore. + return; + } + + _memoryCache.Set(cacheName, tableRef); + } + if (tableRef == null) + { + // Table might not exist, ignore. + return; + } + + try + { + await _client.DeleteTableAsync(tableRef.Reference).ConfigureAwait(false); + } + catch (Exception ex) + { + if (ex.Message.Contains("Not found: Table", StringComparison.InvariantCultureIgnoreCase)) + { + // Table already doesn't exist. + } + else + { + throw; + } + } + + _memoryCache.Remove(cacheName); + } + + public async Task GetTableExistsForProject(Key projectKey, string table, int? tableVersion) + { + ArgumentNullException.ThrowIfNull(projectKey); + + var projectId = projectKey.Equals(PublicDatasetKey) ? "public" : _globalPrefix.Create(projectKey); + var datasetName = projectId.Replace('-', '_'); + var tableName = tableVersion == null ? 
table : (table + "_v" + tableVersion); + BigQueryTable tableRef; + try + { + tableRef = await _client.GetTableAsync(datasetName, tableName).ConfigureAwait(false); + return true; + } + catch (Exception) + { + return false; + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/BigQuery/IBigQuery.cs b/UET/Redpoint.CloudFramework/BigQuery/IBigQuery.cs new file mode 100644 index 00000000..071d61c0 --- /dev/null +++ b/UET/Redpoint.CloudFramework/BigQuery/IBigQuery.cs @@ -0,0 +1,27 @@ +namespace Redpoint.CloudFramework.BigQuery +{ + using System.Threading.Tasks; + using Google.Apis.Bigquery.v2.Data; + using Google.Cloud.BigQuery.V2; + using Google.Cloud.Datastore.V1; + + public interface IBigQuery + { + Key PublicDatasetKey { get; } + BigQueryClient GetBigQueryClient(); + void DeclareSchemaForTable(string table, int tableVersion, bool hasAutomaticExpiration, TableSchema schema); + Task GetReadableTableForProject(Key projectKey, string table, int tableVersion); + Task GetReadableTableForProject(string projectId, string table, int tableVersion); + Task GetWritableTableForProject(Key projectKey, string table, int tableVersion); + Task GetWritableTableForProject(string projectId, string table, int tableVersion); + Task ExecuteLegacyQuery(Key projectKey, string query); + Task ExecuteLegacyQuery(Key projectKey, string query, bool disableCache); + Task ExecuteStandardQuery(Key projectKey, string query, params BigQueryParameter[] parameters); + Task ExecuteStandardQuery(Key projectKey, string query, bool disableCache, params BigQueryParameter[] parameters); + string EscapeLegacyString(string str); + Task DeleteTableForProject(Key projectKey, string table, int tableVersion); + Task GetTableExistsForProject(Key projectKey, string table, int? 
tableVersion); + string GetDatasetNameForProject(Key projectKey); + string GetTableNameFromTableAndVersion(string table, int tableVersion); + } +} diff --git a/UET/Redpoint.CloudFramework/Cache/RetryableRedisCache.cs b/UET/Redpoint.CloudFramework/Cache/RetryableRedisCache.cs new file mode 100644 index 00000000..519b2c17 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Cache/RetryableRedisCache.cs @@ -0,0 +1,268 @@ +namespace Redpoint.CloudFramework.Cache +{ + using Microsoft.Extensions.Caching.Distributed; + using Microsoft.Extensions.Caching.StackExchangeRedis; + using Microsoft.Extensions.Options; + using StackExchange.Redis; + using System; + using System.Threading; + using System.Threading.Tasks; + + public sealed class RetryableRedisCache : IDistributedCache, IDisposable + { + private readonly RedisCache _cache; + private const int _retryCount = 50; + + public RetryableRedisCache(IOptions redisOptions) + { + _cache = new RedisCache(redisOptions); + } + + public void Dispose() + { + _cache.Dispose(); + } + + public byte[]? 
Get(string key) + { + return RetryableRedisCache.Retry(() => _cache.Get(key)); + } + + public Task GetAsync(string key) + { + return RetryableRedisCache.RetryAsync(() => _cache.GetAsync(key)); + } + + public void Refresh(string key) + { + RetryableRedisCache.Retry(() => _cache.Refresh(key)); + } + + public Task RefreshAsync(string key) + { + return RetryableRedisCache.RetryAsync(() => _cache.RefreshAsync(key)); + } + + public void Remove(string key) + { + RetryableRedisCache.Retry(() => _cache.Remove(key)); + } + + public Task RemoveAsync(string key) + { + return RetryableRedisCache.RetryAsync(() => _cache.RemoveAsync(key)); + } + + public void Set(string key, byte[] value, DistributedCacheEntryOptions options) + { + RetryableRedisCache.Retry(() => _cache.Set(key, value, options)); + } + + public Task SetAsync(string key, byte[] value, DistributedCacheEntryOptions options) + { + return RetryableRedisCache.RetryAsync(() => _cache.SetAsync(key, value, options)); + } + + public Task GetAsync(string key, CancellationToken token = default(CancellationToken)) + { + return RetryableRedisCache.RetryAsync(() => _cache.GetAsync(key, token)); + } + + public Task SetAsync(string key, byte[] value, DistributedCacheEntryOptions options, CancellationToken token = default(CancellationToken)) + { + return RetryableRedisCache.RetryAsync(() => _cache.SetAsync(key, value, options, token)); + } + + public Task RefreshAsync(string key, CancellationToken token = default(CancellationToken)) + { + return RetryableRedisCache.RetryAsync(() => _cache.RefreshAsync(key, token)); + } + + public Task RemoveAsync(string key, CancellationToken token = default(CancellationToken)) + { + return RetryableRedisCache.RetryAsync(() => _cache.RemoveAsync(key, token)); + } + + #region Retry Logic + + private static void Retry(Action func) + { + for (var i = 0; i < _retryCount; i++) + { + try + { + func(); + return; + } + catch (NullReferenceException) + { + // Bug in Microsoft.Extensions.Caching.Redis! 
See https://github.com/aspnet/Caching/issues/270. + if (i == _retryCount - 1) + { + throw; + } + + Thread.Sleep(i * 100); + continue; + } + catch (TimeoutException) + { + if (i == _retryCount - 1) + { + throw; + } + + Thread.Sleep(i * 100); + continue; + } + catch (RedisConnectionException ex) when (ex.Message.Contains("BUSY", StringComparison.Ordinal)) + { + if (i == _retryCount - 1) + { + throw; + } + + Thread.Sleep(i * 100); + continue; + } + } + + throw new InvalidOperationException("Should not be able to escape for loop in Retry() of RetryableRedisCache"); + } + + private static T Retry(Func func) + { + for (var i = 0; i < _retryCount; i++) + { + try + { + return func(); + } + catch (NullReferenceException) + { + // Bug in Microsoft.Extensions.Caching.Redis! See https://github.com/aspnet/Caching/issues/270. + if (i == _retryCount - 1) + { + throw; + } + + Thread.Sleep(i * 100); + continue; + } + catch (TimeoutException) + { + if (i == _retryCount - 1) + { + throw; + } + + Thread.Sleep(i * 100); + continue; + } + catch (RedisConnectionException ex) when (ex.Message.Contains("BUSY", StringComparison.Ordinal)) + { + if (i == _retryCount - 1) + { + throw; + } + + Thread.Sleep(i * 100); + continue; + } + } + + throw new InvalidOperationException("Should not be able to escape for loop in Retry() of RetryableRedisCache"); + } + + private static async Task RetryAsync(Func func) + { + for (var i = 0; i < _retryCount; i++) + { + try + { + await func().ConfigureAwait(false); + return; + } + catch (NullReferenceException) + { + // Bug in Microsoft.Extensions.Caching.Redis! See https://github.com/aspnet/Caching/issues/270. 
+ if (i == _retryCount - 1) + { + throw; + } + + await Task.Delay(i * 100).ConfigureAwait(false); + continue; + } + catch (TimeoutException) + { + if (i == _retryCount - 1) + { + throw; + } + + await Task.Delay(i * 100).ConfigureAwait(false); + continue; + } + catch (RedisConnectionException ex) when (ex.Message.Contains("BUSY", StringComparison.Ordinal)) + { + if (i == _retryCount - 1) + { + throw; + } + + await Task.Delay(i * 100).ConfigureAwait(false); + continue; + } + } + + throw new InvalidOperationException("Should not be able to escape for loop in RetryAsync() of RetryableRedisCache"); + } + + private static async Task RetryAsync(Func> func) + { + for (var i = 0; i < _retryCount; i++) + { + try + { + return await func().ConfigureAwait(false); + } + catch (NullReferenceException) + { + // Bug in Microsoft.Extensions.Caching.Redis! See https://github.com/aspnet/Caching/issues/270. + if (i == _retryCount - 1) + { + throw; + } + + await Task.Delay(i * 100).ConfigureAwait(false); + continue; + } + catch (TimeoutException) + { + if (i == _retryCount - 1) + { + throw; + } + + await Task.Delay(i * 100).ConfigureAwait(false); + continue; + } + catch (RedisConnectionException ex) when (ex.Message.Contains("BUSY", StringComparison.Ordinal)) + { + if (i == _retryCount - 1) + { + throw; + } + + await Task.Delay(i * 100).ConfigureAwait(false); + continue; + } + } + + throw new InvalidOperationException("Should not be able to escape for loop in RetryAsync() of RetryableRedisCache"); + } + + #endregion + } +} diff --git a/UET/Redpoint.CloudFramework/CloudFramework.cs b/UET/Redpoint.CloudFramework/CloudFramework.cs new file mode 100644 index 00000000..61d6428e --- /dev/null +++ b/UET/Redpoint.CloudFramework/CloudFramework.cs @@ -0,0 +1,31 @@ +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("Redpoint.CloudFramework.Tests")] +[assembly: InternalsVisibleTo("Redpoint.CloudFramework.Tests.Shared")] + +namespace Redpoint.CloudFramework +{ + using 
Redpoint.CloudFramework.Startup; + +#pragma warning disable CA1724 + public static class CloudFramework +#pragma warning restore CA1724 + { + /// + /// Build a web application. + /// + public readonly static IWebAppConfigurator WebApp = new DefaultWebAppConfigurator(); + + /// + /// Build a service application, which runs in the background and processes either event-based or timing-based tasks. + /// + public readonly static IServiceAppConfigurator ServiceApp = new DefaultServiceAppConfigurator(); + +#if ENABLE_UNSUPPORTED + /// + /// Build an interactive console application. Used for command line tools that need to interact with your main app. + /// + public readonly static IInteractiveConsoleAppConfigurator InteractiveConsoleApp = new InteractiveConsoleAppConfigurator(); +#endif + } +} diff --git a/UET/Redpoint.CloudFramework/Collections/Batching/BatchedAsyncEnumerableExtensions.cs b/UET/Redpoint.CloudFramework/Collections/Batching/BatchedAsyncEnumerableExtensions.cs new file mode 100644 index 00000000..d105932e --- /dev/null +++ b/UET/Redpoint.CloudFramework/Collections/Batching/BatchedAsyncEnumerableExtensions.cs @@ -0,0 +1,191 @@ +namespace Redpoint.CloudFramework.Collections.Batching +{ + using System; + using System.Collections.Generic; + using System.Threading; + + public static class BatchedAsyncEnumerableExtensions + { + /// + /// Wraps the value as an asynchronous enumerable of a batch with a single entry. + /// + public static IBatchedAsyncEnumerable AsSingleBatchedAsyncEnumerable(TValue value) + { + return new WrappedBatchedAsyncEnumerable(new[] + { + new TValue[1] { value } + }.ToAsyncEnumerable()); + } + + /// + /// Wraps the asynchronous enumerable of lists as a batched asynchronous enumerable. + /// + /// The value type inside each batch. + /// The asynchronous enumerable that emits batches of values. + /// A new batched asynchronous enumerable. 
+ public static IBatchedAsyncEnumerable AsBatchedAsyncEnumerable(this IAsyncEnumerable> batches) + { + return new WrappedBatchedAsyncEnumerable(batches); + } + + // - cancellation token + // - single key + // - first join + public static IBatchedAsyncEnumerable JoinByDistinctKeyAwait( + this IBatchedAsyncEnumerable enumerable, + Func keySelector, + Func, CancellationToken, IAsyncEnumerable>> joiner) where TKey : notnull + { + return new BindingBatchedAsyncEnumerable( + enumerable, + () => new KeyDistinctBatchAsyncOperation( + keySelector, + joiner), + (value, fetchedValue, _) => fetchedValue); + } + + // - cancellation token + // - multiple key + // - first join + public static IBatchedAsyncEnumerable> JoinByDistinctKeyListAwait( + this IBatchedAsyncEnumerable enumerable, + Func> keySelector, + Func, CancellationToken, IAsyncEnumerable>> joiner) where TKey : notnull + { + return new BindingBatchedAsyncEnumerable>( + enumerable, + () => new KeyListDistinctBatchAsyncOperation( + keySelector, + joiner), + (value, fetchedValue, _) => fetchedValue); + } + + // - no cancellation token + // - single key + // - first join + public static IBatchedAsyncEnumerable JoinByDistinctKeyAwait( + this IBatchedAsyncEnumerable enumerable, + Func keySelector, + Func, IAsyncEnumerable>> joiner) where TKey : notnull + => JoinByDistinctKeyAwait( + enumerable, + keySelector, + (values, _) => joiner(values)); + + // - no cancellation token + // - multiple key + // - first join + public static IBatchedAsyncEnumerable> JoinByDistinctKeyListAwait( + this IBatchedAsyncEnumerable enumerable, + Func> keySelector, + Func, IAsyncEnumerable>> joiner) where TKey : notnull + => JoinByDistinctKeyListAwait( + enumerable, + keySelector, + (values, _) => joiner(values)); + + // - cancellation token + // - single key + // - subsequent join + public static IBatchedAsyncEnumerable JoinByDistinctKeyAwait( + this IBatchedAsyncEnumerable enumerable, + Func keySelector, + Func, CancellationToken, 
IAsyncEnumerable>> joiner, + Func binder) where TKey : notnull where TAggregateRelated : notnull + { + return new BindingBatchedAsyncEnumerable( + enumerable, + () => new KeyDistinctBatchAsyncOperation( + keySelector, + joiner), + (value, fetchedValue, parentValue) => binder((TExistingRelated)parentValue!, (TRelated?)fetchedValue)); + } + + // - cancellation token + // - multiple key + // - subsequent join + public static IBatchedAsyncEnumerable JoinByDistinctKeyListAwait( + this IBatchedAsyncEnumerable enumerable, + Func> keySelector, + Func, CancellationToken, IAsyncEnumerable>> joiner, + Func, TAggregateRelated> binder) where TKey : notnull where TAggregateRelated : notnull + { + return new BindingBatchedAsyncEnumerable( + enumerable, + () => new KeyListDistinctBatchAsyncOperation( + keySelector, + joiner), + (value, fetchedValue, parentValue) => binder((TExistingRelated)parentValue!, (IReadOnlyList)fetchedValue!)); + } + + // - no cancellation token + // - single key + // - subsequent join + public static IBatchedAsyncEnumerable JoinByDistinctKeyAwait( + this IBatchedAsyncEnumerable enumerable, + Func keySelector, + Func, IAsyncEnumerable>> joiner, + Func binder) where TKey : notnull where TAggregateRelated : notnull + => JoinByDistinctKeyAwait( + enumerable, + keySelector, + (values, _) => joiner(values), + binder); + + // - no cancellation token + // - multiple key + // - subsequent join + public static IBatchedAsyncEnumerable JoinByDistinctKeyListAwait( + this IBatchedAsyncEnumerable enumerable, + Func> keySelector, + Func, IAsyncEnumerable>> joiner, + Func, TAggregateRelated> binder) where TKey : notnull where TAggregateRelated : notnull + => JoinByDistinctKeyListAwait( + enumerable, + keySelector, + (values, _) => joiner(values), + binder); + + public static IBatchedAsyncEnumerable JoinByValueAwait( + this IBatchedAsyncEnumerable enumerable, + Func, CancellationToken, Task>> joiner) + { + return new BindingBatchedAsyncEnumerable( + enumerable, + () => new 
ValueBatchAsyncOperation(joiner), + (value, fetchedValue, _) => fetchedValue); + } + + public static IBatchedAsyncEnumerable JoinByValueAwait( + this IBatchedAsyncEnumerable enumerable, + Func, Task>> joiner) + { + return new BindingBatchedAsyncEnumerable( + enumerable, + () => new ValueBatchAsyncOperation((values, _) => joiner(values)), + (value, fetchedValue, _) => fetchedValue); + } + + public static IBatchedAsyncEnumerable JoinByValueAwait( + this IBatchedAsyncEnumerable enumerable, + Func, CancellationToken, Task>> joiner, + Func binder) where TAggregateRelated : notnull + { + return new BindingBatchedAsyncEnumerable( + enumerable, + () => new ValueBatchAsyncOperation(joiner), + (value, fetchedValue, parentValue) => binder((TExistingRelated)parentValue!, (TRelated?)fetchedValue)); + } + + public static IBatchedAsyncEnumerable JoinByValueAwait( + this IBatchedAsyncEnumerable enumerable, + Func, Task>> joiner, + Func binder) where TAggregateRelated : notnull + { + return new BindingBatchedAsyncEnumerable( + enumerable, + () => new ValueBatchAsyncOperation((values, _) => joiner(values)), + (value, fetchedValue, parentValue) => binder((TExistingRelated)parentValue!, (TRelated?)fetchedValue)); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Collections/Batching/BindingBatchedAsyncEnumerable.cs b/UET/Redpoint.CloudFramework/Collections/Batching/BindingBatchedAsyncEnumerable.cs new file mode 100644 index 00000000..098174a1 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Collections/Batching/BindingBatchedAsyncEnumerable.cs @@ -0,0 +1,139 @@ +namespace Redpoint.CloudFramework.Collections.Batching +{ + using System; + using System.Collections.Generic; + using System.Runtime.CompilerServices; + using System.Threading; + using System.Threading.Tasks; + + internal delegate object? BindingBatchedGenericMapper(TValue value, object? fetchedRelated, object? 
existingRelated); + + internal class BindingBatchedAsyncEnumerable : IBatchedAsyncEnumerable + { + private readonly IBatchedAsyncEnumerableInternal _parent; + private readonly Func> _batchOperatorFactory; + private readonly BindingBatchedGenericMapper _binder; + + public BindingBatchedAsyncEnumerable( + IBatchedAsyncEnumerableInternal parent, + Func> batchOperatorFactory, + BindingBatchedGenericMapper binder) + { + _parent = parent; + _batchOperatorFactory = batchOperatorFactory; + _binder = binder; + } + + private class HierarchyLayer + { + public required int Index; + public required IBatchedAsyncEnumerableInternal Layer; + public required IBatchAsyncOperation? StatefulOperation; + public required IReadOnlyList? LastBatchResult; + } + + private async IAsyncEnumerable> EnumerateAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + HierarchyLayer[] hierarchy; + { + var hierarchyCollector = new List>(); + IBatchedAsyncEnumerableInternal? current = this; + do + { + hierarchyCollector.Add(current); + current = current.GetParentBatcher(); + } while (current != null); + hierarchy = new HierarchyLayer[hierarchyCollector.Count]; + for (int h = 0; h < hierarchy.Length; h++) + { + // @note: hierarchyCollector will be in reverse on what we want. + var rh = hierarchy.Length - h - 1; + hierarchy[h] = new HierarchyLayer + { + Index = h, + Layer = hierarchyCollector[rh], + StatefulOperation = hierarchyCollector[rh].CreateStatefulOperationForEnumerator(), + LastBatchResult = null, + }; + } + } + + // @note: Use hierarchy[0].Layer instead of _parent; we don't need to recurse + // to find the root layer since we just collected all of the layers. 
+ await foreach (var batch in hierarchy[0].Layer.GetBatchingRootBatches().ConfigureAwait(false)) + { + cancellationToken.ThrowIfCancellationRequested(); + await Parallel.ForEachAsync( + hierarchy, + cancellationToken, + async (layer, cancellationToken) => + { + if (layer.StatefulOperation != null) + { + layer.LastBatchResult = await layer.StatefulOperation.ProcessBatchAsync(batch, cancellationToken).ConfigureAwait(false); + } + }).ConfigureAwait(false); + var mappedBatch = new List<(TValue value, TRelated related)>(); + for (var i = 0; i < batch.Count; i++) + { + object? value = batch[i]; + if (value != null) + { + for (var h = 0; h < hierarchy.Length; h++) + { + if (hierarchy[h].StatefulOperation != null) + { + value = hierarchy[h].Layer.MapToAggregatedResult( + batch[i], + hierarchy[h].LastBatchResult![i], + value); + } + } + if (value == null) + { + break; + } + mappedBatch.Add((batch[i], (TRelated)value!)); + } + } + yield return mappedBatch; + } + } + + public IBatchedAsyncEnumerable<(TValue value, TRelated related)> ThenStartExecutingAsync() + { + return EnumerateAsync().AsBatchedAsyncEnumerable(); + } + + async IAsyncEnumerator<(TValue value, TRelated related)> IAsyncEnumerable<(TValue value, TRelated related)>.GetAsyncEnumerator(CancellationToken cancellationToken) + { + await foreach (var batch in EnumerateAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var value in batch) + { + yield return value; + } + } + } + + IBatchedAsyncEnumerableInternal? IBatchedAsyncEnumerableInternal.GetParentBatcher() + { + return _parent; + } + + IAsyncEnumerable> IBatchedAsyncEnumerableInternal.GetBatchingRootBatches() + { + return _parent.GetBatchingRootBatches(); + } + + object? IBatchedAsyncEnumerableInternal.MapToAggregatedResult(TValue value, object? fetchedRelated, object? 
existingRelated) + { + return _binder(value, fetchedRelated, existingRelated); + } + + IBatchAsyncOperation IBatchedAsyncEnumerableInternal.CreateStatefulOperationForEnumerator() + { + return _batchOperatorFactory(); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Collections/Batching/IBatchAsyncOperation.cs b/UET/Redpoint.CloudFramework/Collections/Batching/IBatchAsyncOperation.cs new file mode 100644 index 00000000..d440d697 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Collections/Batching/IBatchAsyncOperation.cs @@ -0,0 +1,11 @@ +namespace Redpoint.CloudFramework.Collections.Batching +{ + using System.Collections.Generic; + using System.Threading; + using System.Threading.Tasks; + + internal interface IBatchAsyncOperation + { + Task> ProcessBatchAsync(IReadOnlyList values, CancellationToken cancellationToken); + } +} diff --git a/UET/Redpoint.CloudFramework/Collections/Batching/IBatchedAsyncEnumerable.cs b/UET/Redpoint.CloudFramework/Collections/Batching/IBatchedAsyncEnumerable.cs new file mode 100644 index 00000000..84e901d3 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Collections/Batching/IBatchedAsyncEnumerable.cs @@ -0,0 +1,14 @@ +namespace Redpoint.CloudFramework.Collections.Batching +{ + using System.Collections.Generic; + + public interface IBatchedAsyncEnumerable : IAsyncEnumerable, IBatchedAsyncEnumerableInternal + { + IAsyncEnumerable> AsBatches(); + } + + public interface IBatchedAsyncEnumerable : IAsyncEnumerable<(TValue value, TRelated related)>, IBatchedAsyncEnumerableInternal + { + IBatchedAsyncEnumerable<(TValue value, TRelated related)> ThenStartExecutingAsync(); + } +} diff --git a/UET/Redpoint.CloudFramework/Collections/Batching/IBatchedAsyncEnumerableInternal.cs b/UET/Redpoint.CloudFramework/Collections/Batching/IBatchedAsyncEnumerableInternal.cs new file mode 100644 index 00000000..2c5d0d84 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Collections/Batching/IBatchedAsyncEnumerableInternal.cs @@ -0,0 +1,15 @@ +namespace 
Redpoint.CloudFramework.Collections.Batching +{ + using System.Collections.Generic; + + public interface IBatchedAsyncEnumerableInternal + { + internal IAsyncEnumerable> GetBatchingRootBatches(); + + internal IBatchedAsyncEnumerableInternal? GetParentBatcher(); + + internal object? MapToAggregatedResult(TValue value, object? fetchedRelated, object? existingRelated); + + internal IBatchAsyncOperation? CreateStatefulOperationForEnumerator(); + } +} diff --git a/UET/Redpoint.CloudFramework/Collections/Batching/KeyDistinctBatchAsyncOperation.cs b/UET/Redpoint.CloudFramework/Collections/Batching/KeyDistinctBatchAsyncOperation.cs new file mode 100644 index 00000000..8a23d295 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Collections/Batching/KeyDistinctBatchAsyncOperation.cs @@ -0,0 +1,50 @@ +namespace Redpoint.CloudFramework.Collections.Batching +{ + using System.Collections.Generic; + using System.Threading; + using System.Threading.Tasks; + + internal class KeyDistinctBatchAsyncOperation : IBatchAsyncOperation where TKey : notnull + { + private readonly Func _keySelector; + private readonly Func, CancellationToken, IAsyncEnumerable>> _joiner; + private readonly Dictionary _cache; + + public KeyDistinctBatchAsyncOperation( + Func keySelector, + Func, CancellationToken, IAsyncEnumerable>> joiner) + { + _keySelector = keySelector; + _joiner = joiner; + _cache = new Dictionary(); + } + + public async Task> ProcessBatchAsync( + IReadOnlyList values, + CancellationToken cancellationToken) + { + var keys = values + .Select(_keySelector) + .Distinct() + .Where(x => !_cache.ContainsKey(x)); + await foreach (var kv in _joiner(keys.ToAsyncEnumerable(), cancellationToken).ConfigureAwait(false)) + { + _cache.TryAdd(kv.Key, kv.Value); + } + var results = new List(); + foreach (var value in values) + { + var key = _keySelector(value); + if (_cache.TryGetValue(key, out var related)) + { + results.Add(related); + } + else + { + results.Add(null); + } + } + return results; + } + } +} 
diff --git a/UET/Redpoint.CloudFramework/Collections/Batching/KeyListDistinctBatchAsyncOperation.cs b/UET/Redpoint.CloudFramework/Collections/Batching/KeyListDistinctBatchAsyncOperation.cs new file mode 100644 index 00000000..79801c23 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Collections/Batching/KeyListDistinctBatchAsyncOperation.cs @@ -0,0 +1,51 @@ +namespace Redpoint.CloudFramework.Collections.Batching +{ + using System.Collections.Generic; + using System.Threading; + using System.Threading.Tasks; + + internal class KeyListDistinctBatchAsyncOperation : IBatchAsyncOperation where TKey : notnull + { + private readonly Func> _keySelector; + private readonly Func, CancellationToken, IAsyncEnumerable>> _joiner; + private readonly Dictionary _cache; + + public KeyListDistinctBatchAsyncOperation( + Func> keySelector, + Func, CancellationToken, IAsyncEnumerable>> joiner) + { + _keySelector = keySelector; + _joiner = joiner; + _cache = new Dictionary(); + } + + public async Task> ProcessBatchAsync( + IReadOnlyList values, + CancellationToken cancellationToken) + { + var keys = values + .SelectMany(_keySelector) + .Distinct() + .Where(x => !_cache.ContainsKey(x)); + await foreach (var kv in _joiner(keys.ToAsyncEnumerable(), cancellationToken).ConfigureAwait(false)) + { + _cache.TryAdd(kv.Key, kv.Value); + } + var results = new List(); + foreach (var value in values) + { + var keyList = _keySelector(value); + var valueList = new TRelated?[keyList.Count]; + for (int i = 0; i < keyList.Count; i++) + { + if (!_cache.TryGetValue(keyList[i], out valueList[i])) + { + valueList[i] = default; + } + } + results.Add(valueList); + } + return results; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Collections/Batching/ValueBatchAsyncOperation.cs b/UET/Redpoint.CloudFramework/Collections/Batching/ValueBatchAsyncOperation.cs new file mode 100644 index 00000000..f76e5107 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Collections/Batching/ValueBatchAsyncOperation.cs @@ -0,0 
+1,33 @@ +namespace Redpoint.CloudFramework.Collections.Batching +{ + using System.Collections.Generic; + using System.Threading; + using System.Threading.Tasks; + + internal class ValueBatchAsyncOperation : IBatchAsyncOperation + { + private readonly Func, CancellationToken, Task>> _joiner; + + public ValueBatchAsyncOperation( + Func, CancellationToken, Task>> joiner) + { + _joiner = joiner; + } + + public async Task> ProcessBatchAsync( + IReadOnlyList values, + CancellationToken cancellationToken) + { + var results = new List(); + foreach (var entry in await _joiner(values, cancellationToken).ConfigureAwait(false)) + { + results.Add(entry); + } + if (results.Count != values.Count) + { + throw new InvalidOperationException("JoinByValueAwait joiner must return exactly the same number of elements as the batch input."); + } + return results; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Collections/Batching/WrappedBatchedAsyncEnumerable.cs b/UET/Redpoint.CloudFramework/Collections/Batching/WrappedBatchedAsyncEnumerable.cs new file mode 100644 index 00000000..d8ecd058 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Collections/Batching/WrappedBatchedAsyncEnumerable.cs @@ -0,0 +1,45 @@ +namespace Redpoint.CloudFramework.Collections.Batching +{ + using System.Collections.Generic; + using System.Threading; + + internal class WrappedBatchedAsyncEnumerable : IBatchedAsyncEnumerable + { + private readonly IAsyncEnumerable> _batches; + + public WrappedBatchedAsyncEnumerable(IAsyncEnumerable> batches) + { + _batches = batches; + } + + IAsyncEnumerator IAsyncEnumerable.GetAsyncEnumerator(CancellationToken cancellationToken) + { + return _batches.SelectMany(x => x.ToAsyncEnumerable()).GetAsyncEnumerator(cancellationToken); + } + + IAsyncEnumerable> IBatchedAsyncEnumerableInternal.GetBatchingRootBatches() + { + return _batches; + } + + IBatchedAsyncEnumerableInternal? IBatchedAsyncEnumerableInternal.GetParentBatcher() + { + return null; + } + + object? 
IBatchedAsyncEnumerableInternal.MapToAggregatedResult(TValue value, object? fetchedRelated, object? existingRelated) + { + return existingRelated; + } + + IBatchAsyncOperation? IBatchedAsyncEnumerableInternal.CreateStatefulOperationForEnumerator() + { + return null; + } + + public IAsyncEnumerable> AsBatches() + { + return _batches; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/DefaultAutoRefreshingSecret.cs b/UET/Redpoint.CloudFramework/Configuration/DefaultAutoRefreshingSecret.cs new file mode 100644 index 00000000..f007bbdd --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/DefaultAutoRefreshingSecret.cs @@ -0,0 +1,87 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Google.Cloud.PubSub.V1; + using Google.Cloud.SecretManager.V1; + using Microsoft.Extensions.Logging; + + internal class DefaultAutoRefreshingSecret : IAutoRefreshingSecret + { + private readonly ILogger _logger; + private readonly ISecretManagerAccess _secretManagerAccess; + private readonly ISecretManagerNotificationManager _secretManagerNotificationManager; + private readonly Secret _secret; + private readonly Func> _notifier; + + public DefaultAutoRefreshingSecret( + ILogger logger, + ISecretManagerAccess secretManagerAccess, + ISecretManagerNotificationManager secretManagerNotificationManager, + Secret secret, + AccessSecretVersionResponse? 
initialAccessedSecretVersion) + { + _logger = logger; + _secretManagerAccess = secretManagerAccess; + _secretManagerNotificationManager = secretManagerNotificationManager; + _secret = secret; + _notifier = OnSecretUpdated; + + _secretManagerNotificationManager.OnSecretUpdated.Add(_notifier); + _secretManagerNotificationManager.Subscribe(secret); + + if (initialAccessedSecretVersion == null) + { + Data = new Dictionary(); + } + else + { + using (var stream = new MemoryStream(initialAccessedSecretVersion.Payload.Data.ToByteArray())) + { + Data = JsonConfigurationParser.Parse(stream); + } + } + } + + public IDictionary Data { get; private set; } + + public Action? OnRefreshed { get; set; } + + private async Task OnSecretUpdated(Secret updatedSecret, CancellationToken cancellationToken) + { + if (updatedSecret.SecretName.SecretId != _secret.SecretName.SecretId) + { + return SubscriberClient.Reply.Nack; + } + + var secretVersion = await _secretManagerAccess.TryGetLatestSecretVersionAsync(_secret).ConfigureAwait(false); + if (secretVersion == null) + { + return SubscriberClient.Reply.Nack; + } + + var accessedSecretVersion = await _secretManagerAccess.TryAccessSecretVersionAsync(secretVersion).ConfigureAwait(false); + if (accessedSecretVersion == null) + { + return SubscriberClient.Reply.Nack; + } + + using (var stream = new MemoryStream(accessedSecretVersion.Payload.Data.ToByteArray())) + { + Data = JsonConfigurationParser.Parse(stream); + } + if (OnRefreshed != null) + { + OnRefreshed(); + } + + _logger.LogInformation($"Refreshed '{_secret.SecretName.SecretId}' secret from Google Cloud Secret Manager, using version '{secretVersion.SecretVersionName.SecretVersionId}'."); + + return SubscriberClient.Reply.Ack; + } + + public async ValueTask DisposeAsync() + { + _secretManagerNotificationManager.OnSecretUpdated.Remove(_notifier); + await _secretManagerNotificationManager.UnsubscribeAsync(_secret).ConfigureAwait(false); + } + } +} diff --git 
a/UET/Redpoint.CloudFramework/Configuration/DefaultAutoRefreshingSecretFactory.cs b/UET/Redpoint.CloudFramework/Configuration/DefaultAutoRefreshingSecretFactory.cs new file mode 100644 index 00000000..d6ec5a67 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/DefaultAutoRefreshingSecretFactory.cs @@ -0,0 +1,92 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Google.Cloud.SecretManager.V1; + using Microsoft.Extensions.Logging; + + internal class DefaultAutoRefreshingSecretFactory : IAutoRefreshingSecretFactory + { + private readonly ILogger _logger; + private readonly ISecretManagerAccess _secretManagerAccess; + private readonly ISecretManagerNotificationManager _secretManagerNotificationManager; + + public DefaultAutoRefreshingSecretFactory( + ILogger logger, + ISecretManagerAccess secretManagerAccess, + ISecretManagerNotificationManager secretManagerNotificationManager) + { + _logger = logger; + _secretManagerAccess = secretManagerAccess; + _secretManagerNotificationManager = secretManagerNotificationManager; + } + + public IAutoRefreshingSecret Create(string secretName, bool requireSuccessfulLoad) + { + var secret = _secretManagerAccess.TryGetSecret(secretName); + if (secret == null) + { + if (requireSuccessfulLoad) + { + throw new SecretManagerSecretFailedToLoadException($"The '{secretName}' secret could be found in Google Cloud Secret Manager."); + } + else + { + _logger.LogWarning($"No '{secretName}' secret could be found in Google Cloud Secret Manager; returning an empty secret."); + return new EmptyAutoRefreshingSecret(); + } + } + else + { + _logger.LogInformation($"Successfully loaded '{secretName}' secret from Google Cloud Secret Manager."); + } + + var secretVersion = _secretManagerAccess.TryGetLatestSecretVersion(secret); + if (secretVersion == null) + { + if (requireSuccessfulLoad) + { + throw new SecretManagerSecretFailedToLoadException($"No enabled version of the '{secretName}' secret could be found in Google Cloud Secret 
Manager."); + } + else + { + _logger.LogWarning($"No enabled '{secretName}' secret could be found in Google Cloud Secret Manager; the secret will initially have no data but can be populated by creating a new secret version in the Google Cloud dashboard."); + } + } + else + { + _logger.LogInformation($"Successfully determined the latest version '{secretVersion.SecretVersionName.SecretVersionId}' of the '{secretName}' secret from Google Cloud Secret Manager."); + } + + AccessSecretVersionResponse? accessedSecretVersion; + if (secretVersion != null) + { + accessedSecretVersion = _secretManagerAccess.TryAccessSecretVersion(secretVersion); + if (accessedSecretVersion == null) + { + if (requireSuccessfulLoad) + { + throw new SecretManagerSecretFailedToLoadException($"Unable to access the latest enabled version of the '{secretName}' secret in Google Cloud Secret Manager."); + } + else + { + _logger.LogWarning($"Unable to access the latest enabled version of the '{secretName}' secret in Google Cloud Secret Manager; the secret will initially have no data but can be populated by creating a new accessible secret version in the Google Cloud dashboard."); + } + } + else + { + _logger.LogInformation($"Successfully loaded the latest version '{secretVersion.SecretVersionName.SecretVersionId}' of the '{secretName}' secret from Google Cloud Secret Manager."); + } + } + else + { + accessedSecretVersion = null; + } + + return new DefaultAutoRefreshingSecret( + _logger, + _secretManagerAccess, + _secretManagerNotificationManager, + secret, + accessedSecretVersion); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/DefaultSecretManagerAccess.cs b/UET/Redpoint.CloudFramework/Configuration/DefaultSecretManagerAccess.cs new file mode 100644 index 00000000..d11ac987 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/DefaultSecretManagerAccess.cs @@ -0,0 +1,93 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Google.Cloud.SecretManager.V1; + using 
Grpc.Core; + using Redpoint.CloudFramework.GoogleInfrastructure; + using System; + using System.Linq; + + internal class DefaultSecretManagerAccess : ISecretManagerAccess + { + private readonly IGoogleServices _googleServices; + private readonly Lazy _secretClient; + + public DefaultSecretManagerAccess( + IGoogleServices googleServices) + { + _googleServices = googleServices; + _secretClient = new Lazy(() => googleServices.Build( + SecretManagerServiceClient.DefaultEndpoint, + SecretManagerServiceClient.DefaultScopes)); + } + + public SecretManagerServiceClient SecretClient => _secretClient.Value; + + public Secret? TryGetSecret(string secretName) + { + Secret? secret = null; + try + { + secret = _secretClient.Value.GetSecret(SecretName.Format(_googleServices.ProjectId, secretName)); + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.NotFound) + { + } + return secret; + } + + public SecretVersion? TryGetLatestSecretVersion(Secret secret) + { + var versions = _secretClient.Value.ListSecretVersions(new ListSecretVersionsRequest + { + Filter = "state:(ENABLED)", + PageSize = 1, + ParentAsSecretName = secret.SecretName, + }); + + var availableVersion = versions.FirstOrDefault(); + + return availableVersion; + } + + public AccessSecretVersionResponse? TryAccessSecretVersion(SecretVersion secretVersion) + { + AccessSecretVersionResponse? 
response = null; + try + { + response = _secretClient.Value.AccessSecretVersion(secretVersion.SecretVersionName); + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.NotFound) + { + } + return response; + } + + public async Task TryGetLatestSecretVersionAsync(Secret secret) + { + await foreach (var availableVersion in _secretClient.Value.ListSecretVersionsAsync(new ListSecretVersionsRequest + { + Filter = "state:(ENABLED)", + PageSize = 1, + ParentAsSecretName = secret.SecretName, + }).ConfigureAwait(false)) + { + return availableVersion; + } + + return null; + } + + public async Task TryAccessSecretVersionAsync(SecretVersion secretVersion) + { + AccessSecretVersionResponse? response = null; + try + { + response = await _secretClient.Value.AccessSecretVersionAsync(secretVersion.SecretVersionName).ConfigureAwait(false); + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.NotFound) + { + } + return response; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/DefaultSecretManagerConfigurationSourceBehaviour.cs b/UET/Redpoint.CloudFramework/Configuration/DefaultSecretManagerConfigurationSourceBehaviour.cs new file mode 100644 index 00000000..d9991fed --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/DefaultSecretManagerConfigurationSourceBehaviour.cs @@ -0,0 +1,18 @@ +namespace Redpoint.CloudFramework.Configuration +{ + internal class DefaultSecretManagerConfigurationSourceBehaviour : ISecretManagerConfigurationSourceBehaviour + { + private readonly string _secretName; + private readonly bool _requireSuccessfulLoad; + + public DefaultSecretManagerConfigurationSourceBehaviour(string secretName, bool requireSuccessfulLoad) + { + _secretName = secretName; + _requireSuccessfulLoad = requireSuccessfulLoad; + } + + public string SecretName => _secretName; + + public bool RequireSuccessfulLoad => _requireSuccessfulLoad; + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/DefaultSecretManagerNotificationManager.cs 
b/UET/Redpoint.CloudFramework/Configuration/DefaultSecretManagerNotificationManager.cs new file mode 100644 index 00000000..3681e3a3 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/DefaultSecretManagerNotificationManager.cs @@ -0,0 +1,343 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Google.Api.Gax; + using Google.Cloud.PubSub.V1; + using Google.Cloud.SecretManager.V1; + using Google.Protobuf.WellKnownTypes; + using Grpc.Core; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Logging; + using Redpoint.CloudFramework.GoogleInfrastructure; + using System; + using Topic = Google.Cloud.SecretManager.V1.Topic; + + internal class DefaultSecretManagerNotificationManager : ISecretManagerNotificationManager + { + private class SubscriptionState : IDisposable + { + public SubscriptionState(Secret secret, string subscriptionName) + { + Secret = secret; + SubscriptionName = subscriptionName; + SubscriptionCount = 0; + Subscriber = null; + CancellationTokenSource = new CancellationTokenSource(); + SubscriberInitTask = null; + SubscriberRunTask = null; + } + + public Secret Secret; + public string SubscriptionName; + public int SubscriptionCount; + public SubscriberClient? Subscriber; + public CancellationTokenSource CancellationTokenSource; + public Task? SubscriberInitTask; + public Task? SubscriberRunTask; + + public void Dispose() + { + CancellationTokenSource.Dispose(); + } + } + + private readonly ILogger _logger; + private readonly IGoogleServices _googleServices; + private readonly Lazy _subscriberClient; + private readonly Lazy _subscriberCredential; + private readonly Lazy _subscriberServiceEndpoint; + private readonly Dictionary _subscriptions; + private readonly string? 
_subscriptionSuffix; + + public DefaultSecretManagerNotificationManager( + ILogger logger, + IGoogleServices googleServices, + IServiceProvider serviceProvider) + { + _logger = logger; + _googleServices = googleServices; + + _subscriberClient = new Lazy(() => googleServices.Build( + SubscriberServiceApiClient.DefaultEndpoint, + SubscriberServiceApiClient.DefaultScopes)); + _subscriberCredential = new Lazy(() => googleServices.GetChannelCredentials( + SubscriberServiceApiClient.DefaultEndpoint, + SubscriberServiceApiClient.DefaultScopes)); + _subscriberServiceEndpoint = new Lazy(() => googleServices.GetServiceEndpoint( + SubscriberServiceApiClient.DefaultEndpoint, + SubscriberServiceApiClient.DefaultScopes)); + + _subscriptions = new Dictionary(); + + OnSecretUpdated = new List>>(); + + // Figure out the subscription suffix, which is used to allow multiple pods in Kubernetes to be subscribed at the same time. + var suffixProvider = serviceProvider.GetService(); + if (suffixProvider != null) + { + _subscriptionSuffix = "-" + suffixProvider.Suffix; + } + else + { + var subscriptionSuffix = Environment.GetEnvironmentVariable("SECRET_MANAGER_SUBSCRIPTION_SUFFIX"); + if (!string.IsNullOrWhiteSpace(subscriptionSuffix)) + { + _subscriptionSuffix = "-" + subscriptionSuffix; + } + else + { + _subscriptionSuffix = null; + } + } + } + + public List>> OnSecretUpdated { get; private init; } + + private SubscriptionState? SubscribeInternal(Secret secret) + { + if (_subscriptionSuffix == null) + { + _logger.LogError("Expected 'SECRET_MANAGER_SUBSCRIPTION_SUFFIX' environment variable to be set so we can create a unique subscription per process. This should usually be set to the value of `metadata.podName` if you're running the application in Kubernetes. 
Refer to https://kubernetes.io/docs/tasks/inject-data-application/environment-variable-expose-pod-information/#use-pod-fields-as-values-for-environment-variables for more information."); + return null; + } + + var secretName = secret.SecretName.SecretId; + + if (!_subscriptions.TryGetValue(secretName, out SubscriptionState? value)) + { + value = new SubscriptionState( + secret, + SubscriptionName.Format(_googleServices.ProjectId, $"{secretName}-notifications-update{_subscriptionSuffix}")); + _subscriptions.Add(secretName, value); + } + if (value.SubscriberInitTask != null) + { + // We've already been subscribed. + return null; + } + + var notificationTopic = secret.Topics.FirstOrDefault(x => x.TopicName.TopicId == $"{secretName}-notifications"); + if (notificationTopic == null) + { + _logger.LogError($"Expected '{secretName}' secret to have an '{secretName}-notifications' topic that we can subscribe to for update notifications. Since one doesn't exist, the application will not refresh configuration when the secrets are updated inside Google Cloud Secret Manager."); + return null; + } + + value.SubscriberInitTask = Task.Run(async () => await StartSubscriberAsync(secretName, value, notificationTopic).ConfigureAwait(false)); + return value; + } + + public void Subscribe(Secret secret) + { + SubscribeInternal(secret); + } + + public async Task SubscribeAsync(Secret secret) + { + var value = SubscribeInternal(secret); + if (value?.SubscriberInitTask == null) + { + return; + } + await value.SubscriberInitTask.ConfigureAwait(false); + } + + public async Task UnsubscribeAsync(Secret secret) + { + var secretName = secret.SecretName.SecretId; + + if (!_subscriptions.TryGetValue(secretName, out SubscriptionState? value)) + { + // Not subscribed. + return; + } + + if (value.SubscriptionCount > 0) + { + value.SubscriptionCount -= 1; + } + + if (value.SubscriptionCount == 0) + { + // Cancel and shutdown background task. 
+ value.CancellationTokenSource.Cancel(); + try + { + if (value.SubscriberInitTask != null) + { + await value.SubscriberInitTask.ConfigureAwait(false); + } + } + catch (OperationCanceledException) + { + } + if (value.Subscriber != null) + { + try + { + await value.Subscriber.StopAsync(value.CancellationTokenSource.Token).ConfigureAwait(false); + } + catch (OperationCanceledException) + { + } + } + try + { + if (value.SubscriberRunTask != null) + { + await value.SubscriberRunTask.ConfigureAwait(false); + } + } + catch (OperationCanceledException) + { + } + value.CancellationTokenSource.Dispose(); + value.CancellationTokenSource = new CancellationTokenSource(); + value.Subscriber = null; + value.SubscriberInitTask = null; + value.SubscriberRunTask = null; + } + } + + private async Task StartSubscriberAsync(string secretName, SubscriptionState value, Topic notificationTopic) + { + for (int i = 0; i < 10; i++) + { + try + { + Subscription? subscription = null; + try + { + subscription = _subscriberClient.Value.GetSubscription(value.SubscriptionName); + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.NotFound) + { + } + if (subscription == null) + { + subscription = _subscriberClient.Value.CreateSubscription(new Subscription + { + Name = value.SubscriptionName, + AckDeadlineSeconds = 60, + DeadLetterPolicy = null, + Detached = false, + MessageRetentionDuration = Duration.FromTimeSpan(TimeSpan.FromMinutes(20)), + Filter = "hasPrefix(attributes.eventType, \"SECRET_VERSION_\")", + EnableMessageOrdering = false, + ExpirationPolicy = new ExpirationPolicy + { + Ttl = Duration.FromTimeSpan(TimeSpan.FromDays(1)), + }, + Labels = + { + { "auto-managed-by", "redpoint-cloudframework" } + }, + PushConfig = null, + RetainAckedMessages = false, + Topic = notificationTopic.Name, + }); + } + + var builder = new SubscriberClientBuilder + { + SubscriptionName = subscription.SubscriptionName, + ClientCount = 1, + ApiSettings = null, + ChannelCredentials = 
_subscriberCredential.Value, + Endpoint = _subscriberServiceEndpoint.Value, + Settings = new SubscriberClient.Settings + { + FlowControlSettings = new FlowControlSettings(1, null) + } + }; + value.Subscriber = builder.Build(); + value.SubscriberRunTask = value.Subscriber.StartAsync(async (message, cancellationToken) => + { + var handled = SubscriberClient.Reply.Nack; + foreach (var handler in OnSecretUpdated) + { + var handlerHandled = await handler(value.Secret, cancellationToken).ConfigureAwait(false); + if (handlerHandled == SubscriberClient.Reply.Ack) + { + handled = SubscriberClient.Reply.Ack; + } + } + return handled; + }); + _logger.LogInformation($"Subscribed to '{secretName}' update notifications via Pub/Sub from Google Cloud Secret Manager."); + return; + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.AlreadyExists) + { + if (i == 9) + { + _logger.LogWarning($"Got 'already exists' while trying to subscribe to the '{value.SubscriptionName}' subscription. This means we couldn't get it through GetSubscription, but also can't create it with CreateSubscription. Something is very weird."); + } + else + { + await Task.Delay(i * 1000).ConfigureAwait(false); + continue; + } + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.NotFound) + { + if (i == 9) + { + _logger.LogWarning($"Got 'not found' while trying to subscribe to the '{value.SubscriptionName}' subscription, even though we should have created it during startup."); + } + else + { + await Task.Delay(i * 1000).ConfigureAwait(false); + continue; + } + } + } + } + + public async ValueTask UnsubscribeAllAsync() + { + if (_subscriptions.Count == 0) + { + return; + } + + // Force each subscription to unsubscribe. + foreach (var subscription in _subscriptions) + { + subscription.Value.SubscriptionCount = 1; + await UnsubscribeAsync(subscription.Value.Secret).ConfigureAwait(false); + } + + // Go through each subscription entry and delete the underlying Pub/Sub subscription if it exists. 
+ foreach (var subscription in _subscriptions) + { + try + { + var pubsubSubscription = await _subscriberClient.Value.GetSubscriptionAsync( + subscription.Value.SubscriptionName).ConfigureAwait(false); + if (pubsubSubscription != null) + { + _logger.LogInformation($"Cleaning up '{subscription.Key}' notification subscription from Google Pub/Sub."); + await _subscriberClient.Value.DeleteSubscriptionAsync(subscription.Value.SubscriptionName).ConfigureAwait(false); + } + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.NotFound) + { + _logger.LogInformation($"No '{subscription.Key}' notification subscription was found Google Pub/Sub, so it was not cleaned up."); + } + } + + // Dispose cancellation token sources. + var keys = _subscriptions.Keys.ToList(); + foreach (var key in keys) + { + _subscriptions[key].Dispose(); + _subscriptions.Remove(key); + } + } + + public ValueTask DisposeAsync() + { + return UnsubscribeAllAsync(); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/EmptyAutoRefreshingSecret.cs b/UET/Redpoint.CloudFramework/Configuration/EmptyAutoRefreshingSecret.cs new file mode 100644 index 00000000..f26c0bb0 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/EmptyAutoRefreshingSecret.cs @@ -0,0 +1,22 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using System; + using System.Collections.Generic; + + internal class EmptyAutoRefreshingSecret : IAutoRefreshingSecret + { + public EmptyAutoRefreshingSecret() + { + Data = new Dictionary(); + } + + public IDictionary Data { get; private init; } + + public Action? 
OnRefreshed { get; set; } + + public ValueTask DisposeAsync() + { + return ValueTask.CompletedTask; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/IAutoRefreshingSecret.cs b/UET/Redpoint.CloudFramework/Configuration/IAutoRefreshingSecret.cs new file mode 100644 index 00000000..5062964c --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/IAutoRefreshingSecret.cs @@ -0,0 +1,27 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using System; + using System.Collections.Generic; + + /// + /// Represents the current value of a Google Cloud Secret Manager secret. + /// The property is automatically updated and the + /// callback fired whenever the secret versions + /// are modified. + /// + public interface IAutoRefreshingSecret : IAsyncDisposable + { + /// + /// The value of the secret JSON keyed for the configuration system. That + /// is, a JSON value of {"Test":{"A":"World"}} in the secret would be + /// represented with the key "Test:A" equalling "World" in this dictionary. + /// + IDictionary Data { get; } + + /// + /// An optional callback that is fired whenever the + /// property is updated with new data. + /// + Action? OnRefreshed { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/IAutoRefreshingSecretFactory.cs b/UET/Redpoint.CloudFramework/Configuration/IAutoRefreshingSecretFactory.cs new file mode 100644 index 00000000..ffcf9322 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/IAutoRefreshingSecretFactory.cs @@ -0,0 +1,22 @@ +namespace Redpoint.CloudFramework.Configuration +{ + /// + /// A factory which can construct instances + /// over Google Cloud Secret Manager secrets. + /// + public interface IAutoRefreshingSecretFactory + { + /// + /// Create a new instance of for the specified + /// Google Cloud Secret Manager secret, if it exists. 
If + /// is true, this method will throw an exception if the secret data can't be read for + /// the latest version, otherwise it will return an implementation of + /// that contains no initial data. + /// + /// The name of the secret to load. + /// If true, this method will throw an exception. + /// The loaded secret. + /// was set to true and the secret could not be loaded. + IAutoRefreshingSecret Create(string secretName, bool requireSuccessfulLoad); + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/ISecretManagerAccess.cs b/UET/Redpoint.CloudFramework/Configuration/ISecretManagerAccess.cs new file mode 100644 index 00000000..0960fe4d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/ISecretManagerAccess.cs @@ -0,0 +1,16 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Google.Cloud.SecretManager.V1; + + internal interface ISecretManagerAccess + { + SecretManagerServiceClient SecretClient { get; } + + Secret? TryGetSecret(string secretName); + SecretVersion? TryGetLatestSecretVersion(Secret secret); + AccessSecretVersionResponse? 
TryAccessSecretVersion(SecretVersion secretVersion); + + Task TryGetLatestSecretVersionAsync(Secret secret); + Task TryAccessSecretVersionAsync(SecretVersion secretVersion); + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/ISecretManagerConfigurationSourceBehaviour.cs b/UET/Redpoint.CloudFramework/Configuration/ISecretManagerConfigurationSourceBehaviour.cs new file mode 100644 index 00000000..9f89e786 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/ISecretManagerConfigurationSourceBehaviour.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.Configuration +{ + internal interface ISecretManagerConfigurationSourceBehaviour + { + string SecretName { get; } + + bool RequireSuccessfulLoad { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/ISecretManagerNotificationManager.cs b/UET/Redpoint.CloudFramework/Configuration/ISecretManagerNotificationManager.cs new file mode 100644 index 00000000..9fd8ac95 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/ISecretManagerNotificationManager.cs @@ -0,0 +1,19 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Google.Cloud.PubSub.V1; + using Google.Cloud.SecretManager.V1; + using System; + + internal interface ISecretManagerNotificationManager : IAsyncDisposable + { + void Subscribe(Secret secret); + + Task SubscribeAsync(Secret secret); + + Task UnsubscribeAsync(Secret secret); + + List>> OnSecretUpdated { get; } + + ValueTask UnsubscribeAllAsync(); + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/ISecretManagerNotificationSuffixProvider.cs b/UET/Redpoint.CloudFramework/Configuration/ISecretManagerNotificationSuffixProvider.cs new file mode 100644 index 00000000..f2bf871a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/ISecretManagerNotificationSuffixProvider.cs @@ -0,0 +1,7 @@ +namespace Redpoint.CloudFramework.Configuration +{ + internal interface ISecretManagerNotificationSuffixProvider + { + string Suffix { get; } + } +} diff 
--git a/UET/Redpoint.CloudFramework/Configuration/JsonConfigurationParser.cs b/UET/Redpoint.CloudFramework/Configuration/JsonConfigurationParser.cs new file mode 100644 index 00000000..4e6c2a14 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/JsonConfigurationParser.cs @@ -0,0 +1,107 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Microsoft.Extensions.Configuration; + using System; + using System.Collections.Generic; + using System.Globalization; + using System.IO; + using System.Text.Json; + + /// + /// Copy of https://github.com/dotnet/runtime/blob/main/src/libraries/Microsoft.Extensions.Configuration.Json/src/JsonConfigurationFileParser.cs because + /// it's not public. + /// + internal sealed class JsonConfigurationParser + { + private JsonConfigurationParser() { } + + private readonly Dictionary _data = new Dictionary(StringComparer.OrdinalIgnoreCase); + private readonly Stack _paths = new Stack(); + + public static IDictionary Parse(Stream input) + => new JsonConfigurationParser().ParseStream(input); + + private Dictionary ParseStream(Stream input) + { + var jsonDocumentOptions = new JsonDocumentOptions + { + CommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true, + }; + + using (var reader = new StreamReader(input)) + using (JsonDocument doc = JsonDocument.Parse(reader.ReadToEnd(), jsonDocumentOptions)) + { + if (doc.RootElement.ValueKind != JsonValueKind.Object) + { + return new Dictionary(); + } + VisitElement(doc.RootElement); + } + + return _data; + } + + private void VisitElement(JsonElement element) + { + var isEmpty = true; + + foreach (JsonProperty property in element.EnumerateObject()) + { + isEmpty = false; + EnterContext(property.Name); + VisitValue(property.Value); + ExitContext(); + } + + if (isEmpty && _paths.Count > 0) + { + _data[_paths.Peek()] = null; + } + } + + private void VisitValue(JsonElement value) + { + switch (value.ValueKind) + { + case JsonValueKind.Object: + VisitElement(value); + 
break; + + case JsonValueKind.Array: + int index = 0; + foreach (JsonElement arrayElement in value.EnumerateArray()) + { + EnterContext(index.ToString(CultureInfo.InvariantCulture)); + VisitValue(arrayElement); + ExitContext(); + index++; + } + break; + + case JsonValueKind.Number: + case JsonValueKind.String: + case JsonValueKind.True: + case JsonValueKind.False: + case JsonValueKind.Null: + string key = _paths.Peek(); + if (_data.ContainsKey(key)) + { + throw new FormatException($"Key {key} is duplicated."); + } + _data[key] = value.ToString(); + break; + + default: + throw new FormatException($"Unsupported JSON token {value.ValueKind}."); + } + } + + private void EnterContext(string context) => + _paths.Push(_paths.Count > 0 ? + _paths.Peek() + ConfigurationPath.KeyDelimiter + context : + context); + + private void ExitContext() => _paths.Pop(); + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/SecretManagerConfigurationProvider.cs b/UET/Redpoint.CloudFramework/Configuration/SecretManagerConfigurationProvider.cs new file mode 100644 index 00000000..6aea98b2 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/SecretManagerConfigurationProvider.cs @@ -0,0 +1,33 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Microsoft.Extensions.Configuration; + + internal class SecretManagerConfigurationProvider : ConfigurationProvider + { + private readonly IAutoRefreshingSecretFactory _autoRefreshingSecretFactory; + private readonly ISecretManagerConfigurationSourceBehaviour _secretManagerConfigurationSourceBehaviour; + internal IAutoRefreshingSecret? 
_autoRefreshingSecret; + + public SecretManagerConfigurationProvider( + IAutoRefreshingSecretFactory autoRefreshingSecretFactory, + ISecretManagerConfigurationSourceBehaviour secretManagerConfigurationSourceBehaviour) + { + _autoRefreshingSecretFactory = autoRefreshingSecretFactory; + _secretManagerConfigurationSourceBehaviour = secretManagerConfigurationSourceBehaviour; + _autoRefreshingSecret = null; + } + + public override void Load() + { + _autoRefreshingSecret = _autoRefreshingSecretFactory.Create( + _secretManagerConfigurationSourceBehaviour.SecretName, + _secretManagerConfigurationSourceBehaviour.RequireSuccessfulLoad); + _autoRefreshingSecret.OnRefreshed = () => + { + Data = _autoRefreshingSecret.Data; + OnReload(); + }; + Data = _autoRefreshingSecret.Data; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/SecretManagerConfigurationSource.cs b/UET/Redpoint.CloudFramework/Configuration/SecretManagerConfigurationSource.cs new file mode 100644 index 00000000..e0c9d3ed --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/SecretManagerConfigurationSource.cs @@ -0,0 +1,20 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.DependencyInjection; + + internal class SecretManagerConfigurationSource : IConfigurationSource + { + private readonly IServiceProvider _serviceProvider; + + public SecretManagerConfigurationSource(IServiceProvider serviceProvider) + { + _serviceProvider = serviceProvider; + } + + public IConfigurationProvider Build(IConfigurationBuilder builder) + { + return _serviceProvider.GetRequiredService(); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/SecretManagerNotificationEventArgs.cs b/UET/Redpoint.CloudFramework/Configuration/SecretManagerNotificationEventArgs.cs new file mode 100644 index 00000000..ac37379a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/SecretManagerNotificationEventArgs.cs @@ -0,0 +1,10 @@ 
+namespace Redpoint.CloudFramework.Configuration +{ + using Google.Cloud.SecretManager.V1; + using System; + + internal class SecretManagerNotificationEventArgs : EventArgs + { + public required Secret Secret { get; init; } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/SecretManagerSecretFailedToLoadException.cs b/UET/Redpoint.CloudFramework/Configuration/SecretManagerSecretFailedToLoadException.cs new file mode 100644 index 00000000..5f2c8147 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/SecretManagerSecretFailedToLoadException.cs @@ -0,0 +1,18 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using System; + + /// + /// Thrown at startup when the application is unable to load required configuration from Google Cloud Secret Manager. + /// + public class SecretManagerSecretFailedToLoadException : Exception + { + /// + /// Constructs a . + /// + /// The exception message. + public SecretManagerSecretFailedToLoadException(string message) : base(message) + { + } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/SecretManagerServiceCollectionExtensions.cs b/UET/Redpoint.CloudFramework/Configuration/SecretManagerServiceCollectionExtensions.cs new file mode 100644 index 00000000..564c04c5 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/SecretManagerServiceCollectionExtensions.cs @@ -0,0 +1,74 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Google.Cloud.SecretManager.V1; + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.DependencyInjection; + + /// + /// Method which can be used to register the service + /// with a service collection. + /// + public static class SecretManagerServiceCollectionExtensions + { + private static ISecretManagerNotificationManager? 
_superSingletonNotificationManager = null; + + private static void AddSecretManagerBase(this IServiceCollection services, bool isolatedNotificationManager) + { + services.AddSingleton(); + if (isolatedNotificationManager) + { + services.AddSingleton(); + } + else + { + services.AddSingleton(); + services.AddSingleton(sp => + { + // @note: The notification manager is registered as a "super singleton", which is always + // the same instance in the application, even across different service providers and ensures + // that the hosted service that runs at runtime in the ASP.NET uses the same instance that + // the configuration system uses. + if (_superSingletonNotificationManager == null) + { + _superSingletonNotificationManager = sp.GetRequiredService(); + } + return _superSingletonNotificationManager; + }); + } + services.AddSingleton(); + } + + /// + /// Registers a hosted service which will clean up Google Cloud Secret Manager notification + /// subscriptions when the application exits. + /// + /// The service collection to register services into. + public static void AddSecretManagerRuntime(this IServiceCollection services) + { + services.AddSecretManagerBase(false); + + services.AddHostedService(); + } + + /// + /// Registers and + /// services into the service collection. + /// + /// The service collection to register services into. + /// If true, the will throw if the secret fails to load. + /// The secret that the should use as a backing store. Defaults to "appsettings". + /// For automation testing only. 
+ public static void AddSecretManagerConfiguration( + this IServiceCollection services, + bool requireSuccessfulLoad, + string secretName = "appsettings", + bool isolatedNotificationManager = false) + { + services.AddSecretManagerBase(isolatedNotificationManager); + + services.AddSingleton(new DefaultSecretManagerConfigurationSourceBehaviour(secretName, requireSuccessfulLoad)); + services.AddSingleton(); + services.AddTransient(); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Configuration/SecretManagerSubscriptionCleanupHostedService.cs b/UET/Redpoint.CloudFramework/Configuration/SecretManagerSubscriptionCleanupHostedService.cs new file mode 100644 index 00000000..49523757 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Configuration/SecretManagerSubscriptionCleanupHostedService.cs @@ -0,0 +1,27 @@ +namespace Redpoint.CloudFramework.Configuration +{ + using Microsoft.Extensions.Hosting; + using System.Threading; + using System.Threading.Tasks; + + internal class SecretManagerSubscriptionCleanupHostedService : IHostedService + { + private readonly ISecretManagerNotificationManager _secretManagerNotificationManager; + + public SecretManagerSubscriptionCleanupHostedService( + ISecretManagerNotificationManager secretManagerNotificationManager) + { + _secretManagerNotificationManager = secretManagerNotificationManager; + } + + public Task StartAsync(CancellationToken cancellationToken) + { + return Task.CompletedTask; + } + + public async Task StopAsync(CancellationToken cancellationToken) + { + await _secretManagerNotificationManager.UnsubscribeAllAsync().ConfigureAwait(false); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Counter/DefaultGlobalShardedCounter.cs b/UET/Redpoint.CloudFramework/Counter/DefaultGlobalShardedCounter.cs new file mode 100644 index 00000000..4fa37d00 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Counter/DefaultGlobalShardedCounter.cs @@ -0,0 +1,140 @@ +namespace Redpoint.CloudFramework.Counter +{ + using Google.Cloud.Datastore.V1; 
+ using Redpoint.CloudFramework.Repository; + using Redpoint.CloudFramework.Repository.Transaction; + using StackExchange.Redis; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Security.Cryptography; + using System.Threading.Tasks; + + internal class DefaultGlobalShardedCounter : IGlobalShardedCounter + { + private readonly IGlobalRepository _globalRepository; + private readonly IDatabase _redisDatabase; + + // This can only ever be increased; not decreased. + private const int _numShards = 10; + + private const bool _enableRedis = true; + + public DefaultGlobalShardedCounter( + IGlobalRepository globalRepository, + IConnectionMultiplexer connectionMultiplexer) + { + ArgumentNullException.ThrowIfNull(connectionMultiplexer); + + _globalRepository = globalRepository; + _redisDatabase = connectionMultiplexer.GetDatabase(); + } + + private static string GetShardKeyName(string name, long index) + { + return $"{name}:{index}"; + } + + private static string GetShardRedisName(string @namespace, string name) + { + return $"shard:{@namespace}:{name}"; + } + + private async IAsyncEnumerable GetAllKeys(string @namespace, string name) + { + var keyFactory = await _globalRepository.GetKeyFactoryAsync(@namespace).ConfigureAwait(false); + for (var i = 0; i < _numShards; i++) + { + yield return keyFactory.CreateKey(GetShardKeyName(name, i)); + } + } + + public async Task GetAsync(string @namespace, string name) + { + long total; + if (_enableRedis) + { + var shardCache = await _redisDatabase.StringGetAsync(GetShardRedisName(@namespace, name)).ConfigureAwait(false); + if (!(!shardCache.HasValue || !shardCache.IsInteger || !shardCache.TryParse(out total))) + { + return total; + } + } + + total = + await _globalRepository.LoadAsync( + @namespace, + GetAllKeys(@namespace, name)) + .Where(x => x.Value != null) + .Select(x => x.Value!.value) + .SumAsync().ConfigureAwait(false); + if (_enableRedis) + { + await _redisDatabase.StringSetAsync( + 
GetShardRedisName(@namespace, name), + total, + TimeSpan.FromSeconds(60), + When.NotExists).ConfigureAwait(false); + } + return total; + } + + public async Task AdjustAsync(string @namespace, string name, long modifier) + { + var transaction = await _globalRepository.BeginTransactionAsync(@namespace).ConfigureAwait(false); + try + { + var afterCommit = await AdjustAsync(@namespace, name, modifier, transaction).ConfigureAwait(false); + await _globalRepository.CommitAsync(@namespace, transaction).ConfigureAwait(false); + await afterCommit().ConfigureAwait(false); + } + finally + { + if (!transaction.HasCommitted) + { + await _globalRepository.RollbackAsync(@namespace, transaction).ConfigureAwait(false); + } + } + } + + public async Task AdjustAsync(string @namespace, string name, long modifier, IModelTransaction transaction) + { + var index = RandomNumberGenerator.GetInt32(_numShards); + var keyFactory = await _globalRepository.GetKeyFactoryAsync(@namespace).ConfigureAwait(false); + var key = keyFactory.CreateKey(GetShardKeyName(name, index)); + + var create = false; + var counter = await _globalRepository.LoadAsync(@namespace, key, transaction).ConfigureAwait(false); + if (counter == null) + { + counter = new DefaultShardedCounterModel + { + Key = key, + name = name, + index = index, + value = modifier, + }; + create = true; + } + else + { + counter.value += modifier; + } + if (create) + { + await _globalRepository.CreateAsync(@namespace, counter, transaction).ConfigureAwait(false); + } + else + { + await _globalRepository.UpdateAsync(@namespace, counter, transaction).ConfigureAwait(false); + } + return async () => + { + if (_enableRedis) + { + await _redisDatabase.StringIncrementAsync(GetShardRedisName(@namespace, name), modifier, CommandFlags.FireAndForget).ConfigureAwait(false); + } + }; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Counter/DefaultShardedCounter.cs b/UET/Redpoint.CloudFramework/Counter/DefaultShardedCounter.cs new file mode 100644 index 
00000000..46d5d6e1 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Counter/DefaultShardedCounter.cs @@ -0,0 +1,54 @@ +namespace Redpoint.CloudFramework.Counter +{ + using Redpoint.CloudFramework.Repository.Transaction; + using System; + using System.Threading.Tasks; + + internal class DefaultShardedCounter : IShardedCounter + { + private readonly IGlobalShardedCounter _globalShardedCounter; + private readonly ICurrentTenantService _currentTenant; + + public DefaultShardedCounter( + IGlobalShardedCounter globalShardedCounter, + ICurrentTenantService currentTenant) + { + _globalShardedCounter = globalShardedCounter; + _currentTenant = currentTenant; + } + + private async Task GetDatastoreNamespace() + { + var currentTenant = await _currentTenant.GetTenant().ConfigureAwait(false); + if (currentTenant == null) + { + throw new InvalidOperationException("IShardedCounter can not be used without a tenant."); + } + return currentTenant.DatastoreNamespace; + } + + public async Task GetAsync(string name) + { + return await _globalShardedCounter.GetAsync( + await GetDatastoreNamespace().ConfigureAwait(false), + name).ConfigureAwait(false); + } + + public async Task AdjustAsync(string name, long modifier) + { + await _globalShardedCounter.AdjustAsync( + await GetDatastoreNamespace().ConfigureAwait(false), + name, + modifier).ConfigureAwait(false); + } + + public async Task AdjustAsync(string name, long modifier, IModelTransaction existingTransaction) + { + return await _globalShardedCounter.AdjustAsync( + await GetDatastoreNamespace().ConfigureAwait(false), + name, + modifier, + existingTransaction).ConfigureAwait(false); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Counter/DefaultShardedCounterModel.cs b/UET/Redpoint.CloudFramework/Counter/DefaultShardedCounterModel.cs new file mode 100644 index 00000000..66dc7456 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Counter/DefaultShardedCounterModel.cs @@ -0,0 +1,27 @@ +namespace Redpoint.CloudFramework.Counter +{ + using 
Redpoint.CloudFramework.Models; + using System; + + [Kind("_rcfShardedCounter")] + internal class DefaultShardedCounterModel : AttributedModel + { + /// + /// The counter name shared amongst all shards of this counter. + /// + [Type(FieldType.String), Indexed, Default("")] + public string name { get; set; } = string.Empty; + + /// + /// The shard index. + /// + [Type(FieldType.Integer), Indexed, Default(0)] + public long index { get; set; } = 0; + + /// + /// The shard value. + /// + [Type(FieldType.Integer), Indexed, Default(0)] + public long value { get; set; } = 0; + } +} diff --git a/UET/Redpoint.CloudFramework/Counter/IGlobalShardedCounter.cs b/UET/Redpoint.CloudFramework/Counter/IGlobalShardedCounter.cs new file mode 100644 index 00000000..2edd8b77 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Counter/IGlobalShardedCounter.cs @@ -0,0 +1,36 @@ +namespace Redpoint.CloudFramework.Counter +{ + using Redpoint.CloudFramework.Repository.Transaction; + using System.Threading.Tasks; + + public interface IGlobalShardedCounter + { + /// + /// Returns the value of a sharded counter. + /// + /// The Datastore namespace to store the counter in. + /// The name of the sharded counter. + /// The value of the sharded counter. + Task GetAsync(string @namespace, string name); + + /// + /// Adjust the value of a sharded counter. + /// + /// The Datastore namespace to store the counter in. + /// The name of the sharded counter. + /// The amount to modify the sharded counter by. + /// The task to await on. + Task AdjustAsync(string @namespace, string name, long modifier); + + /// + /// Adjust the value of a sharded counter inside an existing transaction. You *must* await this + /// function and call the callback it returns after you commit the provided transaction. + /// + /// The Datastore namespace to store the counter in. + /// The name of the sharded counter. + /// The amount to modify the sharded counter by. + /// The existing transaction to update the counter in. 
+ /// The task to await on. + Task AdjustAsync(string @namespace, string name, long modifier, IModelTransaction existingTransaction); + } +} diff --git a/UET/Redpoint.CloudFramework/Counter/IShardedCounter.cs b/UET/Redpoint.CloudFramework/Counter/IShardedCounter.cs new file mode 100644 index 00000000..9d08cf5f --- /dev/null +++ b/UET/Redpoint.CloudFramework/Counter/IShardedCounter.cs @@ -0,0 +1,33 @@ +namespace Redpoint.CloudFramework.Counter +{ + using Redpoint.CloudFramework.Repository.Transaction; + using System.Threading.Tasks; + + public interface IShardedCounter + { + /// + /// Returns the value of a sharded counter. + /// + /// The name of the sharded counter. + /// The value of the sharded counter. + Task GetAsync(string name); + + /// + /// Adjust the value of a sharded counter. + /// + /// The name of the sharded counter. + /// The amount to modify the sharded counter by. + /// The task to await on. + Task AdjustAsync(string name, long modifier); + + /// + /// Adjust the value of a sharded counter inside an existing transaction. You *must* await this + /// function and call the callback it returns after you commit the provided transaction. + /// + /// The name of the sharded counter. + /// The amount to modify the sharded counter by. + /// The existing transaction to update the counter in. + /// The task to await on. 
+ Task AdjustAsync(string name, long modifier, IModelTransaction existingTransaction); + } +} diff --git a/UET/Redpoint.CloudFramework/Counter/IShardedCounterService.cs b/UET/Redpoint.CloudFramework/Counter/IShardedCounterService.cs new file mode 100644 index 00000000..21e7548a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Counter/IShardedCounterService.cs @@ -0,0 +1,64 @@ +namespace Redpoint.CloudFramework.Counter +{ + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Transaction; + using System; + using System.Diagnostics.CodeAnalysis; + using System.Threading.Tasks; + + [Obsolete("Use IShardedCounter instead.")] + public interface IShardedCounterService + { + /// + /// Returns the value of a sharded counter. + /// + /// The name of the sharded counter. + /// The value of the sharded counter. + Task Get(string name); + + /// + /// Returns the value of a sharded counter stored in a custom model. + /// + /// The model that is used to store sharded counter data. + /// The name of the sharded counter. + /// The value of the sharded counter. + Task GetCustom<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(string name) where T : Model, IShardedCounterModel, new(); + + /// + /// Adjust the value of a sharded counter. + /// + /// The name of the sharded counter. + /// The amount to modify the sharded counter by. + /// The task to await on. + Task Adjust(string name, long modifier); + + /// + /// Adjust the value of a sharded counter stored in a custom model. + /// + /// The model that is used to store sharded counter data. + /// The name of the sharded counter. + /// The amount to modify the sharded counter by. + /// The task to await on. 
+ Task AdjustCustom<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(string name, long modifier) where T : Model, IShardedCounterModel, new(); + + /// + /// Adjust the value of a sharded counter inside an existing transaction. You *must* await this + /// function and call the callback it returns after you commit the provided transaction. + /// + /// The name of the sharded counter. + /// The amount to modify the sharded counter by. + /// The existing transaction to update the counter in. + /// The task to await on. + Task> Adjust(string name, long modifier, IModelTransaction existingTransaction); + + /// + /// Adjust the value of a sharded counter stored in a custom model, inside an existing transaction. + /// You *must* await this function and call the callback it returns after you commit the provided transaction. + /// + /// The name of the sharded counter. + /// The amount to modify the sharded counter by. + /// The existing transaction to update the counter in. + /// The task to await on. + Task> AdjustCustom<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(string name, long modifier, IModelTransaction existingTransaction) where T : Model, IShardedCounterModel, new(); + } +} diff --git a/UET/Redpoint.CloudFramework/Counter/LegacyShardedCounterModel.cs b/UET/Redpoint.CloudFramework/Counter/LegacyShardedCounterModel.cs new file mode 100644 index 00000000..7163efa4 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Counter/LegacyShardedCounterModel.cs @@ -0,0 +1,53 @@ +using Redpoint.CloudFramework.Models; + +namespace Redpoint.CloudFramework.Counter +{ + using System.Collections.Generic; + using System.Globalization; + + [Obsolete] + internal class LegacyShardedCounterModel : Model, IShardedCounterModel + { + public long? 
count { get; set; } + + public override string GetKind() + { + return "RCF-SharedCounter"; + } + + public override long GetSchemaVersion() + { + return 1; + } + + public override Dictionary GetTypes() + { + return new Dictionary { + { "count", FieldType.Integer } + }; + } + + public override HashSet GetIndexes() + { + return new HashSet + { + "count", + }; + } + + public string? GetTypeFieldName() + { + return null; + } + + public string GetCountFieldName() + { + return "count"; + } + + public string FormatShardName(string name, int index) + { + return string.Format(CultureInfo.InvariantCulture, "shard-{0}-{1}", name, index); + } + } +} \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework/Counter/LegacyShardedCounterService.cs b/UET/Redpoint.CloudFramework/Counter/LegacyShardedCounterService.cs new file mode 100644 index 00000000..c6523e76 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Counter/LegacyShardedCounterService.cs @@ -0,0 +1,224 @@ +namespace Redpoint.CloudFramework.Counter +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository; + using Redpoint.CloudFramework.Repository.Transaction; + using StackExchange.Redis; + using System; + using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; + using System.Linq; + using System.Reflection; + using System.Threading.Tasks; + +#pragma warning disable CS0612 // Type or member is obsolete +#pragma warning disable CS0618 // Type or member is obsolete + internal class LegacyShardedCounterService : IShardedCounterService + { + private readonly IRepository _repository; + private readonly IDatabase _redisDatabase; + private readonly Random _random; + + // This can only ever be increased; not decreased. 
+ public const int NumShards = 60; + + private const bool _enableRedis = true; + + public LegacyShardedCounterService( + IRepository repository, + IConnectionMultiplexer connectionMultiplexer) + { + ArgumentNullException.ThrowIfNull(connectionMultiplexer); + + _repository = repository; + _redisDatabase = connectionMultiplexer.GetDatabase(); + _random = new Random(); + } + + private async IAsyncEnumerable GetAllKeys(string name) where T : Model, IShardedCounterModel, new() + { + var t = new T(); + + var keyFactory = await _repository.GetKeyFactoryAsync().ConfigureAwait(false); + for (var i = 0; i < NumShards; i++) + { + yield return keyFactory.CreateKey(t.FormatShardName(name, i)); + } + } + + public async Task AdjustCustom<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(string name, long modifier) where T : Model, IShardedCounterModel, new() + { + var t = new T(); + + var counterProperty = typeof(T).GetProperty(t.GetCountFieldName(), BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (counterProperty == null) + { + throw new InvalidOperationException($"The count field name specified by GetCountFieldName '{t.GetCountFieldName()}' does not exist on the class."); + } + + var typeName = t.GetTypeFieldName(); + var typeProperty = typeName == null ? 
null : typeof(T).GetProperty(typeName, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + +#pragma warning disable CA5394 // Do not use insecure randomness + var idx = _random.Next(0, NumShards); +#pragma warning restore CA5394 // Do not use insecure randomness + var keyFactory = await _repository.GetKeyFactoryAsync().ConfigureAwait(false); + var key = keyFactory.CreateKey(t.FormatShardName(name, idx)); + var transaction = await _repository.BeginTransactionAsync().ConfigureAwait(false); + var rollback = true; + try + { + var create = false; + var counter = await _repository.LoadAsync(key, transaction).ConfigureAwait(false); + if (counter == null) + { + counter = new T + { + Key = key, + }; + counterProperty.SetValue(counter, modifier); + if (typeProperty != null) + { + typeProperty.SetValue(counter, "shard"); + } + create = true; + } + else + { + counterProperty.SetValue(counter, ((long?)counterProperty.GetValue(counter) ?? 0) + modifier); + } + if (create) + { + await _repository.CreateAsync(counter, transaction).ConfigureAwait(false); + } + else + { + await _repository.UpdateAsync(counter, transaction).ConfigureAwait(false); + } + await _repository.CommitAsync(transaction).ConfigureAwait(false); + if (_enableRedis) + { + await _redisDatabase.StringIncrementAsync("shard-" + name, modifier, CommandFlags.FireAndForget).ConfigureAwait(false); + } + rollback = false; + } + finally + { + if (rollback) + { + await transaction.Transaction.RollbackAsync().ConfigureAwait(false); + } + } + } + + + public async Task> AdjustCustom<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(string name, long modifier, IModelTransaction existingTransaction) where T : Model, IShardedCounterModel, new() + { + var t = new T(); + + var counterProperty = typeof(T).GetProperty(t.GetCountFieldName(), BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if 
(counterProperty == null) + { + throw new InvalidOperationException($"The count field name specified by GetCountFieldName '{t.GetCountFieldName()}' does not exist on the class."); + } + + var typeName = t.GetTypeFieldName(); + var typeProperty = typeName == null ? null : typeof(T).GetProperty(typeName, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + +#pragma warning disable CA5394 // Do not use insecure randomness + var idx = _random.Next(0, NumShards); +#pragma warning restore CA5394 // Do not use insecure randomness + var keyFactory = await _repository.GetKeyFactoryAsync().ConfigureAwait(false); + var key = keyFactory.CreateKey(t.FormatShardName(name, idx)); + + var create = false; + var counter = await _repository.LoadAsync(key, existingTransaction).ConfigureAwait(false); + if (counter == null) + { + counter = new T + { + Key = key, + }; + counterProperty.SetValue(counter, modifier); + if (typeProperty != null) + { + typeProperty.SetValue(counter, "shard"); + } + create = true; + } + else + { + counterProperty.SetValue(counter, ((long?)counterProperty.GetValue(counter) ?? 
0) + modifier); + } + if (create) + { + await _repository.CreateAsync(counter, existingTransaction).ConfigureAwait(false); + } + else + { + await _repository.UpdateAsync(counter, existingTransaction).ConfigureAwait(false); + } + return async () => + { + if (_enableRedis) + { + await _redisDatabase.StringIncrementAsync("shard-" + name, modifier, CommandFlags.FireAndForget).ConfigureAwait(false); + } + }; + } + + public async Task GetCustom<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(string name) where T : Model, IShardedCounterModel, new() + { + var t = new T(); + + var counterProperty = typeof(T).GetProperty(t.GetCountFieldName(), BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (counterProperty == null) + { + throw new InvalidOperationException($"The count field name specified by GetCountFieldName '{t.GetCountFieldName()}' does not exist on the class."); + } + + long total; + if (_enableRedis) + { + var shardCache = await _redisDatabase.StringGetAsync("shard-" + name).ConfigureAwait(false); + if (!(!shardCache.HasValue || !shardCache.IsInteger || !shardCache.TryParse(out total))) + { + return total; + } + } + + total = + await _repository.LoadAsync(GetAllKeys(name)) + .Where(x => x.Value != null) + .Select(x => (long?)counterProperty.GetValue(x.Value) ?? 
0) + .SumAsync().ConfigureAwait(false); + if (_enableRedis) + { + await _redisDatabase.StringSetAsync( + "shard-" + name, + total, + TimeSpan.FromSeconds(60), + When.NotExists).ConfigureAwait(false); + } + return total; + } + + public Task Get(string name) + { + return GetCustom(name); + } + + public Task Adjust(string name, long modifier) + { + return AdjustCustom(name, modifier); + } + + public Task> Adjust(string name, long modifier, IModelTransaction existingTransaction) + { + return AdjustCustom(name, modifier, existingTransaction); + } + } +#pragma warning restore CS0612 // Type or member is obsolete +#pragma warning restore CS0618 // Type or member is obsolete +} diff --git a/UET/Redpoint.CloudFramework/Counter/ShardedCounterPostCommit.cs b/UET/Redpoint.CloudFramework/Counter/ShardedCounterPostCommit.cs new file mode 100644 index 00000000..1464f78b --- /dev/null +++ b/UET/Redpoint.CloudFramework/Counter/ShardedCounterPostCommit.cs @@ -0,0 +1,10 @@ +namespace Redpoint.CloudFramework.Counter +{ + using System.Threading.Tasks; + + /// + /// The callback returned from and which MUST be called after the transaction has been committed. 
+ /// + /// + public delegate Task ShardedCounterPostCommit(); +} diff --git a/UET/Redpoint.CloudFramework/DataProtection/StaticDataProtectionProvider.cs b/UET/Redpoint.CloudFramework/DataProtection/StaticDataProtectionProvider.cs new file mode 100644 index 00000000..4f3c0db6 --- /dev/null +++ b/UET/Redpoint.CloudFramework/DataProtection/StaticDataProtectionProvider.cs @@ -0,0 +1,26 @@ +namespace Redpoint.CloudFramework.DataProtection +{ + using Microsoft.AspNetCore.DataProtection; + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + + public class StaticDataProtectionProvider : IDataProtectionProvider + { + private readonly IHostEnvironment _hostEnvironment; + private readonly IConfiguration _configuration; + private readonly ILogger _logger; + + public StaticDataProtectionProvider(IHostEnvironment hostEnvironment, IConfiguration configuration, ILogger logger) + { + _hostEnvironment = hostEnvironment; + _configuration = configuration; + _logger = logger; + } + + public IDataProtector CreateProtector(string purpose) + { + return new StaticDataProtector(_hostEnvironment, _configuration, _logger); + } + } +} diff --git a/UET/Redpoint.CloudFramework/DataProtection/StaticDataProtector.cs b/UET/Redpoint.CloudFramework/DataProtection/StaticDataProtector.cs new file mode 100644 index 00000000..605f011c --- /dev/null +++ b/UET/Redpoint.CloudFramework/DataProtection/StaticDataProtector.cs @@ -0,0 +1,167 @@ +namespace Redpoint.CloudFramework.DataProtection +{ + using Microsoft.AspNetCore.DataProtection; + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using Newtonsoft.Json; + using Newtonsoft.Json.Linq; + using System; + using System.IO; + using System.Reflection; + using System.Security.Cryptography; + + /// + /// We don't protect anything sensitive (ASP.NET Core just needs some encryption for the session key + /// that it stores in 
cookies), and all of the built-in data protection mechanisms are unreliable. + /// + /// You need to create AES key/IV settings to your appsettings.json: + /// + /// { + /// "CloudFramework": { + /// "Security": { + /// "AES": { + /// "Key": "", + /// "IV": "" + /// } + /// } + /// } + /// } + /// + /// If you run your application unconfigured, the framework will throw an exception that you can + /// get newly generated values out of to use in appsettings.json. + /// + public class StaticDataProtector : IDataProtector + { + private readonly Aes _aes; + private readonly byte[] _aesKey; + private readonly byte[] _aesIV; + + private static T CreatePath(JObject current, string name, T newValue) where T : JToken + { + if (!current.ContainsKey(name)) + { + current.Add(name, newValue); + return newValue; + } + return current.Property(name, StringComparison.InvariantCulture)!.Value.ToObject()!; + } + + public StaticDataProtector(IHostEnvironment hostEnvironment, IConfiguration configuration, ILogger logger) + { + ArgumentNullException.ThrowIfNull(configuration); + + _aes = Aes.Create(); + _aes.BlockSize = 128; + _aes.Mode = CipherMode.CBC; + _aes.Padding = PaddingMode.PKCS7; + + // If the developer is running their app unconfigured, generate the key and IV and throw an exception with + // the values to make it easy to set values into appsettings.json + if (string.IsNullOrEmpty(configuration["CloudFramework:Security:AES:Key"]) || + string.IsNullOrEmpty(configuration["CloudFramework:Security:AES:IV"])) + { + _aes.GenerateIV(); + _aes.GenerateKey(); + + bool needsThrow = true; + if (hostEnvironment.IsDevelopment()) + { + // If we can, automatically fix up appsettings.json for the developer. 
+#pragma warning disable IL3000 // Avoid accessing Assembly file path when publishing as a single file + var filePath = Path.Combine(new FileInfo(Assembly.GetEntryAssembly()!.Location).DirectoryName!, "..", "..", "..", "appsettings.json"); +#pragma warning restore IL3000 // Avoid accessing Assembly file path when publishing as a single file + if (File.Exists(filePath)) + { + var parentJson = JsonConvert.DeserializeObject(File.ReadAllText(filePath))!; + var json = CreatePath(parentJson, "CloudFramework", new JObject()); + json = CreatePath(json, "Security", new JObject()); + json = CreatePath(json, "AES", new JObject()); + CreatePath(json, "Key", new JValue(Convert.ToBase64String(_aes.Key))); + CreatePath(json, "IV", new JValue(Convert.ToBase64String(_aes.IV))); + File.WriteAllText(filePath, JsonConvert.SerializeObject(parentJson, Formatting.Indented)); + + logger.LogInformation("Automatically updated your appsettings.json file with the requires AES key/IV settings."); + + _aesKey = _aes.Key; + _aesIV = _aes.IV; + + // We've updated appsettings. We don't need to restart because we're already set our settings, and next time the app starts it will be using the settings we just persisted. + needsThrow = false; + } + } + + if (needsThrow) + { + var message = "You haven't set the AES key/IV in appsettings.json. Here are newly generated values for you. Key: '" + Convert.ToBase64String(_aes.Key) + "', IV: '" + Convert.ToBase64String(_aes.IV) + "'. Refer to documentation on how to set this up."; + logger.LogError(message); + throw new InvalidOperationException(message); + } + } + else + { + _aesKey = Convert.FromBase64String(configuration["CloudFramework:Security:AES:Key"] ?? string.Empty); + _aesIV = Convert.FromBase64String(configuration["CloudFramework:Security:AES:IV"] ?? 
string.Empty); + } + + if (_aesKey == null || _aesIV == null) + { + throw new InvalidOperationException("AES key/IV not loaded; this code path should not be hit."); + } + } + + public IDataProtector CreateProtector(string purpose) + { + return this; + } + + public byte[] Protect(byte[] plaintext) + { + ArgumentNullException.ThrowIfNull(plaintext); + + // We must have the IV the same every time, or the content can't be decrypted. +#pragma warning disable CA5401 + using var encryptor = _aes.CreateEncryptor(_aesKey, _aesIV); +#pragma warning restore CA5401 + using var result = new MemoryStream(); + + using (var stream = new CryptoStream(result, encryptor, CryptoStreamMode.Write, true)) + { + stream.Write(plaintext, 0, plaintext.Length); + } + + var l = new byte[result.Position]; + result.Seek(0, SeekOrigin.Begin); + result.Read(l, 0, l.Length); + return l; + } + + public byte[] Unprotect(byte[] protectedData) + { + ArgumentNullException.ThrowIfNull(protectedData); + + try + { + // We must have the IV the same every time, or the content can't be decrypted. 
+#pragma warning disable CA5401 + using var decryptor = _aes.CreateDecryptor(_aesKey, _aesIV); +#pragma warning restore CA5401 + using var result = new MemoryStream(); + + using (var stream = new CryptoStream(result, decryptor, CryptoStreamMode.Write, true)) + { + stream.Write(protectedData, 0, protectedData.Length); + } + + var l = new byte[result.Position]; + result.Seek(0, SeekOrigin.Begin); + result.Read(l, 0, l.Length); + return l; + } + catch (CryptographicException) + { + return Array.Empty(); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Event/CoreEventTypes.cs b/UET/Redpoint.CloudFramework/Event/CoreEventTypes.cs new file mode 100644 index 00000000..1ae10358 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Event/CoreEventTypes.cs @@ -0,0 +1,7 @@ +namespace Redpoint.CloudFramework.Event +{ + public static class CoreEventTypes + { + public const string Migrate = "migrate"; + } +} diff --git a/UET/Redpoint.CloudFramework/Event/Event.cs b/UET/Redpoint.CloudFramework/Event/Event.cs new file mode 100644 index 00000000..f0c21f71 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Event/Event.cs @@ -0,0 +1,31 @@ +namespace Redpoint.CloudFramework.Event +{ + using Google.Cloud.Datastore.V1; + using Newtonsoft.Json.Linq; + using NodaTime; + +#pragma warning disable CA1724 + public class Event +#pragma warning restore CA1724 + { + public required Key Id { get; set; } + + public required Instant UtcTimestamp { get; set; } + + public string? EventType { get; set; } + + public string? ServiceIdentifier { get; set; } + + public Key? Project { get; set; } + + public Key? Session { get; set; } + + public JObject? Request { get; set; } + + public Key? Key { get; set; } + + public JObject? Entity { get; set; } + + public JObject? 
Userdata { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework/Event/EventApi.cs b/UET/Redpoint.CloudFramework/Event/EventApi.cs new file mode 100644 index 00000000..b1f46266 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Event/EventApi.cs @@ -0,0 +1,155 @@ +namespace Redpoint.CloudFramework.Event +{ + using System; + using System.Threading.Tasks; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Event.PubSub; + using Microsoft.AspNetCore.Http; + using Microsoft.AspNetCore.Http.Extensions; + using Newtonsoft.Json; + using Newtonsoft.Json.Linq; + using NodaTime; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.GoogleInfrastructure; + using Redpoint.CloudFramework.Tracing; + + public class EventApi : IEventApi + { + private readonly IGlobalPrefix _globalPrefix; + private readonly IPubSub _pubSub; + private readonly IManagedTracer _managedTracer; + private readonly IGoogleServices _googleServices; + private Random _random = new Random(); + + public EventApi( + IGlobalPrefix globalPrefix, + IPubSub pubSub, + IManagedTracer managedTracer, + IGoogleServices googleServices) + { + _globalPrefix = globalPrefix; + _pubSub = pubSub; + _managedTracer = managedTracer; + _googleServices = googleServices; + } + + public async Task Raise(string eventType, Key project, Key session, HttpRequest request, Key key, object entity, object userdata) + { + SerializedEvent eventObj; + + using (_managedTracer.StartSpan("event.serialize", eventType)) + { + var generatedIdBytes = new byte[7]; +#pragma warning disable CA5394 // Do not use insecure randomness + _random.NextBytes(generatedIdBytes); +#pragma warning restore CA5394 // Do not use insecure randomness + var generatedIdString = Convert.ToHexString(generatedIdBytes); + var generatedId = Convert.ToInt64(generatedIdString, 16); + + var generatedKeyFactory = new KeyFactory(_googleServices.ProjectId, string.Empty, "HiveEvent"); + var generatedKey = 
generatedKeyFactory.CreateKey(generatedId); + + // TODO: Potentially get this from environment variables now. + var serviceIdentifier = "hivemp:unknown"; + + var data = new Event + { + Id = generatedKey, + UtcTimestamp = SystemClock.Instance.GetCurrentInstant(), + EventType = eventType, + ServiceIdentifier = serviceIdentifier, + Project = project, + Session = session, + Request = EventApi.FormatRequest(request), + Key = key, + Entity = EventApi.FormatEntity(entity), + Userdata = EventApi.FormatUserdata(userdata) + }; + + eventObj = SerializeEvent(data); + } + + using (_managedTracer.StartSpan("event.publish", eventType)) + { + await _pubSub.PublishAsync(eventObj).ConfigureAwait(false); + } + } + + private SerializedEvent SerializeEvent(Event data) + { + return new SerializedEvent + { + Id = _globalPrefix.CreateInternal(data.Id), + UtcTimestamp = data.UtcTimestamp.ToUnixTimeSeconds(), + Type = data.EventType, + Service = data.ServiceIdentifier, + Project = data.Project != null ? _globalPrefix.CreateInternal(data.Project) : null, + Session = data.Session != null ? _globalPrefix.CreateInternal(data.Session) : null, + Request = data.Request, + Key = data.Key != null ? _globalPrefix.CreateInternal(data.Key) : null, + Entity = data.Entity, + Userdata = data.Userdata + }; + } + + private static JObject? FormatUserdata(object userdata) + { + if (userdata == null) + { + return null; + } + + return JsonConvert.DeserializeObject( + JsonConvert.SerializeObject(userdata)); + } + + private static JObject? FormatEntity(object entity) + { + if (entity == null) + { + return null; + } + + if (entity is JObject o) + { + return o; + } + + if (entity is Model m) + { + // TODO + return null; + } + + return null; + } + + private static JObject? 
FormatRequest(HttpRequest request) + { + if (request == null) + { + return null; + } + + var headers = new JObject(); + foreach (var kv in request.Headers) + { + var headerArray = new JArray(); + foreach (var v in kv.Value) + { + headerArray.Add(v); + } + + headers.Add(kv.Key, headerArray); + } + + return JsonConvert.DeserializeObject(JsonConvert.SerializeObject(new + { + headers, + url = request.GetEncodedUrl(), + method = request.Method + })); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Event/EventProcessingOutcome.cs b/UET/Redpoint.CloudFramework/Event/EventProcessingOutcome.cs new file mode 100644 index 00000000..0e59920d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Event/EventProcessingOutcome.cs @@ -0,0 +1,20 @@ +namespace Redpoint.CloudFramework.Event +{ + public enum EventProcessingOutcome + { + /// + /// Indicates the event should be ignored by this processor. + /// + IgnoreEvent, + + /// + /// Indicates the event should be retried later. + /// + RetryLater, + + /// + /// Indicates that this processor has processed the event. 
+ /// + Complete + } +} diff --git a/UET/Redpoint.CloudFramework/Event/IEventApi.cs b/UET/Redpoint.CloudFramework/Event/IEventApi.cs new file mode 100644 index 00000000..556db341 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Event/IEventApi.cs @@ -0,0 +1,13 @@ +namespace Redpoint.CloudFramework.Event +{ + using Google.Cloud.Datastore.V1; + using System.Threading.Tasks; + using Microsoft.AspNetCore.Http; + + public interface IEventApi + { +#pragma warning disable CA1030 // Use events where appropriate + Task Raise(string eventType, Key project, Key session, HttpRequest request, Key key, object entity, object userdata); +#pragma warning restore CA1030 // Use events where appropriate + } +} diff --git a/UET/Redpoint.CloudFramework/Event/PubSub/GooglePubSub.cs b/UET/Redpoint.CloudFramework/Event/PubSub/GooglePubSub.cs new file mode 100644 index 00000000..d3992a3c --- /dev/null +++ b/UET/Redpoint.CloudFramework/Event/PubSub/GooglePubSub.cs @@ -0,0 +1,331 @@ +namespace Redpoint.CloudFramework.Event.PubSub +{ + using Google.Api.Gax; + using Google.Cloud.PubSub.V1; + using Google.Protobuf; + using Grpc.Core; + using Microsoft.Extensions.Logging; + using Newtonsoft.Json; + using Redpoint.CloudFramework.GoogleInfrastructure; + using Redpoint.CloudFramework.Metric; + using Redpoint.CloudFramework.Prefix; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + + public sealed class GooglePubSub : IPubSub, IDisposable + { + private readonly ILogger _logger; + private readonly IGoogleServices _googleServices; + private readonly IGoogleApiRetry _googleApiRetry; + private readonly IMetricService _metricService; + private readonly IGlobalPrefix _globalPrefix; + + private readonly PublisherServiceApiClient _publisherClient; + private readonly SubscriberServiceApiClient _subscriberClient; + private readonly ChannelCredentials? _publisherCredential; + private readonly string? 
_publisherServiceEndpoint; + private readonly ChannelCredentials? _subscriberCredential; + private readonly string? _subscriberServiceEndpoint; + private Dictionary _publisherClients; + private readonly SemaphoreSlim _publisherClientCreation; + + private const string _googlePubSubPushCount = "rcf/pubsub_push_count"; + private const string _googlePubSubPullCount = "rcf/pubsub_pull_count"; + private const string _googlePubSubAckCount = "rcf/pubsub_ack_count"; + private const string _googlePubSubNackCount = "rcf/pubsub_nack_count"; + private const string _googlePubSubNackFailCount = "rcf/pubsub_nack_fail_count"; + + public GooglePubSub( + ILogger logger, + IMetricService metricService, + IGoogleServices googleServices, + IGoogleApiRetry googleApiRetry, + IGlobalPrefix globalPrefix) + { + _logger = logger; + _googleServices = googleServices; + _googleApiRetry = googleApiRetry; + _metricService = metricService; + _globalPrefix = globalPrefix; + + _publisherClient = _googleServices.Build( + PublisherServiceApiClient.DefaultEndpoint, + PublisherServiceApiClient.DefaultScopes); + _subscriberClient = _googleServices.Build( + SubscriberServiceApiClient.DefaultEndpoint, + SubscriberServiceApiClient.DefaultScopes); + _publisherCredential = _googleServices.GetChannelCredentials( + PublisherServiceApiClient.DefaultEndpoint, + PublisherServiceApiClient.DefaultScopes); + _publisherServiceEndpoint = _googleServices.GetServiceEndpoint( + PublisherServiceApiClient.DefaultEndpoint, + PublisherServiceApiClient.DefaultScopes); + _subscriberCredential = _googleServices.GetChannelCredentials( + SubscriberServiceApiClient.DefaultEndpoint, + SubscriberServiceApiClient.DefaultScopes); + _subscriberServiceEndpoint = _googleServices.GetServiceEndpoint( + SubscriberServiceApiClient.DefaultEndpoint, + SubscriberServiceApiClient.DefaultScopes); + _publisherClients = new Dictionary(); + _publisherClientCreation = new SemaphoreSlim(1); + } + + public async Task PublishAsync(SerializedEvent 
jsonObject) + { + ArgumentNullException.ThrowIfNull(jsonObject); + + var topicRawName = "event~" + jsonObject.Type?.Replace(':', '.'); + var topicName = new TopicName( + _googleServices.ProjectId, + topicRawName); + + PublisherClient client; + await _publisherClientCreation.WaitAsync().ConfigureAwait(false); + try + { + if (!_publisherClients.TryGetValue(topicRawName, out client!)) + { + try + { + // Attempt to create the topic in case it doesn't exist. + await _publisherClient.CreateTopicAsync(topicName).ConfigureAwait(false); + } + catch (RpcException ex2) when (ex2.Status.StatusCode == StatusCode.AlreadyExists) + { + // Already exists. + } + + var builder = new PublisherClientBuilder + { + TopicName = topicName, + ClientCount = 1, + ApiSettings = null, + ChannelCredentials = _publisherCredential, + Endpoint = _publisherServiceEndpoint, + }; + client = await builder.BuildAsync().ConfigureAwait(false); + _publisherClients[topicRawName] = client; + } + } + finally + { + _publisherClientCreation.Release(); + } + + await client.PublishAsync(new PubsubMessage + { + Data = ByteString.CopyFromUtf8(JsonConvert.SerializeObject(jsonObject)) + }).ConfigureAwait(false); + + await _metricService.AddPoint( + _googlePubSubPushCount, + 1, + jsonObject.Project == null ? null : _globalPrefix.ParseInternal(string.Empty, jsonObject.Project), + new Dictionary + { + { "event_type", jsonObject.Type }, + { "entity_type", jsonObject.Key == null ? "(no entity in event)" : _globalPrefix.ParseInternal(string.Empty, jsonObject.Key).Path.Last().Kind }, + }).ConfigureAwait(false); + } + + public async Task SubscribeAndLoopUntilCancelled( + string subscriptionName, + string[] eventTypes, + SubscriptionCleanupPolicy cleanupPolicy, + Func> onMessage, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(eventTypes); + + if (cancellationToken.IsCancellationRequested) + { + return; + } + + // Subscribe to all of the topics for all of the event types in parallel. 
+ using var derivedCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); + var derivedCancellationToken = derivedCancellationTokenSource.Token; + var tasks = new Task[eventTypes.Length]; + for (var i = 0; i < eventTypes.Length; i++) + { + var eventType = eventTypes[i]; + + var topicName = new TopicName( + _googleServices.ProjectId, + "event~" + eventType.Replace(':', '.')); + var subscriberName = new SubscriptionName( + _googleServices.ProjectId, + subscriptionName + "~" + eventType.Replace(':', '.')); + + try + { + await _googleApiRetry.DoRetryableOperationAsync(GoogleApiCallContext.PubSub, _logger, async () => + { + await _subscriberClient.CreateSubscriptionAsync( + subscriberName, + topicName, + null, + 600, + derivedCancellationToken).ConfigureAwait(false); + }).ConfigureAwait(false); + } + catch (RpcException ex) when (ex.Status.StatusCode == StatusCode.NotFound) + { + try + { + // The topic wasn't found, so create it. + await _googleApiRetry.DoRetryableOperationAsync(GoogleApiCallContext.PubSub, _logger, async () => + { + await _publisherClient.CreateTopicAsync(topicName, derivedCancellationToken).ConfigureAwait(false); + }).ConfigureAwait(false); + } + catch (RpcException ex2) when (ex2.Status.StatusCode == StatusCode.AlreadyExists) + { + // Topic has been created in parallel. + } + + try + { + // Now create the subscription. + await _googleApiRetry.DoRetryableOperationAsync(GoogleApiCallContext.PubSub, _logger, async () => + { + await _subscriberClient.CreateSubscriptionAsync( + subscriberName, + topicName, + null, + 600, + derivedCancellationToken).ConfigureAwait(false); + }).ConfigureAwait(false); + } + catch (RpcException ex2) when (ex2.Status.StatusCode == StatusCode.AlreadyExists) + { + // Subscription already exists; everything is OK. + } + } + catch (RpcException ex) when (ex.Status.StatusCode == StatusCode.AlreadyExists) + { + // Subscription already exists; everything is OK. 
+ } + + // Set up the task to continously poll. + tasks[i] = Task.Run(async () => + { + var builder = new SubscriberClientBuilder + { + SubscriptionName = subscriberName, + ClientCount = 1, + ApiSettings = null, + ChannelCredentials = _subscriberCredential, + Endpoint = _subscriberServiceEndpoint, + Settings = new SubscriberClient.Settings + { + FlowControlSettings = new FlowControlSettings(1, null) + } + }; + var simpleSubscriber = await builder.BuildAsync().ConfigureAwait(false); + derivedCancellationToken.Register(() => + { + // Can't return a task here? + simpleSubscriber.StopAsync(TimeSpan.FromMinutes(3)); + }); + await simpleSubscriber.StartAsync(async (message, cancellationTokenInner) => + { + SerializedEvent? serializedEvent = null; + try + { + serializedEvent = JsonConvert.DeserializeObject( + message.Data.ToStringUtf8())!; + _logger.LogInformation($"Recieved event {serializedEvent.Id} from Google Pub/Sub for {eventType} events from {subscriptionName}"); + await _metricService.AddPoint( + _googlePubSubPullCount, + 1, + serializedEvent.Project == null ? null : _globalPrefix.ParseInternal(string.Empty, serializedEvent.Project), + new Dictionary + { + { "event_type", serializedEvent.Type }, + { "subscription_name", subscriptionName }, + { "entity_type", serializedEvent.Key == null ? "(no entity in event)" : _globalPrefix.ParseInternal(string.Empty, serializedEvent.Key).Path.Last().Kind }, + }).ConfigureAwait(false); + if (await onMessage(serializedEvent).ConfigureAwait(false)) + { + await _metricService.AddPoint( + _googlePubSubAckCount, + 1, + serializedEvent.Project == null ? null : _globalPrefix.ParseInternal(string.Empty, serializedEvent.Project), + new Dictionary + { + { "event_type", serializedEvent.Type }, + { "subscription_name", subscriptionName }, + { "entity_type", serializedEvent.Key == null ? 
"(no entity in event)" : _globalPrefix.ParseInternal(string.Empty, serializedEvent.Key).Path.Last().Kind }, + }).ConfigureAwait(false); + return SubscriberClient.Reply.Ack; + } + else + { + await _metricService.AddPoint( + _googlePubSubNackCount, + 1, + serializedEvent.Project == null ? null : _globalPrefix.ParseInternal(string.Empty, serializedEvent.Project), + new Dictionary + { + { "event_type", serializedEvent.Type }, + { "subscription_name", subscriptionName }, + { "entity_type", serializedEvent.Key == null ? "(no entity in event)" : _globalPrefix.ParseInternal(string.Empty, serializedEvent.Key).Path.Last().Kind }, + }).ConfigureAwait(false); + return SubscriberClient.Reply.Nack; + } + } + catch (Exception ex) + { + _logger.LogError(ex, ex.Message); + + if (serializedEvent != null) + { + await _metricService.AddPoint( + _googlePubSubNackFailCount, + 1, + serializedEvent.Project == null ? null : _globalPrefix.ParseInternal(string.Empty, serializedEvent.Project), + new Dictionary + { + { "event_type", serializedEvent.Type }, + { "subscription_name", subscriptionName }, + { "entity_type", serializedEvent.Key == null ? 
"(no entity in event)" : _globalPrefix.ParseInternal(string.Empty, serializedEvent.Key).Path.Last().Kind }, + }).ConfigureAwait(false); + } + return SubscriberClient.Reply.Nack; + } + }).ConfigureAwait(false); + }, derivedCancellationToken); + } + + await Task.WhenAny(tasks).ConfigureAwait(false); + + derivedCancellationTokenSource.Cancel(); + + if (cleanupPolicy == SubscriptionCleanupPolicy.DeleteSubscription) + { + for (var i = 0; i < eventTypes.Length; i++) + { + var eventType = eventTypes[i]; + + var subscriberName = new SubscriptionName( + _googleServices.ProjectId, + subscriptionName + "~" + eventType.Replace(':', '.')); + + await _googleApiRetry.DoRetryableOperationAsync(GoogleApiCallContext.PubSub, _logger, async () => + { + await _subscriberClient.DeleteSubscriptionAsync(subscriberName).ConfigureAwait(false); + }).ConfigureAwait(false); + } + } + } + + public void Dispose() + { + ((IDisposable)_publisherClientCreation).Dispose(); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Event/PubSub/IPubSub.cs b/UET/Redpoint.CloudFramework/Event/PubSub/IPubSub.cs new file mode 100644 index 00000000..5dfebce6 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Event/PubSub/IPubSub.cs @@ -0,0 +1,24 @@ +namespace Redpoint.CloudFramework.Event.PubSub +{ + using System; + using System.Threading; + using System.Threading.Tasks; + + public interface IPubSub + { + Task PublishAsync(SerializedEvent jsonObject); + + Task SubscribeAndLoopUntilCancelled( + string subscriptionName, + string[] eventTypes, + SubscriptionCleanupPolicy cleanupPolicy, + Func> onMessage, + CancellationToken cancellationToken); + } + + public enum SubscriptionCleanupPolicy + { + NoCleanup, + DeleteSubscription, + } +} diff --git a/UET/Redpoint.CloudFramework/Event/PubSub/NullPubSub.cs b/UET/Redpoint.CloudFramework/Event/PubSub/NullPubSub.cs new file mode 100644 index 00000000..597722b4 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Event/PubSub/NullPubSub.cs @@ -0,0 +1,22 @@ +namespace 
Redpoint.CloudFramework.Event.PubSub +{ + using System; + using System.Threading; + using System.Threading.Tasks; + + /// + /// Null implementation used when there is no other implementation to use. + /// + internal class NullPubSub : IPubSub + { + public Task PublishAsync(SerializedEvent jsonObject) + { + return Task.CompletedTask; + } + + public Task SubscribeAndLoopUntilCancelled(string subscriptionName, string[] eventTypes, SubscriptionCleanupPolicy cleanupPolicy, Func> onMessage, CancellationToken cancellationToken) + { + return Task.CompletedTask; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Event/SerializedEvent.cs b/UET/Redpoint.CloudFramework/Event/SerializedEvent.cs new file mode 100644 index 00000000..747185a2 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Event/SerializedEvent.cs @@ -0,0 +1,80 @@ +namespace Redpoint.CloudFramework.Event +{ + using Google.Cloud.Datastore.V1; + using Newtonsoft.Json; + using Newtonsoft.Json.Linq; + using NodaTime; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Repository; + + public class SerializedEvent + { + [JsonProperty("id")] + public required string Id { get; set; } + + [JsonProperty("utcTimestamp")] + public long UtcTimestamp { get; set; } + + [JsonProperty("type")] + public string? Type { get; set; } + + [JsonProperty("service")] + public string? Service { get; set; } + + [JsonProperty("project")] + public string? Project { get; set; } + + [JsonProperty("session")] + public string? Session { get; set; } + + [JsonProperty("req")] + public JObject? Request { get; set; } + + [JsonProperty("key")] + public string? Key { get; set; } + + [JsonProperty("entity")] + public JObject? Entity { get; set; } + + [JsonProperty("userdata")] + public JObject? 
Userdata { get; set; } + + public static Event Deserialize(IGlobalPrefix globalPrefix, SerializedEvent jsonObject) + { + ArgumentNullException.ThrowIfNull(globalPrefix); + ArgumentNullException.ThrowIfNull(jsonObject); + + var baseNamespace = string.Empty; + Key? projectKey = null; + Key? sessionKey = null; + Key? objectKey = null; + if (jsonObject.Project != null) + { + projectKey = globalPrefix.ParseInternal(string.Empty, (string)jsonObject.Project); + baseNamespace = "proj_" + projectKey.GetIdFromKey(); + } + if (jsonObject.Session != null) + { + sessionKey = globalPrefix.ParseInternal(baseNamespace, (string)jsonObject.Session); + } + if (jsonObject.Key != null) + { + objectKey = globalPrefix.ParseInternal(baseNamespace, (string)jsonObject.Key); + } + + return new Event + { + Id = globalPrefix.ParseInternal(baseNamespace, (string)jsonObject.Id), + UtcTimestamp = Instant.FromUnixTimeSeconds((long)jsonObject.UtcTimestamp), + EventType = (string?)jsonObject.Type, + ServiceIdentifier = (string?)jsonObject.Service, + Project = projectKey, + Session = sessionKey, + Request = (JObject?)jsonObject.Request, + Key = objectKey, + Entity = (JObject?)jsonObject.Entity, + Userdata = (JObject?)jsonObject.Userdata + }; + } + } +} diff --git a/UET/Redpoint.CloudFramework/GoogleCloudUsageFlag.cs b/UET/Redpoint.CloudFramework/GoogleCloudUsageFlag.cs new file mode 100644 index 00000000..5247deff --- /dev/null +++ b/UET/Redpoint.CloudFramework/GoogleCloudUsageFlag.cs @@ -0,0 +1,74 @@ +namespace Redpoint.CloudFramework +{ + using System; + + /// + /// Defines what Google Cloud services will be used at runtime. You can use this turn off certain services if you don't need them or otherwise have replacements. + /// + [Flags] +#pragma warning disable CA1711 // Identifiers should not have incorrect suffix + public enum GoogleCloudUsageFlag +#pragma warning restore CA1711 // Identifiers should not have incorrect suffix + { + /// + /// Do not register any Google Cloud services. 
+ /// + None = 0, + + /// + /// Use everything. + /// + Default = All, + + /// + /// Use Google Cloud Logging. + /// + [Obsolete("Google Cloud Logging support has been removed. Use Sentry instead.", true)] + Logging = 1, + + /// + /// Use Google Cloud Trace. + /// + [Obsolete("Google Cloud Trace support has been removed. Use Sentry instead.", true)] + Trace = 2, + + /// + /// Use Google Cloud Error Reporting. + /// + [Obsolete("Google Cloud Error Reporting support has been removed. Use Sentry instead.", true)] + ErrorReporting = 4, + + /// + /// Use Google Cloud Datastore. + /// + Datastore = 8, + + /// + /// Use Google Cloud Pub/Sub. + /// + PubSub = 16, + + /// + /// Use Google Cloud BigQuery. + /// + BigQuery = 32, + + /// + /// Report metrics into Google Cloud Monitoring. + /// + [Obsolete("Metrics will be replaced with Prometheus in the future.")] + Metrics = 64, + + /// + /// Load application configuration from Google Cloud Secret Manager. + /// + SecretManager = 128, + + /// + /// Use all Google Cloud services that the framework uses. 
+ /// +#pragma warning disable CA1069 // Enums values should not be duplicated + All = 255, +#pragma warning restore CA1069 // Enums values should not be duplicated + } +} diff --git a/UET/Redpoint.CloudFramework/GoogleInfrastructure/GoogleApiCallContext.cs b/UET/Redpoint.CloudFramework/GoogleInfrastructure/GoogleApiCallContext.cs new file mode 100644 index 00000000..cb4c8f78 --- /dev/null +++ b/UET/Redpoint.CloudFramework/GoogleInfrastructure/GoogleApiCallContext.cs @@ -0,0 +1,11 @@ +namespace Redpoint.CloudFramework.GoogleInfrastructure +{ + public enum GoogleApiCallContext + { + PubSub, + + Datastore, + + DatastoreTransactional + } +} diff --git a/UET/Redpoint.CloudFramework/GoogleInfrastructure/GoogleApiRetry.cs b/UET/Redpoint.CloudFramework/GoogleInfrastructure/GoogleApiRetry.cs new file mode 100644 index 00000000..544e3a46 --- /dev/null +++ b/UET/Redpoint.CloudFramework/GoogleInfrastructure/GoogleApiRetry.cs @@ -0,0 +1,197 @@ +namespace Redpoint.CloudFramework.GoogleInfrastructure +{ + using Grpc.Core; + using Microsoft.Extensions.Logging; + using Redpoint.CloudFramework.Repository; + using System; + using System.Threading; + using System.Threading.Tasks; + + /// + /// Provides a method for automatically retrying operations against + /// Google APIs if the API throws an error that's retryable. + /// + public class GoogleApiRetry : IGoogleApiRetry + { + private static bool IsRecoverable(RpcException e, GoogleApiCallContext callContext, int attempts) + { + switch (e.Status.StatusCode) + { + case StatusCode.Aborted: + { + if (callContext == GoogleApiCallContext.PubSub) + { + return true; + } + else + { + // Can't retry for Aborted on Datastore. 
+ return false; + } + } + case StatusCode.Cancelled: + return callContext == GoogleApiCallContext.PubSub; + case StatusCode.DeadlineExceeded: + case StatusCode.Internal: + return attempts == 0; + case StatusCode.ResourceExhausted: + return false; + case StatusCode.Unknown: + return callContext == GoogleApiCallContext.PubSub; + case StatusCode.Unavailable: + return true; + default: + return false; + } + } + + public void DoRetryableOperation(GoogleApiCallContext callContext, ILogger logger, Action operation) + { + ArgumentNullException.ThrowIfNull(operation); + + var attempts = 0; + var delay = 100; + bool needsRetry; + do + { + needsRetry = false; + + try + { + operation(); + return; + } + catch (RpcException ex) when (ex.IsContentionException()) + { + // Re-throw to allow the application to catch the content exception. + throw; + } + catch (RpcException ex) when (IsRecoverable(ex, callContext, attempts++)) + { + logger.LogWarning($"Got recoverable RPC exception: {ex.Status.StatusCode} \"{ex.Status.Detail}\", waiting ${delay}ms before retry..."); + + needsRetry = true; + Thread.Sleep(delay); + delay *= 2; + if (delay > 30000) + { + delay = 30000; + } + } + } + while (needsRetry); + } + + public T DoRetryableOperation(GoogleApiCallContext callContext, ILogger logger, Func operation) + { + ArgumentNullException.ThrowIfNull(operation); + + var attempts = 0; + var delay = 100; + bool needsRetry; + do + { + needsRetry = false; + + try + { + return operation(); + } + catch (RpcException ex) when (ex.IsContentionException()) + { + // Re-throw to allow the application to catch the content exception. 
+ throw; + } + catch (RpcException ex) when (IsRecoverable(ex, callContext, attempts++)) + { + logger.LogWarning($"Got recoverable RPC exception: {ex.Status.StatusCode} \"{ex.Status.Detail}\", waiting ${delay}ms before retry..."); + + needsRetry = true; + Thread.Sleep(delay); + delay *= 2; + if (delay > 30000) + { + delay = 30000; + } + } + } + while (needsRetry); + + throw new InvalidOperationException("Unexpected code reached in DoRetryableOperation"); + } + + public async Task DoRetryableOperationAsync(GoogleApiCallContext callContext, ILogger logger, Func operation) + { + ArgumentNullException.ThrowIfNull(operation); + + var attempts = 0; + var delay = 100; + bool needsRetry; + do + { + needsRetry = false; + + try + { + await operation().ConfigureAwait(false); + } + catch (RpcException ex) when (ex.IsContentionException()) + { + // Re-throw to allow the application to catch the content exception. + throw; + } + catch (RpcException ex) when (IsRecoverable(ex, callContext, attempts++)) + { + logger.LogWarning($"Got recoverable RPC exception: {ex.Status.StatusCode} \"{ex.Status.Detail}\", waiting ${delay}ms before retry..."); + + needsRetry = true; + await Task.Delay(delay).ConfigureAwait(false); + delay *= 2; + if (delay > 30000) + { + delay = 30000; + } + } + } + while (needsRetry); + } + + public async Task DoRetryableOperationAsync(GoogleApiCallContext callContext, ILogger logger, Func> operation) + { + ArgumentNullException.ThrowIfNull(operation); + + var attempts = 0; + var delay = 100; + bool needsRetry; + do + { + needsRetry = false; + + try + { + return await operation().ConfigureAwait(false); + } + catch (RpcException ex) when (ex.IsContentionException()) + { + // Re-throw to allow the application to catch the content exception. 
+ throw; + } + catch (RpcException ex) when (IsRecoverable(ex, callContext, attempts++)) + { + logger.LogWarning($"Got recoverable RPC exception: {ex.Status.StatusCode} \"{ex.Status.Detail}\", waiting ${delay}ms before retry..."); + + needsRetry = true; + await Task.Delay(delay).ConfigureAwait(false); + delay *= 2; + if (delay > 30000) + { + delay = 30000; + } + } + } + while (needsRetry); + + throw new InvalidOperationException("Unexpected code reached in DoRetryableOperationAsync"); + } + } +} diff --git a/UET/Redpoint.CloudFramework/GoogleInfrastructure/GoogleServices.cs b/UET/Redpoint.CloudFramework/GoogleInfrastructure/GoogleServices.cs new file mode 100644 index 00000000..dac79491 --- /dev/null +++ b/UET/Redpoint.CloudFramework/GoogleInfrastructure/GoogleServices.cs @@ -0,0 +1,188 @@ +namespace Redpoint.CloudFramework.GoogleInfrastructure +{ + using Google.Api.Gax.Grpc; + using Google.Apis.Auth.OAuth2; + using Google.Cloud.Datastore.V1; + using Google.Cloud.PubSub.V1; + using Grpc.Core; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Newtonsoft.Json; + using Newtonsoft.Json.Linq; + using Redpoint.CloudFramework.Startup; + using Redpoint.CloudFramework.Tracing; + using System; + using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; + using System.IO; + using System.Reflection; + + internal class GoogleServices : IGoogleServices + { + private readonly IHostEnvironment _hostEnvironment; + private readonly IOptionalHelmConfiguration? _optionalHelmConfiguration; + private readonly IManagedTracer _managedTracer; + + public GoogleServices( + IHostEnvironment hostEnvironment, + IManagedTracer managedTracer, + IServiceProvider serviceProvider, + IOptionalHelmConfiguration? 
optionalHelmConfiguration = null) + { + if (hostEnvironment.IsDevelopment() || hostEnvironment.IsStaging()) + { + ProjectId = "local-dev"; + } + else + { + var projectIdProvider = serviceProvider.GetService(); + if (projectIdProvider != null) + { + ProjectId = projectIdProvider.ProjectId; + } + else + { + var gcProjectId = Environment.GetEnvironmentVariable("GOOGLE_CLOUD_PROJECT_ID"); + if (!string.IsNullOrWhiteSpace(gcProjectId)) + { + ProjectId = gcProjectId; + } + else + { + var filePath = Environment.GetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS"); + if (string.IsNullOrWhiteSpace(filePath)) + { + throw new InvalidOperationException("GOOGLE_APPLICATION_CREDENTIALS is not set, and this application is not running in Development."); + } + var credentialText = File.ReadAllText(filePath); + if (string.IsNullOrWhiteSpace(credentialText)) + { + throw new InvalidOperationException($"GOOGLE_APPLICATION_CREDENTIALS at path '{filePath}' is empty, and this application is not running in Development."); + } + var content = JsonConvert.DeserializeObject(credentialText)!; + + if (content["project_id"] == null) + { + throw new InvalidOperationException("GOOGLE_APPLICATION_CREDENTIALS is missing the project_id value!"); + } + + ProjectId = content["project_id"]!.ToString(); + } + } + } + _hostEnvironment = hostEnvironment; + _optionalHelmConfiguration = optionalHelmConfiguration; + _managedTracer = managedTracer; + } + + public string ProjectId { get; set; } + + [SuppressMessage("Trimming", "IL2090:'this' argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. 
The generic parameter of the source method or type does not have matching annotations.", Justification = "This method call is not trimmed.")] + public TType Build(string endpoint, IEnumerable scopes) where TBuilder : ClientBuilderBase, new() + { + var builder = new TBuilder(); + builder.ChannelCredentials = GetChannelCredentials(endpoint, scopes); + builder.Endpoint = GetServiceEndpoint(endpoint, scopes); + var callInvoker = (CallInvoker)typeof(TBuilder) + .GetMethod( + "CreateCallInvoker", + BindingFlags.Instance | + BindingFlags.NonPublic | + BindingFlags.DoNotWrapExceptions)! + .Invoke(builder, null)!; + builder.CallInvoker = new TracingCallInvoker(callInvoker, _managedTracer); + // We have to set these back to defaults, because we've now instantiated the call invoker which used them. + builder.ChannelCredentials = null; + builder.Endpoint = null; + return builder.Build(); + } + + public TType BuildRest(IEnumerable scopes) where TBuilder : global::Google.Api.Gax.Rest.ClientBuilderBase, new() + { + var filePath = Environment.GetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS"); + if (string.IsNullOrWhiteSpace(filePath)) + { + throw new InvalidOperationException("BuildRest not supported without GOOGLE_APPLICATION_CREDENTIALS being specified."); + } + + using (var reader = new FileStream(filePath, FileMode.Open, FileAccess.Read)) + { + var googleCredentials = GoogleCredential.FromStream(reader); + if (googleCredentials.IsCreateScopedRequired) + { + googleCredentials = googleCredentials.CreateScoped(scopes); + } + + var builder = new TBuilder(); + builder.Credential = googleCredentials; + // @todo: Figure this out. + // builder.HttpClientFactory = new HttpClientFromMessageHandlerFactory(_httpClientFactory.CreateClient); + return builder.Build(); + } + } + + public ChannelCredentials? 
GetChannelCredentials(string endpoint, IEnumerable scopes) + { + if (_hostEnvironment.IsDevelopment() || _hostEnvironment.IsStaging()) + { + return ChannelCredentials.Insecure; + } + + return null; + } + + public string? GetServiceEndpoint(string endpoint, IEnumerable scopes) + { + if (_hostEnvironment.IsDevelopment() || _hostEnvironment.IsStaging()) + { + if (endpoint == PublisherServiceApiClient.DefaultEndpoint || + endpoint == SubscriberServiceApiClient.DefaultEndpoint) + { + var pubsubServerEnv = Environment.GetEnvironmentVariable("PUBSUB_SERVER"); + if (!string.IsNullOrWhiteSpace(pubsubServerEnv)) + { + return pubsubServerEnv; + } + + var helmConfig = _optionalHelmConfiguration?.GetHelmConfig(); + if (helmConfig != null) + { + return "localhost:" + helmConfig.PubSubPort; + } + + if (Environment.GetEnvironmentVariable("GITLAB_CI") == "true") + { + return "pubsub:9000"; + } + + return "localhost:9000"; + } + else if (endpoint == DatastoreClient.DefaultEndpoint) + { + var datastoreServerEnv = Environment.GetEnvironmentVariable("DATASTORE_SERVER"); + if (!string.IsNullOrWhiteSpace(datastoreServerEnv)) + { + return datastoreServerEnv; + } + + var helmConfig = _optionalHelmConfiguration?.GetHelmConfig(); + if (helmConfig != null) + { + return "localhost:" + helmConfig.DatastorePort; + } + + if (Environment.GetEnvironmentVariable("GITLAB_CI") == "true") + { + return "datastore:9001"; + } + + return "localhost:9001"; + } + + throw new InvalidOperationException($"The service at {endpoint} is not supported in the local development environment."); + } + + return null; + } + } +} diff --git a/UET/Redpoint.CloudFramework/GoogleInfrastructure/IGoogleApiRetry.cs b/UET/Redpoint.CloudFramework/GoogleInfrastructure/IGoogleApiRetry.cs new file mode 100644 index 00000000..c74edf31 --- /dev/null +++ b/UET/Redpoint.CloudFramework/GoogleInfrastructure/IGoogleApiRetry.cs @@ -0,0 +1,14 @@ +namespace Redpoint.CloudFramework.GoogleInfrastructure +{ + using Microsoft.Extensions.Logging; 
+ using System; + using System.Threading.Tasks; + + public interface IGoogleApiRetry + { + void DoRetryableOperation(GoogleApiCallContext callContext, ILogger logger, Action operation); + T DoRetryableOperation(GoogleApiCallContext callContext, ILogger logger, Func operation); + Task DoRetryableOperationAsync(GoogleApiCallContext callContext, ILogger logger, Func operation); + Task DoRetryableOperationAsync(GoogleApiCallContext callContext, ILogger logger, Func> operation); + } +} diff --git a/UET/Redpoint.CloudFramework/GoogleInfrastructure/IGoogleProjectIdProvider.cs b/UET/Redpoint.CloudFramework/GoogleInfrastructure/IGoogleProjectIdProvider.cs new file mode 100644 index 00000000..261e6c98 --- /dev/null +++ b/UET/Redpoint.CloudFramework/GoogleInfrastructure/IGoogleProjectIdProvider.cs @@ -0,0 +1,7 @@ +namespace Redpoint.CloudFramework.GoogleInfrastructure +{ + internal interface IGoogleProjectIdProvider + { + string ProjectId { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/GoogleInfrastructure/IGoogleServices.cs b/UET/Redpoint.CloudFramework/GoogleInfrastructure/IGoogleServices.cs new file mode 100644 index 00000000..e92a22b7 --- /dev/null +++ b/UET/Redpoint.CloudFramework/GoogleInfrastructure/IGoogleServices.cs @@ -0,0 +1,20 @@ +namespace Redpoint.CloudFramework.GoogleInfrastructure +{ + using Google.Api.Gax.Grpc; + using Grpc.Core; + using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; + + public interface IGoogleServices + { + string ProjectId { get; } + + TType Build(string endpoint, IEnumerable scopes) where TBuilder : ClientBuilderBase, new(); + + TType BuildRest(IEnumerable scopes) where TBuilder : global::Google.Api.Gax.Rest.ClientBuilderBase, new(); + + ChannelCredentials? GetChannelCredentials(string endpoint, IEnumerable scopes); + + string? 
GetServiceEndpoint(string endpoint, IEnumerable scopes); + } +} diff --git a/UET/Redpoint.CloudFramework/GoogleInfrastructure/TracingCallInvoker.cs b/UET/Redpoint.CloudFramework/GoogleInfrastructure/TracingCallInvoker.cs new file mode 100644 index 00000000..f4c7540d --- /dev/null +++ b/UET/Redpoint.CloudFramework/GoogleInfrastructure/TracingCallInvoker.cs @@ -0,0 +1,59 @@ +namespace Redpoint.CloudFramework.GoogleInfrastructure +{ + using Grpc.Core; + using Redpoint.CloudFramework.Tracing; + + internal class TracingCallInvoker : CallInvoker + { + private readonly CallInvoker _baseCallInvoker; + private readonly IManagedTracer _managedTracer; + + public TracingCallInvoker( + CallInvoker baseCallInvoker, + IManagedTracer managedTracer) + { + _baseCallInvoker = baseCallInvoker; + _managedTracer = managedTracer; + } + + public override AsyncClientStreamingCall AsyncClientStreamingCall(Method method, string? host, CallOptions options) + { + using (_managedTracer.StartSpan("grpc.client_streaming", $"{host}/{method.Name}")) + { + return _baseCallInvoker.AsyncClientStreamingCall(method, host, options); + } + } + + public override AsyncDuplexStreamingCall AsyncDuplexStreamingCall(Method method, string? host, CallOptions options) + { + using (_managedTracer.StartSpan("grpc.duplex_streaming", $"{host}/{method.Name}")) + { + return _baseCallInvoker.AsyncDuplexStreamingCall(method, host, options); + } + } + + public override AsyncServerStreamingCall AsyncServerStreamingCall(Method method, string? host, CallOptions options, TRequest request) + { + using (_managedTracer.StartSpan("grpc.server_streaming", $"{host}/{method.Name}")) + { + return _baseCallInvoker.AsyncServerStreamingCall(method, host, options, request); + } + } + + public override AsyncUnaryCall AsyncUnaryCall(Method method, string? 
host, CallOptions options, TRequest request) + { + using (_managedTracer.StartSpan("grpc.call_async", $"{host}/{method.Name}")) + { + return _baseCallInvoker.AsyncUnaryCall(method, host, options, request); + } + } + + public override TResponse BlockingUnaryCall(Method method, string? host, CallOptions options, TRequest request) + { + using (_managedTracer.StartSpan("grpc.call", $"{host}/{method.Name}")) + { + return _baseCallInvoker.BlockingUnaryCall(method, host, options, request); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/ICurrentTenantService.cs b/UET/Redpoint.CloudFramework/ICurrentTenantService.cs new file mode 100644 index 00000000..9a93428a --- /dev/null +++ b/UET/Redpoint.CloudFramework/ICurrentTenantService.cs @@ -0,0 +1,17 @@ +namespace Redpoint.CloudFramework +{ + using Google.Cloud.Datastore.V1; + using System.Threading.Tasks; + + public interface ICurrentTenantService + { + Task GetTenant(); + + Task GetTenantDatastoreKeyFromNamespace(string @namespace); + } + + public interface ICurrentTenant + { + string DatastoreNamespace { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Infrastructure/INamedEnum.cs b/UET/Redpoint.CloudFramework/Infrastructure/INamedEnum.cs new file mode 100644 index 00000000..1f4fa755 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Infrastructure/INamedEnum.cs @@ -0,0 +1,14 @@ +namespace Redpoint.CloudFramework.Infrastructure +{ + using System; + using System.Diagnostics.CodeAnalysis; + + /// + /// Internal interface for accessing enumeration type with fields. 
+ /// + internal interface INamedEnum + { + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] + Type EnumType { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Infrastructure/IRandomStringGenerator.cs b/UET/Redpoint.CloudFramework/Infrastructure/IRandomStringGenerator.cs new file mode 100644 index 00000000..fa6c6475 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Infrastructure/IRandomStringGenerator.cs @@ -0,0 +1,7 @@ +namespace Redpoint.CloudFramework.Infrastructure +{ + public interface IRandomStringGenerator + { + string GetRandomString(int halfLength); + } +} diff --git a/UET/Redpoint.CloudFramework/Infrastructure/NamedEnumAttribute.cs b/UET/Redpoint.CloudFramework/Infrastructure/NamedEnumAttribute.cs new file mode 100644 index 00000000..0e615adf --- /dev/null +++ b/UET/Redpoint.CloudFramework/Infrastructure/NamedEnumAttribute.cs @@ -0,0 +1,15 @@ +namespace Redpoint.CloudFramework.Infrastructure +{ + using System; + using System.Diagnostics.CodeAnalysis; + + /// + /// Indicates that an enumeration contains named values. + /// + [AttributeUsage(AttributeTargets.Enum)] + public sealed class NamedEnumAttribute<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] T> : Attribute, INamedEnum + { + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] + Type INamedEnum.EnumType => typeof(T); + } +} diff --git a/UET/Redpoint.CloudFramework/Infrastructure/NamedEnumValueAttribute.cs b/UET/Redpoint.CloudFramework/Infrastructure/NamedEnumValueAttribute.cs new file mode 100644 index 00000000..7ed43310 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Infrastructure/NamedEnumValueAttribute.cs @@ -0,0 +1,21 @@ +namespace Redpoint.CloudFramework.Infrastructure +{ + using System; + + /// + /// Associates a name with an enumeration value. 
+ /// + [AttributeUsage(AttributeTargets.Field)] + public sealed class NamedEnumValueAttribute : Attribute + { + public NamedEnumValueAttribute(string name) + { + Name = name; + } + + /// + /// The name associated with the enumeration value. + /// + public string Name { get; private set; } + } +} diff --git a/UET/Redpoint.CloudFramework/Infrastructure/RandomStringGenerator.cs b/UET/Redpoint.CloudFramework/Infrastructure/RandomStringGenerator.cs new file mode 100644 index 00000000..7fd94f57 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Infrastructure/RandomStringGenerator.cs @@ -0,0 +1,22 @@ +namespace Redpoint.CloudFramework.Infrastructure +{ + using System; + using System.Security.Cryptography; + + public class RandomStringGenerator : IRandomStringGenerator + { + private readonly RandomNumberGenerator _cryptoRng; + + public RandomStringGenerator() + { + _cryptoRng = RandomNumberGenerator.Create(); + } + + public string GetRandomString(int halfLength) + { + var bytes = new byte[halfLength]; + _cryptoRng.GetBytes(bytes); + return Convert.ToHexString(bytes).ToLowerInvariant(); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Locking/DatastoreBasedGlobalLockService.cs b/UET/Redpoint.CloudFramework/Locking/DatastoreBasedGlobalLockService.cs new file mode 100644 index 00000000..d3454991 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Locking/DatastoreBasedGlobalLockService.cs @@ -0,0 +1,461 @@ +namespace Redpoint.CloudFramework.Locking +{ + using Google.Cloud.Datastore.V1; + using Grpc.Core; + using Microsoft.Extensions.Logging; + using NodaTime; + using Redpoint.CloudFramework.Metric; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Repository; + using Redpoint.CloudFramework.Repository.Layers; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + + internal partial class DatastoreBasedGlobalLockService : IGlobalLockService 
+ { + private readonly IGlobalPrefix _globalPrefix; + private readonly ILogger _logger; + private readonly IDatastoreRepositoryLayer _datastoreRepositoryLayer; + private readonly IMetricService _metricService; + + private static readonly Duration _defaultExpiryDuration = Duration.FromMinutes(5); + private static readonly Duration _defaultRenewalDuration = Duration.FromSeconds(60); + + private const string _lockAcquireMetric = "rcf/lock_acquire_count"; + private const string _lockContentionFailureMetric = "rcf/lock_contention_failure_count"; + private const string _lockRenewedMetric = "rcf/lock_renewed_count"; + private const string _lockReleaseMetric = "rcf/lock_release_count"; + + public DatastoreBasedGlobalLockService( + IGlobalPrefix globalPrefix, + ILogger logger, + IDatastoreRepositoryLayer datastoreRepositoryLayer, + IMetricService metricService) + { + _globalPrefix = globalPrefix; + _logger = logger; + _datastoreRepositoryLayer = datastoreRepositoryLayer; + _metricService = metricService; + } + + public async Task Acquire(string @namespace, Key objectToLock) + { + var objectToLockName = _globalPrefix.CreateInternal(objectToLock); + _logger?.LogBeginningAcquisitionOfLock(@namespace, objectToLockName); + var lockKeyFactory = await _datastoreRepositoryLayer.GetKeyFactoryAsync(@namespace, null, CancellationToken.None).ConfigureAwait(false); + var lockKey = lockKeyFactory.CreateKey(objectToLockName); + var transaction = await _datastoreRepositoryLayer.BeginTransactionAsync(@namespace, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(false); + var acquisitionGuid = Guid.NewGuid().ToString(); + _logger?.LogBegunTransaction(acquisitionGuid, @namespace, objectToLockName); + var doRollback = false; + try + { + _logger?.LogLoadingExistingLockModel(acquisitionGuid, @namespace, objectToLockName); + var existingLock = await _datastoreRepositoryLayer.LoadAsync(@namespace, lockKey, transaction, null, 
CancellationToken.None).ConfigureAwait(false); + if (existingLock == null) + { + _logger?.LogNoExistingLockObject(acquisitionGuid, @namespace, objectToLockName); + + // No existing lock, use create semantics. + existingLock = new DefaultLockModel + { + Key = lockKey, + acquisitionGuid = acquisitionGuid, + dateExpiresUtc = SystemClock.Instance.GetCurrentInstant().Plus(_defaultExpiryDuration), + }; + + await _datastoreRepositoryLayer.CreateAsync(@namespace, new[] { existingLock }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstAsync().ConfigureAwait(false); + } + else + { + _logger?.LogFoundExistingLockObject(acquisitionGuid, @namespace, objectToLockName); + + // Existing lock, check if expired and use update semantics. + if (existingLock.dateExpiresUtc <= SystemClock.Instance.GetCurrentInstant()) + { + _logger?.LogExistingLockNaturallyExpired(acquisitionGuid, @namespace, objectToLockName); + + // Lock expired, we can take. Update the acquisition GUID (so the original owner + // knows they lost the lock if the attempt to renew it). 
+ existingLock.acquisitionGuid = acquisitionGuid; + existingLock.dateExpiresUtc = SystemClock.Instance.GetCurrentInstant().Plus(_defaultExpiryDuration); + + await _datastoreRepositoryLayer.UpdateAsync(@namespace, new[] { existingLock }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstAsync().ConfigureAwait(false); + } + else + { + _logger?.LogExistingLockStillInUseThrowing(acquisitionGuid, @namespace, objectToLockName); + + await _metricService.AddPoint( + _lockContentionFailureMetric, + 1, + null, + new Dictionary + { + { "namespace", @namespace }, + { "object_kind", objectToLock.Path.Last().Kind }, + }).ConfigureAwait(false); + + throw new LockAcquisitionException(objectToLockName); + } + } + + _logger?.LogAttemptingTransactionCommit(acquisitionGuid, @namespace, objectToLockName); + await _datastoreRepositoryLayer.CommitAsync(@namespace, transaction, null, CancellationToken.None).ConfigureAwait(false); + doRollback = false; + _logger?.LogSuccessfulTransactionCommit(acquisitionGuid, @namespace, objectToLockName); + + await _metricService.AddPoint( + _lockAcquireMetric, + 1, + null, + new Dictionary + { + { "namespace", @namespace }, + { "object_kind", objectToLock.Path.Last().Kind }, + }).ConfigureAwait(false); + + return new LockHandle( + _datastoreRepositoryLayer, + _logger, + _metricService, + existingLock.Key, + @namespace, + acquisitionGuid, + objectToLock.Path.Last().Kind); + } + catch (LockAcquisitionException) + { + // Just rethrow, we already logged why this was happening. 
+ throw; + } + catch (RpcException ex) when (ex.IsContentionException()) + { + _logger?.LogEncounteredLogContention(acquisitionGuid, @namespace, objectToLockName); + + await _metricService.AddPoint( + _lockContentionFailureMetric, + 1, + null, + new Dictionary + { + { "namespace", @namespace }, + { "object_kind", objectToLock.Path.Last().Kind }, + }).ConfigureAwait(false); + + throw new LockAcquisitionException(objectToLockName); + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.NotFound) + { + _logger?.LogEncounteredDisappearingLock(acquisitionGuid, @namespace, objectToLockName); + + await _metricService.AddPoint( + _lockContentionFailureMetric, + 1, + null, + new Dictionary + { + { "namespace", @namespace }, + { "object_kind", objectToLock.Path.Last().Kind }, + }).ConfigureAwait(false); + + throw new LockAcquisitionException(objectToLockName); + } + finally + { + _logger?.LogReachedFinallyBlock(acquisitionGuid, @namespace, objectToLockName); + if (doRollback) + { + _logger?.LogAttemptingRollbackTransaction(acquisitionGuid, @namespace, objectToLockName); + await _datastoreRepositoryLayer.RollbackAsync(@namespace, transaction, null, CancellationToken.None).ConfigureAwait(false); + _logger?.LogSuccessfulRollbackTransaction(acquisitionGuid, @namespace, objectToLockName); + } + } + } + + public async Task AcquireAndUse(string @namespace, Key objectToLock, Func block) + { + await using ((await Acquire(@namespace, objectToLock).ConfigureAwait(false)).ConfigureAwait(false)) + { + await block().ConfigureAwait(false); + } + } + + public async Task AcquireAndUse(string @namespace, Key objectToLock, Func> block) + { + await using ((await Acquire(@namespace, objectToLock).ConfigureAwait(false)).ConfigureAwait(false)) + { + return await block().ConfigureAwait(false); + } + } + + private class LockHandle : ILockHandle + { + private readonly IDatastoreRepositoryLayer _datastoreRepositoryLayer; + private readonly ILogger? 
_logger; + private readonly IMetricService _metricService; + private readonly string _objectKind; + private readonly Key _lockKey; + private readonly string _namespace; + private readonly string _acquisitionGuid; + private readonly CancellationTokenSource _cancellationTokenSource; + private readonly Task _automaticRenewalTask; + private bool _isReleased; + + public LockHandle( + IDatastoreRepositoryLayer datastoreRepositoryLayer, + ILogger? logger, + IMetricService metricService, + Key realLockKey, + string @namespace, + string acquisitionGuid, + string objectKind) + { + _datastoreRepositoryLayer = datastoreRepositoryLayer; + _logger = logger; + _lockKey = realLockKey; + _namespace = @namespace; + _acquisitionGuid = acquisitionGuid; + _cancellationTokenSource = new CancellationTokenSource(); + _isReleased = false; + _metricService = metricService; + _objectKind = objectKind; + + _logger?.LogLockHandleCreated(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + _automaticRenewalTask = Task.Run(AutomaticRenewal); + } + + private async Task AutomaticRenewal() + { + _logger?.LogAutomaticRenewalTaskRunning(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + while (!_isReleased) + { + _logger?.LogLockHandleIsNotReleased(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey(), (int)_defaultRenewalDuration.TotalMilliseconds); + + await Task.Delay((int)_defaultRenewalDuration.TotalMilliseconds, _cancellationTokenSource.Token).ConfigureAwait(false); + + if (_isReleased) + { + _logger?.LogLockHandleWasReleasedSinceRenewalDelayBegan(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + await _metricService.AddPoint( + _lockReleaseMetric, + 1, + null, + new Dictionary + { + { "namespace", _namespace ?? 
string.Empty }, + { "object_kind", _objectKind }, + }).ConfigureAwait(false); + + return; + } + + _logger?.LogBeginningRenewalTransaction(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + var transaction = await _datastoreRepositoryLayer.BeginTransactionAsync(_namespace, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(false); + _logger?.LogBegunRenewalTransaction(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + var doRollback = false; + try + { + _logger?.LogLoadingExistingLockModelRenewal(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + var existingLock = await _datastoreRepositoryLayer.LoadAsync(_namespace, _lockKey, transaction, null, CancellationToken.None).ConfigureAwait(false); + if (existingLock == null) + { + _logger?.LogUnreleasedLockDuringRenewalAcquiredAndReleasedElsewhere(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + await _metricService.AddPoint( + _lockReleaseMetric, + 1, + null, + new Dictionary + { + { "namespace", _namespace ?? string.Empty }, + { "object_kind", _objectKind }, + }).ConfigureAwait(false); + + // No lock? what? Assume someone else took control of the lock because + // we let it lapse, and then they were finished with it so they deleted it. + // In this case, the lock has been released due to expiry, so bail. + _isReleased = true; + return; + } + else + { + // Existing lock, check if we still have the handle on it. + if (existingLock.acquisitionGuid == _acquisitionGuid) + { + _logger?.LogUpdatingExpiryTime(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + // Update the lock's expiry time to our current time plus the default expiry. 
+ existingLock.dateExpiresUtc = SystemClock.Instance.GetCurrentInstant().Plus(_defaultExpiryDuration); + await _datastoreRepositoryLayer.UpdateAsync(_namespace, new[] { existingLock }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstAsync().ConfigureAwait(false); + } + else + { + _logger?.LogUnreleasedLockDuringRenewalAcquiredElsewhere(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + await _metricService.AddPoint( + _lockReleaseMetric, + 1, + null, + new Dictionary + { + { "namespace", _namespace ?? string.Empty }, + { "object_kind", _objectKind }, + }).ConfigureAwait(false); + + // Someone else now owns the lock! Treat it as released from us. + _isReleased = true; + return; + } + } + + _logger?.LogAttemptingCommitRenewalTransaction(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + await _datastoreRepositoryLayer.CommitAsync(_namespace, transaction, null, CancellationToken.None).ConfigureAwait(false); + doRollback = false; + _logger?.LogSuccessfulCommitRenewalTransaction(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + await _metricService.AddPoint( + _lockRenewedMetric, + 1, + null, + new Dictionary + { + { "namespace", _namespace ?? 
string.Empty }, + { "object_kind", _objectKind }, + }).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger?.LogExceptionDuringRenewal(ex, _acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + } + finally + { + _logger?.LogReachedFinallyBlockRenewal(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + if (doRollback) + { + _logger?.LogAttemptingRollbackTransactionRenewal(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + await _datastoreRepositoryLayer.RollbackAsync(_namespace, transaction, null, CancellationToken.None).ConfigureAwait(false); + _logger?.LogSuccessfulRollbackTransactionRenewal(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + } + } + } + + _logger?.LogAutomaticRenewalTaskFinished(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + } + + public async Task Release() + { + await DisposeAsync().ConfigureAwait(false); + } + + public async ValueTask DisposeAsync() + { + if (_isReleased) + { + return; + } + + try + { + _logger?.LogStartingReleaseOfLock(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + _isReleased = true; + _cancellationTokenSource.Cancel(); + + // Wait 1.5 seconds to ensure that we don't cause Datastore contention with ourselves due to short usage + // of the lock or the renewal just happening (we might still get contention from other processes). 
+ await Task.Delay(1500).ConfigureAwait(false); + + _logger?.LogBeginningReleaseTransaction(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + var transaction = await _datastoreRepositoryLayer.BeginTransactionAsync(_namespace, Repository.Transaction.TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(false); + _logger?.LogBegunReleaseTransaction(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + var doRollback = false; + try + { + _logger?.LogLoadingExistingLockModelForRelease(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + var existingLock = await _datastoreRepositoryLayer.LoadAsync(_namespace, _lockKey, transaction, null, CancellationToken.None).ConfigureAwait(false); + if (existingLock == null) + { + _logger?.LogUnreleasedLockDuringReleaseAcquiredAndReleasedElsewhere(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + // No lock? Someone else might have already grabbed it and released it (see + // the comment in renewal logic). + return; + } + else + { + // Existing lock, check if we still have the handle on it. + if (existingLock.acquisitionGuid == _acquisitionGuid) + { + _logger?.LogDeletingLockModel(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + // We can explicitly delete the lock because we still own it. + await _datastoreRepositoryLayer.DeleteAsync(_namespace, new[] { existingLock }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).ConfigureAwait(false); + } + else + { + _logger?.LogUnreleasedLockDuringReleasedAcquiredElsewhere(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + // Someone else now owns the lock! Treat it as released from us. 
+ _isReleased = true; + return; + } + } + + _logger?.LogAttemptingCommitReleaseTransaction(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + await _datastoreRepositoryLayer.CommitAsync(_namespace, transaction, null, CancellationToken.None).ConfigureAwait(false); + doRollback = false; + _logger?.LogSuccessfulCommitReleaseTransaction(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + + await _metricService.AddPoint( + _lockReleaseMetric, + 1, + null, + new Dictionary + { + { "namespace", _namespace ?? string.Empty }, + { "object_kind", _objectKind }, + }).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger?.LogExceptionDuringRelease(ex, _acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + throw; + } + finally + { + _logger?.LogReachedFinallyBlockRelease(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + if (doRollback) + { + _logger?.LogAttemptingRollbackTransactionRelease(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + await _datastoreRepositoryLayer.RollbackAsync(_namespace, transaction, null, CancellationToken.None).ConfigureAwait(false); + _logger?.LogSuccessfulRollbackTransactionRelease(_acquisitionGuid, _namespace, _lockKey.GetNameFromKey()); + } + } + } + finally + { + if (_isReleased) + { + try + { + await _automaticRenewalTask.ConfigureAwait(false); + } + catch + { + } + _cancellationTokenSource.Dispose(); + } + } + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Locking/DatastoreBasedGlobalLockServiceLog.cs b/UET/Redpoint.CloudFramework/Locking/DatastoreBasedGlobalLockServiceLog.cs new file mode 100644 index 00000000..5bcb591c --- /dev/null +++ b/UET/Redpoint.CloudFramework/Locking/DatastoreBasedGlobalLockServiceLog.cs @@ -0,0 +1,315 @@ +namespace Redpoint.CloudFramework.Locking +{ + using Microsoft.Extensions.Logging; + + internal static partial class DatastoreBasedGlobalLockServiceLog + { + [LoggerMessage( + EventId = 0, + Level = LogLevel.Information, + Message = "Beginning acquisition of 
lock {ns}/{objectToLockName}...")] + public static partial void LogBeginningAcquisitionOfLock( + this ILogger logger, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 1, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Begun transaction for {ns}/{objectToLockName}...")] + public static partial void LogBegunTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 2, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Loading existing lock model for {ns}/{objectToLockName}...")] + public static partial void LogLoadingExistingLockModel( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 3, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: No existing lock object for {ns}/{objectToLockName}, creating new lock model...")] + public static partial void LogNoExistingLockObject( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 4, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Found existing lock object for {ns}/{objectToLockName}, checking expiry...")] + public static partial void LogFoundExistingLockObject( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 5, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Existing lock {ns}/{objectToLockName} has naturally expired, taking...")] + public static partial void LogExistingLockNaturallyExpired( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 6, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Existing lock {ns}/{objectToLockName} still in use, throwing...")] + public static partial void LogExistingLockStillInUseThrowing( + this ILogger logger, string? 
acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 7, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Attempting commit of transaction for {ns}/{objectToLockName}...")] + public static partial void LogAttemptingTransactionCommit( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 8, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Successful commit of transaction for {ns}/{objectToLockName}, returning lock handle...")] + public static partial void LogSuccessfulTransactionCommit( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 9, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Encountered lock contention while acquiring {ns}/{objectToLockName}...")] + public static partial void LogEncounteredLogContention( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 10, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Encountered disappearing lock while acquiring {ns}/{objectToLockName}...")] + public static partial void LogEncounteredDisappearingLock( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 11, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Reached finally block for {ns}/{objectToLockName}...")] + public static partial void LogReachedFinallyBlock( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 12, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Attempting rollback of transaction for {ns}/{objectToLockName}...")] + public static partial void LogAttemptingRollbackTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? 
objectToLockName); + + [LoggerMessage( + EventId = 13, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Transaction rollback completed for {ns}/{objectToLockName}...")] + public static partial void LogSuccessfulRollbackTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 14, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Lock handle created for {ns}/{objectToLockName}...")] + public static partial void LogLockHandleCreated( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 15, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Automatic renewal task running for {ns}/{objectToLockName}...")] + public static partial void LogAutomaticRenewalTaskRunning( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 16, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Lock handle is not released {ns}/{objectToLockName}, delaying for {milliseconds}ms...")] + public static partial void LogLockHandleIsNotReleased( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName, int milliseconds); + + [LoggerMessage( + EventId = 17, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Lock handle was released since renewal delay began for {ns}/{objectToLockName}...")] + public static partial void LogLockHandleWasReleasedSinceRenewalDelayBegan( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 18, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Beginning renewal transaction for {ns}/{objectToLockName}...")] + public static partial void LogBeginningRenewalTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? 
objectToLockName); + + [LoggerMessage( + EventId = 19, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Begun renewal transaction for {ns}/{objectToLockName}...")] + public static partial void LogBegunRenewalTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 20, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Loading existing lock model for {ns}/{objectToLockName} (renewal)...")] + public static partial void LogLoadingExistingLockModelRenewal( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 21, + Level = LogLevel.Warning, + Message = "{acquisitionGuid}: Unreleased lock {ns}/{objectToLockName} during renewal appears to have been acquired and released by someone else!")] + public static partial void LogUnreleasedLockDuringRenewalAcquiredAndReleasedElsewhere( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 22, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Updating the expiry time on {ns}/{objectToLockName}...")] + public static partial void LogUpdatingExpiryTime( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 23, + Level = LogLevel.Warning, + Message = "{acquisitionGuid}: Unreleased lock {ns}/{objectToLockName} during renewal appears to have been acquired by someone else!")] + public static partial void LogUnreleasedLockDuringRenewalAcquiredElsewhere( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 24, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Attempting commit of renewal transaction for {ns}/{objectToLockName}...")] + public static partial void LogAttemptingCommitRenewalTransaction( + this ILogger logger, string? 
acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 25, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Successful commit of renewal transaction for {ns}/{objectToLockName}...")] + public static partial void LogSuccessfulCommitRenewalTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 26, + Level = LogLevel.Critical, + Message = "{acquisitionGuid}: Exception during renewal of {ns}/{objectToLockName}...")] + public static partial void LogExceptionDuringRenewal( + this ILogger logger, Exception ex, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 27, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Reached finally block for renewal of {ns}/{objectToLockName}...")] + public static partial void LogReachedFinallyBlockRenewal( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 28, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Attempting rollback of renewal transaction for {ns}/{objectToLockName}...")] + public static partial void LogAttemptingRollbackTransactionRenewal( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 29, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Renewal transaction rollback completed for {ns}/{objectToLockName}...")] + public static partial void LogSuccessfulRollbackTransactionRenewal( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 30, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Automatic renewal task finished for {ns}/{objectToLockName}...")] + public static partial void LogAutomaticRenewalTaskFinished( + this ILogger logger, string? acquisitionGuid, string ns, string? 
objectToLockName); + + [LoggerMessage( + EventId = 31, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Starting release of lock {ns}/{objectToLockName}...")] + public static partial void LogStartingReleaseOfLock( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 32, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Beginning release transaction for {ns}/{objectToLockName}...")] + public static partial void LogBeginningReleaseTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 33, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Begun release transaction for {ns}/{objectToLockName}...")] + public static partial void LogBegunReleaseTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 34, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Loading existing lock model for {ns}/{objectToLockName} (release)...")] + public static partial void LogLoadingExistingLockModelForRelease( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 35, + Level = LogLevel.Warning, + Message = "{acquisitionGuid}: Unreleased lock {ns}/{objectToLockName} during release appears to have been acquired and released by someone else!")] + public static partial void LogUnreleasedLockDuringReleaseAcquiredAndReleasedElsewhere( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 36, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Deleting the lock model for {ns}/{objectToLockName}...")] + public static partial void LogDeletingLockModel( + this ILogger logger, string? acquisitionGuid, string ns, string? 
objectToLockName); + + [LoggerMessage( + EventId = 37, + Level = LogLevel.Warning, + Message = "{acquisitionGuid}: Unreleased lock {ns}/{objectToLockName} during release appears to have been acquired by someone else!")] + public static partial void LogUnreleasedLockDuringReleasedAcquiredElsewhere( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 38, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Attempting commit of release transaction for {ns}/{objectToLockName}...")] + public static partial void LogAttemptingCommitReleaseTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 39, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Successful commit of release transaction for {ns}/{objectToLockName}...")] + public static partial void LogSuccessfulCommitReleaseTransaction( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 40, + Level = LogLevel.Critical, + Message = "{acquisitionGuid}: Exception during release of {ns}/{objectToLockName}...")] + public static partial void LogExceptionDuringRelease( + this ILogger logger, Exception ex, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 41, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Reached finally block for release of {ns}/{objectToLockName}...")] + public static partial void LogReachedFinallyBlockRelease( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + + [LoggerMessage( + EventId = 42, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Attempting rollback of release transaction for {ns}/{objectToLockName}...")] + public static partial void LogAttemptingRollbackTransactionRelease( + this ILogger logger, string? acquisitionGuid, string ns, string? 
objectToLockName); + + [LoggerMessage( + EventId = 43, + Level = LogLevel.Information, + Message = "{acquisitionGuid}: Release transaction rollback completed for {ns}/{objectToLockName}...")] + public static partial void LogSuccessfulRollbackTransactionRelease( + this ILogger logger, string? acquisitionGuid, string ns, string? objectToLockName); + } +} diff --git a/UET/Redpoint.CloudFramework/Locking/DefaultLockService.cs b/UET/Redpoint.CloudFramework/Locking/DefaultLockService.cs new file mode 100644 index 00000000..22bd3972 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Locking/DefaultLockService.cs @@ -0,0 +1,53 @@ +namespace Redpoint.CloudFramework.Locking +{ + using System; + using System.Threading.Tasks; + using Google.Cloud.Datastore.V1; + + public class DefaultLockService : ILockService + { + private readonly ICurrentTenantService _currentTenantService; + private readonly IGlobalLockService _globalLockService; + + public DefaultLockService( + ICurrentTenantService currentTenantService, + IGlobalLockService globalLockService) + { + _currentTenantService = currentTenantService; + _globalLockService = globalLockService; + } + + public async Task Acquire(Key objectToLock) + { + var currentTenant = await _currentTenantService.GetTenant().ConfigureAwait(false); + if (currentTenant == null) + { + throw new InvalidOperationException("ILockService can not be used without a tenant."); + } + var ns = currentTenant.DatastoreNamespace; + return await _globalLockService.Acquire(ns, objectToLock).ConfigureAwait(false); + } + + public async Task AcquireAndUse(Key objectToLock, Func block) + { + var currentTenant = await _currentTenantService.GetTenant().ConfigureAwait(false); + if (currentTenant == null) + { + throw new InvalidOperationException("ILockService can not be used without a tenant."); + } + var ns = currentTenant.DatastoreNamespace; + await _globalLockService.AcquireAndUse(ns, objectToLock, block).ConfigureAwait(false); + } + + public async Task 
AcquireAndUse(Key objectToLock, Func> block) + { + var currentTenant = await _currentTenantService.GetTenant().ConfigureAwait(false); + if (currentTenant == null) + { + throw new InvalidOperationException("ILockService can not be used without a tenant."); + } + var ns = currentTenant.DatastoreNamespace; + return await _globalLockService.AcquireAndUse(ns, objectToLock, block).ConfigureAwait(false); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Locking/IGlobalLockService.cs b/UET/Redpoint.CloudFramework/Locking/IGlobalLockService.cs new file mode 100644 index 00000000..1afd12d9 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Locking/IGlobalLockService.cs @@ -0,0 +1,42 @@ +namespace Redpoint.CloudFramework.Locking +{ + using Google.Cloud.Datastore.V1; + using System; + using System.Threading.Tasks; + + public interface IGlobalLockService + { + /// + /// Acquires a lock in the given namespace on the given object's key. Returns the lock handle + /// which you should then call and await , which will release + /// the lock globally. By default locks have an expiry of 5 minutes, and a background task is + /// automatically spawned in the current process to extend the expiration every minute. This ensures + /// that if this process crashes, another process will be able to obtain the lock within 5 + /// minutes of the current process going away. + /// + /// The datastore namespace to store the lock object in. + /// The object key to lock on. + /// The lock handle which you should then call and await . + Task Acquire(string @namespace, Key objectToLock); + + /// + /// Acquires a lock in the given namespace using , and then calls + /// the given lambda asynchronously. When the lambda completes for any reason, the lock is released. + /// + /// The datastore namespace to store the lock object in. + /// The object key to lock on. + /// The lambda to execute while the lock is held. + /// The task that you should await on. 
+ Task AcquireAndUse(string @namespace, Key objectToLock, Func block); + + /// + /// Acquires a lock in the given namespace using , and then calls + /// the given lambda asynchronously. When the lambda completes for any reason, the lock is released. + /// + /// The datastore namespace to store the lock object in. + /// The object key to lock on. + /// The lambda to execute while the lock is held. + /// The task with return value that you should await on. + Task AcquireAndUse(string @namespace, Key objectToLock, Func> block); + } +} diff --git a/UET/Redpoint.CloudFramework/Locking/ILockHandle.cs b/UET/Redpoint.CloudFramework/Locking/ILockHandle.cs new file mode 100644 index 00000000..49e1c62e --- /dev/null +++ b/UET/Redpoint.CloudFramework/Locking/ILockHandle.cs @@ -0,0 +1,10 @@ +namespace Redpoint.CloudFramework.Locking +{ + using System.Threading.Tasks; + + public interface ILockHandle : IAsyncDisposable + { + [Obsolete("Use DisposeAsync instead.")] + Task Release(); + } +} diff --git a/UET/Redpoint.CloudFramework/Locking/ILockService.cs b/UET/Redpoint.CloudFramework/Locking/ILockService.cs new file mode 100644 index 00000000..0ba38d22 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Locking/ILockService.cs @@ -0,0 +1,39 @@ +namespace Redpoint.CloudFramework.Locking +{ + using Google.Cloud.Datastore.V1; + using System; + using System.Threading.Tasks; + + public interface ILockService + { + /// + /// Acquires a lock in the current tenant's namespace on the given object's key. Returns the lock handle + /// which you should then call and await , which will release + /// the lock globally. By default locks have an expiry of 5 minutes, and a background task is + /// automatically spawned in the current process to extend the expiration every minute. This ensures + /// that if this process crashes, another process will be able to obtain the lock within 5 + /// minutes of the current process going away. + /// + /// The object key to lock on. 
+ /// The lock handle which you should then call and await . + Task Acquire(Key objectToLock); + + /// + /// Acquires a lock in the current tenant's namespace using , and then calls + /// the given lambda asynchronously. When the lambda completes for any reason, the lock is released. + /// + /// The object key to lock on. + /// The lambda to execute while the lock is held. + /// The task that you should await on. + Task AcquireAndUse(Key objectToLock, Func block); + + /// + /// Acquires a lock in the current tenant's namespace using , and then calls + /// the given lambda asynchronously. When the lambda completes for any reason, the lock is released. + /// + /// The object key to lock on. + /// The lambda to execute while the lock is held. + /// The task with return value that you should await on. + Task AcquireAndUse(Key objectToLock, Func> block); + } +} diff --git a/UET/Redpoint.CloudFramework/Locking/LockAcquisitionException.cs b/UET/Redpoint.CloudFramework/Locking/LockAcquisitionException.cs new file mode 100644 index 00000000..75ffd2f2 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Locking/LockAcquisitionException.cs @@ -0,0 +1,11 @@ +namespace Redpoint.CloudFramework.Locking +{ + using System; + + public class LockAcquisitionException : Exception + { + public LockAcquisitionException(string lockId) : base("Unable to acquire lock: " + lockId + ", already in use.") + { + } + } +} diff --git a/UET/Redpoint.CloudFramework/Metric/GoogleMetricService.cs b/UET/Redpoint.CloudFramework/Metric/GoogleMetricService.cs new file mode 100644 index 00000000..8e0d1652 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Metric/GoogleMetricService.cs @@ -0,0 +1,273 @@ +namespace Redpoint.CloudFramework.Metric +{ + using Microsoft.Extensions.Logging; + using System; + using System.Threading.Tasks; + using Google.Cloud.Monitoring.V3; + using System.Collections.Generic; + using Redpoint.CloudFramework.Prefix; + using Google.Cloud.Datastore.V1; + using System.Threading; + using 
NodaTime; + using System.Security.Cryptography; + using System.Linq; + using System.Text; + using Google.Api.Gax.ResourceNames; + using Redpoint.CloudFramework.GoogleInfrastructure; + using Microsoft.Extensions.Hosting; + + public sealed class GoogleMetricService : IMetricService, IAsyncDisposable + { + private readonly IGoogleServices _googleServices; + private readonly ILogger _logger; + private readonly IGlobalPrefix _globalPrefix; + private readonly MetricServiceClient? _client; + private readonly Task? _flushTask; + private readonly Dictionary? _buffer; + private readonly SemaphoreSlim? _bufferSemaphore; + + public static CancellationTokenSource ProgramExitCancellationTokenSource { get; } = new CancellationTokenSource(); + + public GoogleMetricService( + IHostEnvironment hostEnvironment, + IGoogleServices googleServices, + ILogger logger, + IGlobalPrefix globalPrefix) + { + ArgumentNullException.ThrowIfNull(googleServices); + + _googleServices = googleServices; + _logger = logger; + _globalPrefix = globalPrefix; + + if (hostEnvironment.IsDevelopment() || hostEnvironment.IsStaging()) + { + return; + } + + try + { + _client = googleServices.Build( + MetricServiceClient.DefaultEndpoint, + MetricServiceClient.DefaultScopes); + } + catch (NotSupportedException) + { + // This environment might not support reporting metrics (for example, unit tests). 
+ _client = null; + } + + if (_client != null) + { + _buffer = new Dictionary(); + _bufferSemaphore = new SemaphoreSlim(1); + _flushTask = Task.Run(BackgroundFlush); + } + } + + private class TimeSeriesBuffer + { + public TimeSeriesBuffer( + Google.Api.Metric metric, + Google.Api.MonitoredResource monitoredResource, + long pointCount) + { + Metric = metric; + MonitoredResource = monitoredResource; + PointCount = pointCount; + } + + public Google.Api.Metric Metric { get; } + + public Google.Api.MonitoredResource MonitoredResource { get; } + + public long PointCount { get; set; } + } + + private async Task BackgroundFlush() + { + var token = ProgramExitCancellationTokenSource.Token; + + var projectName = new ProjectName(_googleServices.ProjectId); + + while (!token.IsCancellationRequested) + { + var endTime = Google.Protobuf.WellKnownTypes.Timestamp.FromDateTimeOffset(DateTimeOffset.UtcNow); + + await Task.Delay((int)Duration.FromMinutes(1).TotalMilliseconds, token).ConfigureAwait(false); + + await _bufferSemaphore!.WaitAsync().ConfigureAwait(false); + try + { + foreach (var kv in _buffer!.ToArray()) + { + var timeSeriesData = new TimeSeries + { + Metric = kv.Value.Metric, + Resource = kv.Value.MonitoredResource, + MetricKind = Google.Api.MetricDescriptor.Types.MetricKind.Gauge, + ValueType = Google.Api.MetricDescriptor.Types.ValueType.Int64, + Points = + { + new Point + { + Interval = new TimeInterval + { + EndTime = endTime, + }, + Value = new TypedValue + { + Int64Value = kv.Value.PointCount, + } + } + } + }; + + await _client!.CreateTimeSeriesAsync(new CreateTimeSeriesRequest + { + ProjectName = projectName, + TimeSeries = + { + timeSeriesData, + } + }).ConfigureAwait(false); + + if (kv.Value.PointCount > 0) + { + // Reset the point count to zero so that if there's no data reported next time, we'll + // at least send a metric with a value of 0 so that graphs render correctly in + // Stackdriver. 
+ kv.Value.PointCount = 0; + } + else + { + // The last sent metric had a value of zero, remove the entry from the buffer so we + // don't send metrics if we don't need (once it's reset to 0, we only need to notify + // Stackdriver again if it starts being non-zero). + _buffer!.Remove(kv.Key); + } + } + } + catch (Exception ex) + { + _logger.LogCritical(ex, ex.Message); + } + finally + { + _bufferSemaphore.Release(); + } + } + } + + private string ComputeHashKey(string metricType, Key? projectKey, Dictionary? labels) + { + var stringToHash = metricType + ":"; + if (projectKey == null) + { + stringToHash += "(global):"; + } + else + { + stringToHash += _globalPrefix.Create(projectKey) + ":"; + } + if (labels != null) + { + foreach (var key in labels.Keys.OrderBy(x => x)) + { + stringToHash += key + "=" + labels[key] + ":"; + } + } + + return Convert.ToHexString(SHA256.HashData(Encoding.ASCII.GetBytes(stringToHash))).ToLowerInvariant(); + } + + public async Task AddPoint(string metricType, long amount, Key? projectKey, Dictionary? labels) + { + if (_client == null) + { + // Environment does not support reporting metrics. + return; + } + + try + { + await _bufferSemaphore!.WaitAsync().ConfigureAwait(false); + try + { + var hashKey = ComputeHashKey(metricType, projectKey, labels); + + if (!_buffer!.TryGetValue(hashKey, out TimeSeriesBuffer? timeSeriesBuffer)) + { + var metric = new Google.Api.Metric + { + Type = "custom.googleapis.com/" + metricType, + Labels = + { + { "tenant_id", projectKey == null ? 
string.Empty : _globalPrefix.Create(projectKey) } + } + }; + if (labels != null) + { + foreach (var kv in labels) + { + metric.Labels.Add(kv.Key, kv.Value); + } + }; + + var monitoredResource = new Google.Api.MonitoredResource + { + Type = "global", + Labels = + { + { "project_id", _googleServices.ProjectId } + } + }; + timeSeriesBuffer = new TimeSeriesBuffer( + metric, + monitoredResource, + 0); + _buffer.Add(hashKey, timeSeriesBuffer); + } + + timeSeriesBuffer.PointCount += amount; + } + finally + { + _bufferSemaphore.Release(); + } + } + catch (Exception ex) + { + // Log the error when trying to post the metric, but don't throw. + _logger.LogError(new EventId(1), ex, ex.Message); + } + } + + public void AddPointSync(string metricType, long amount, Key? projectKey, Dictionary? labels = null) + { + // Run on background thread, no need to synchronously wait for this to complete. + Task.Run(async () => + { + await AddPoint(metricType, amount, projectKey, labels).ConfigureAwait(false); + }); + } + + public async ValueTask DisposeAsync() + { + ProgramExitCancellationTokenSource.Cancel(); + if (_flushTask != null) + { + try + { + await _flushTask.ConfigureAwait(false); + } + catch + { + } + } + _flushTask?.Dispose(); + _bufferSemaphore?.Dispose(); + ProgramExitCancellationTokenSource.Dispose(); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Metric/IMetricService.cs b/UET/Redpoint.CloudFramework/Metric/IMetricService.cs new file mode 100644 index 00000000..f609f1c0 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Metric/IMetricService.cs @@ -0,0 +1,13 @@ +namespace Redpoint.CloudFramework.Metric +{ + using Google.Cloud.Datastore.V1; + using System.Collections.Generic; + using System.Threading.Tasks; + + public interface IMetricService + { + Task AddPoint(string metricType, long amount, Key? projectKey, Dictionary? labels = null); + + void AddPointSync(string metricType, long amount, Key? projectKey, Dictionary? 
labels = null); + } +} diff --git a/UET/Redpoint.CloudFramework/Metric/NullMetricService.cs b/UET/Redpoint.CloudFramework/Metric/NullMetricService.cs new file mode 100644 index 00000000..2306dbc9 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Metric/NullMetricService.cs @@ -0,0 +1,18 @@ +namespace Redpoint.CloudFramework.Metric +{ + using Google.Cloud.Datastore.V1; + using System.Collections.Generic; + using System.Threading.Tasks; + + internal class NullMetricService : IMetricService + { + public Task AddPoint(string metricType, long amount, Key? projectKey, Dictionary? labels = null) + { + return Task.CompletedTask; + } + + public void AddPointSync(string metricType, long amount, Key? projectKey, Dictionary? labels = null) + { + } + } +} diff --git a/UET/Redpoint.CloudFramework/Models/AttributedModel.cs b/UET/Redpoint.CloudFramework/Models/AttributedModel.cs new file mode 100644 index 00000000..784700b2 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/AttributedModel.cs @@ -0,0 +1,199 @@ +namespace Redpoint.CloudFramework.Models +{ + using Redpoint.CloudFramework.Repository.Converters.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + using Redpoint.CloudFramework.Repository.Geographic; + using System; + using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; + using System.Linq; + using System.Reflection; + + /// + /// A version of Model that you can inherit from, where the Datastore schema is defined + /// by attributes on the class and properties instead of implementing the abstract Model + /// methods. + /// + /// Implements caching so that when the application has to determine the schema from the + /// model class, it's slightly faster than the naive implementation of returning newly + /// constructed objects from the Model methods. 
+ /// + public class AttributedModel : Model, IGeoModel + { + private struct ModelInfo + { + public long _schemaVersion; + public string _kind; + public HashSet _indexes; + public Dictionary _types; + public Dictionary _defaultValues; + public Dictionary _geoHashKeyLengths; + } + + private static readonly IValueConverter[] _stringEnumValueConverters = new IValueConverter[] + { + new StringEnumValueConverter(), + new StringEnumArrayValueConverter(), + new StringEnumSetValueConverter(), + }; + + private static Dictionary _cachedInfo = new Dictionary(); + private readonly Type _type; + private readonly ModelInfo _modelInfo; + + [UnconditionalSuppressMessage("Trimming", "IL2072:Target parameter argument does not satisfy 'DynamicallyAccessedMembersAttribute' in call to target method. The return value of the source method does not have matching annotations.", Justification = "We're calling Activator.CreateInstance on the property type, where the property has a [Default] and thus must have a non-null value enforced by the C# compiler.")] + public AttributedModel() + { + _type = GetType(); + + if (!_cachedInfo.ContainsKey(_type)) + { + lock (_cachedInfo) + { + var kindAttribute = _type.GetCustomAttributes(typeof(IKindAttribute), false).Cast().FirstOrDefault() + ?? throw new InvalidOperationException($"Missing [Kind(\"...\")] attribute on {_type.FullName} class."); + if (kindAttribute.Type != _type) + { + throw new InvalidOperationException($"Attribute [Kind(\"...\")] has T that differs from runtime type of class, which is {_type.FullName}."); + } + + var typeWithRuntimeInfo = kindAttribute.Type; + + long schemaVersion = typeWithRuntimeInfo.GetCustomAttributes(typeof(SchemaVersionAttribute), false).Cast().FirstOrDefault()?.SchemaVersion ?? 
1; + string kind = kindAttribute?.Kind!; + if (string.IsNullOrWhiteSpace(kind)) + { + throw new InvalidOperationException($"Attribute [Kind(\"...\")] on {_type.FullName} has an invalid value."); + } + + var indexes = new HashSet(); + var types = new Dictionary(); + var defaults = new Dictionary(); + var geoHashKeyLengths = new Dictionary(); + foreach (var property in typeWithRuntimeInfo.GetProperties(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)) + { + var isIndexed = property.GetCustomAttributes(typeof(IndexedAttribute), false).Length > 0; + var type = property.GetCustomAttributes(typeof(TypeAttribute), false).Cast().FirstOrDefault(); + var @default = property.GetCustomAttributes(typeof(DefaultAttribute), false).Cast().FirstOrDefault(); + var geopoint = property.GetCustomAttributes(typeof(GeopointAttribute), false).Cast().FirstOrDefault(); + if (type != null) + { + if (isIndexed) + { + indexes.Add(property.Name); + } + + types.Add(property.Name, type.Type); + + if (type.Type == FieldType.Geopoint) + { + if (geopoint == null) + { + throw new InvalidOperationException($"Missing [Geopoint(...)] attribute on Geopoint field {typeWithRuntimeInfo.FullName}.{property.Name}. This attribute is required for Geopoint fields."); + } + else + { + geoHashKeyLengths.Add(property.Name, geopoint.HashKeyLength); + } + } + + if (@default != null) + { + if (property.PropertyType.IsArray) + { + // We only support empty (non-null) arrays as defaults. +#pragma warning disable IL3050 // The Array.CreateInstance will be called for an array type explicitly used by the codebase. 
+ defaults.Add(property.Name, Array.CreateInstance(property.PropertyType.GetElementType()!, 0)); +#pragma warning restore IL3050 + } + else + { + defaults.Add(property.Name, @default.DefaultValue); + } + } + else + { + if (property.PropertyType.IsValueType && + property.PropertyType.Name != typeof(Nullable<>).Name) + { + throw new InvalidOperationException($"Missing [Default(...)] attribute on {typeWithRuntimeInfo.FullName}.{property.Name}. Non-nullable value type properties must have the [Default] attribute. If you want to permit nulls, change this to a nullable value type instead (e.g. 'bool?' instead of 'bool')."); + } + } + } + } + + _cachedInfo[typeWithRuntimeInfo] = new ModelInfo() + { + _schemaVersion = schemaVersion, + _kind = kind, + _indexes = indexes, + _types = types, + _defaultValues = defaults, + _geoHashKeyLengths = geoHashKeyLengths, + }; + } + } + + _modelInfo = _cachedInfo[_type]; + + var conversionContext = new ClrValueConvertFromContext(); + foreach (var kv in _modelInfo._defaultValues) + { +#pragma warning disable IL2080 // To get to this point, _type must already have been checked with kindAttribute.Type != _type + var property = _type.GetProperty(kv.Key, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)!; + var didHandle = false; + foreach (var valueConverter in _stringEnumValueConverters) + { + if (valueConverter.GetFieldType() == _modelInfo._types[property.Name] && + valueConverter.IsConverterForClrType(property.PropertyType)) + { + property.SetValue( + this, + valueConverter.ConvertFromClrDefaultValue( + conversionContext, + property.Name, + property.PropertyType, + kv.Value)); + didHandle = true; + break; + } + } + if (!didHandle) + { + property.SetValue(this, kv.Value); + } +#pragma warning restore IL2080 + } + } + + public sealed override HashSet GetIndexes() + { + return _modelInfo._indexes; + } + + public sealed override string GetKind() + { + return _modelInfo._kind; + } + + public sealed override long 
GetSchemaVersion() + { + return _modelInfo._schemaVersion; + } + + public sealed override Dictionary GetTypes() + { + return _modelInfo._types; + } + + public sealed override Dictionary GetDefaultValues() + { + return _modelInfo._defaultValues; + } + + public Dictionary GetHashKeyLengthsForGeopointFields() + { + return _modelInfo._geoHashKeyLengths; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Models/DefaultAttribute.cs b/UET/Redpoint.CloudFramework/Models/DefaultAttribute.cs new file mode 100644 index 00000000..9edd367a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/DefaultAttribute.cs @@ -0,0 +1,39 @@ +namespace Redpoint.CloudFramework.Models +{ + using System; + + /// + /// Specifies a default value for the field in Datastore. + /// + /// + /// If Datastore would load an entity, and the value is null or not set, then the + /// framework returns the default value specified in the attribute. + /// + /// If you set the value of a reference field to null, the framework will store + /// the default value instead. It will still locally be null in C# until you next + /// load the model in C#. If you need to prevent C# from storing nulls, you should + /// enable the C# nullable feature. + /// + /// If a value-based property has a [Default(...)] attribute, then you can use + /// the non-nullable value type. For example, you can use "bool" instead of "bool?" + /// when declaring the property in your model. + /// + /// When you construct a model that inherits from AttributedModel that uses + /// the [Default] attribute, the AttributeModel base constructor initializes all + /// of the properties to their default values. This ensures that even newly + /// constructed models contain valid non-null values for defaulted properties + /// in Datastore. 
+ /// + [AttributeUsage(AttributeTargets.Property)] + public sealed class DefaultAttribute : Attribute + { + public DefaultAttribute(object defaultValue) + { + ArgumentNullException.ThrowIfNull(defaultValue); + + DefaultValue = defaultValue; + } + + public object DefaultValue { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Models/DefaultLockModel.cs b/UET/Redpoint.CloudFramework/Models/DefaultLockModel.cs new file mode 100644 index 00000000..4a30f9a4 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/DefaultLockModel.cs @@ -0,0 +1,40 @@ +namespace Redpoint.CloudFramework.Models +{ + using NodaTime; + using System.Collections.Generic; + + public class DefaultLockModel : Model + { + // The lock key is part of the name in the key. + public Instant? dateExpiresUtc { get; set; } + public string? acquisitionGuid { get; set; } + + public override string GetKind() + { + return "Lock"; + } + + public override long GetSchemaVersion() + { + return 1; + } + + public override Dictionary GetTypes() + { + return new Dictionary + { + { "dateExpiresUtc", FieldType.Timestamp }, + { "acquisitionGuid", FieldType.String }, + }; + } + + public override HashSet GetIndexes() + { + return new HashSet + { + "dateExpiresUtc", + "acquisitionGuid" + }; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Models/FieldType.cs b/UET/Redpoint.CloudFramework/Models/FieldType.cs new file mode 100644 index 00000000..009399b6 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/FieldType.cs @@ -0,0 +1,26 @@ +namespace Redpoint.CloudFramework.Models +{ + public enum FieldType + { +#pragma warning disable CA1720 // Identifier contains type name + String, + Boolean, + Integer, + Double, + Geopoint, + Key, + LocalKey, + GlobalKey, + UnsafeKey, + Timestamp, + Json, + File, + StringArray, + KeyArray, + EmbeddedEntity, + GlobalKeyArray, + UnsignedInteger, + UnsignedIntegerArray, +#pragma warning restore CA1720 // Identifier contains type name + } +} diff --git 
a/UET/Redpoint.CloudFramework/Models/GeopointAttribute.cs b/UET/Redpoint.CloudFramework/Models/GeopointAttribute.cs new file mode 100644 index 00000000..90a09868 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/GeopointAttribute.cs @@ -0,0 +1,64 @@ +namespace Redpoint.CloudFramework.Models +{ + using System; + + /// + /// Specifies the hash key length for Geopoint fields. This attribute + /// must be specified on all Geopoint fields if you're using AttributedModel. + /// + /// + /// The hash key length effectively determines the "granularity" of indexed + /// data. + ///

+ /// A larger hash key length will spread small geographic areas over lots of + /// partitions at the cost of more queries being performed for larger search + /// radii. A smaller hash key will mean that more entities have the same hash key, + /// and thus more entities will be filtered out server side (after having + /// been returned from Datastore). + ///

+ /// As a guide for choosing a value: + ///

+ /// - A value of 6, with a radius of 35km will query 38 partitions.
+ /// - A value of 2, with a radius of 35km will query 8 partitions. + ///

+ /// If you have a global application, and you're unsure of a value to choose, + /// we've found a value of 2 is a good balance for an application with a relatively + /// low number of entries spread over the entire planet. + ///

+ /// The hash key length can not be changed later without recreating your data. + ///
+ [AttributeUsage(AttributeTargets.Property)] + public sealed class GeopointAttribute : Attribute + { + /// + /// Constructs a geopoint attribute. + /// + /// + /// The hash key length effectively determines the "granularity" of indexed + /// data. + ///

+ /// A larger hash key length will spread small geographic areas over lots of + /// partitions at the cost of more queries being performed for larger search + /// radii. A smaller hash key will mean that more entities have the same hash key, + /// and thus more entities will be filtered out server side (after having + /// been returned from Datastore). + ///

+ /// As a guide for choosing a value: + ///

+ /// - A value of 6, with a radius of 35km will query 38 partitions.
+ /// - A value of 2, with a radius of 35km will query 8 partitions. + ///

+ /// If you have a global application, and you're unsure of a value to choose, + /// we've found a value of 2 is a good balance for an application with a relatively + /// low number of entries spread over the entire planet. + ///

+ /// The hash key length can not be changed later without recreating your data. + /// + public GeopointAttribute(ushort hashKeyLength) + { + HashKeyLength = hashKeyLength; + } + + public ushort HashKeyLength { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Models/IShardedCounterModel.cs b/UET/Redpoint.CloudFramework/Models/IShardedCounterModel.cs new file mode 100644 index 00000000..46e03f30 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/IShardedCounterModel.cs @@ -0,0 +1,27 @@ +namespace Redpoint.CloudFramework.Models +{ + [Obsolete("This interface is no longer used by IShardedCounter and IGlobalShardedCounter.")] + public interface IShardedCounterModel + { + /// + /// If specified, this Datastore field on the entity will have it's value set to "shard". + /// + /// The Datastore field name, or null for no field. + string? GetTypeFieldName(); + + /// + /// The name of the field to actually store the count in. The bypasses + /// the regular ORM datastore layer for performance, so it needs to explicitly know the Datastore field name here. + /// + /// The Datastore field name. + string GetCountFieldName(); + + /// + /// Converts the sharded counter name and shard index into the name for the Datastore key. + /// + /// The sharded counter name. + /// The index of the shard in the counter. + /// The formatted name. + string FormatShardName(string name, int index); + } +} diff --git a/UET/Redpoint.CloudFramework/Models/IndexedAttribute.cs b/UET/Redpoint.CloudFramework/Models/IndexedAttribute.cs new file mode 100644 index 00000000..2b34e2ba --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/IndexedAttribute.cs @@ -0,0 +1,12 @@ +namespace Redpoint.CloudFramework.Models +{ + using System; + + /// + /// Indicates that this property is indexed in Datastore. 
+ /// + [AttributeUsage(AttributeTargets.Property, AllowMultiple = false, Inherited = false)] + public sealed class IndexedAttribute : Attribute + { + } +} diff --git a/UET/Redpoint.CloudFramework/Models/KindAttribute.cs b/UET/Redpoint.CloudFramework/Models/KindAttribute.cs new file mode 100644 index 00000000..3f4443e0 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/KindAttribute.cs @@ -0,0 +1,31 @@ +namespace Redpoint.CloudFramework.Models +{ + using System; + using System.Diagnostics.CodeAnalysis; + + /// + /// Sets the kind of the entity when this model is stored in Datastore. + /// + [AttributeUsage(AttributeTargets.Class, AllowMultiple = false, Inherited = false)] + public sealed class KindAttribute< + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T> : Attribute, IKindAttribute + { + public KindAttribute(string kind) + { + Kind = kind; + } + + public string Kind { get; } + + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] + public Type Type => typeof(T); + } + + internal interface IKindAttribute + { + string Kind { get; } + + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] + Type Type { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Models/Model.cs b/UET/Redpoint.CloudFramework/Models/Model.cs new file mode 100644 index 00000000..3dc24336 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/Model.cs @@ -0,0 +1,47 @@ +namespace Redpoint.CloudFramework.Models +{ + using Google.Cloud.Datastore.V1; + using NodaTime; + using System; + using System.Collections.Generic; + + public abstract class Model + { + // Declaring this field as nullable would make 99% of reading code + // overly verbose handling scenarios that can never happen (it will never + // be null for entities loaded from the database). 
The only time that + // the key can be null is if you are creating an entity and haven't + // called CreateAsync yet. +#pragma warning disable CS8618 + public Key Key { get; set; } +#pragma warning restore CS8618 + + public Instant? dateCreatedUtc { get; internal set; } + public Instant? dateModifiedUtc { get; internal set; } + public long? schemaVersion { get; set; } + + /// + /// The original entity when it was loaded; this is used to clear caches + /// when appropriate. + /// + internal Dictionary? _originalData { get; set; } + + public abstract string GetKind(); + + public abstract Dictionary GetTypes(); + + public abstract HashSet GetIndexes(); + + public abstract long GetSchemaVersion(); + + public virtual string GetDatastoreNamespaceForLocalKeys() + { + throw new NotSupportedException("This model has a property of type 'local-key', but does not implement GetDatastoreNamespaceForLocalKeys"); + } + + public virtual Dictionary? GetDefaultValues() + { + return null; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Models/SchemaVersionAttribute.cs b/UET/Redpoint.CloudFramework/Models/SchemaVersionAttribute.cs new file mode 100644 index 00000000..e0ef87bb --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/SchemaVersionAttribute.cs @@ -0,0 +1,21 @@ +namespace Redpoint.CloudFramework.Models +{ + using System; + + /// + /// Overrides the schema version for this model. If you don't use this attribute, the schema + /// version defaults to 1. + /// + /// This attribute is also used to declare schema versions for nested JSON structures. 
+ /// + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct, AllowMultiple = false, Inherited = false)] + public sealed class SchemaVersionAttribute : Attribute + { + public SchemaVersionAttribute(long schemaVersion) + { + SchemaVersion = schemaVersion; + } + + public long SchemaVersion { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Models/TypeAttribute.cs b/UET/Redpoint.CloudFramework/Models/TypeAttribute.cs new file mode 100644 index 00000000..525e8f63 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Models/TypeAttribute.cs @@ -0,0 +1,19 @@ +namespace Redpoint.CloudFramework.Models +{ + using System; + + /// + /// Sets the type that this value will be indexed as in Datastore. If you don't + /// add this attribute to a property, it will be ignored by Datastore. + /// + [AttributeUsage(AttributeTargets.Property, AllowMultiple = false, Inherited = false)] + public sealed class TypeAttribute : Attribute + { + public TypeAttribute(FieldType type) + { + Type = type; + } + + public FieldType Type { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/ApiAttribute.cs b/UET/Redpoint.CloudFramework/OpenApi/ApiAttribute.cs new file mode 100644 index 00000000..f64a0e10 --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/ApiAttribute.cs @@ -0,0 +1,12 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using System; + + /// + /// Indicates that this MVC method should be exposed as an API method in the OpenAPI document. 
+ /// + [AttributeUsage(AttributeTargets.Method)] + public sealed class ApiAttribute : Attribute + { + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/Errorable.cs b/UET/Redpoint.CloudFramework/OpenApi/Errorable.cs new file mode 100644 index 00000000..5b8699c9 --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/Errorable.cs @@ -0,0 +1,80 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using Microsoft.AspNetCore.Mvc; + using Microsoft.AspNetCore.Mvc.Infrastructure; + using Microsoft.Extensions.DependencyInjection; + using System.Net; + using System.Threading.Tasks; + + public class Errorable : IActionResult where T : class + { + private int _statusCode = 200; + + public static Errorable FromError(HttpStatusCode status, string message) + { + return new Errorable(null, status, message); + } + + public static Errorable FromObject(T value) + { + return new Errorable(value, null, null); + } + + public static implicit operator Errorable(T value) => FromObject(value); + public static Errorable FromT(T value) => FromObject(value); + + private Errorable(T? value, HttpStatusCode? statusCode, string? errorMessage) + { + Value = value; + ErrorMessage = errorMessage; + + if (errorMessage != null && statusCode != null) + { + if (statusCode != null) + { + _statusCode = (int)statusCode.Value; + } + else + { + _statusCode = (int)HttpStatusCode.InternalServerError; + } + } + else + { + _statusCode = (int)HttpStatusCode.OK; + } + } + + public Task ExecuteResultAsync(ActionContext context) + { + ArgumentNullException.ThrowIfNull(context); + + var services = context.HttpContext.RequestServices; + var executor = services.GetRequiredService>(); + + // @todo: What we really want here is the default serializer for the + // value type, plus an additional errorMessage property. + return executor.ExecuteAsync(context, new JsonResult(this) + { + StatusCode = _statusCode, + }); + } + + public T? Value { get; } + + public string? 
ErrorMessage { get; } + + public T EnsureValue() + { + if (ErrorMessage != null) + { + throw new ErrorableException(ErrorMessage) + { + StatusCode = _statusCode, + }; + } + + return Value!; + } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/ErrorableException.cs b/UET/Redpoint.CloudFramework/OpenApi/ErrorableException.cs new file mode 100644 index 00000000..9d4386f9 --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/ErrorableException.cs @@ -0,0 +1,13 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using System; + + public class ErrorableException : Exception + { + public ErrorableException(string? message) : base(message) + { + } + + public required int StatusCode { get; init; } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/ExcludeSchemaAttribute.cs b/UET/Redpoint.CloudFramework/OpenApi/ExcludeSchemaAttribute.cs new file mode 100644 index 00000000..032f3760 --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/ExcludeSchemaAttribute.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using System; + + [AttributeUsage(AttributeTargets.Class)] + public sealed class ExcludeSchemaAttribute : Attribute + { + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/ExcludeSchemaDocumentFilter.cs b/UET/Redpoint.CloudFramework/OpenApi/ExcludeSchemaDocumentFilter.cs new file mode 100644 index 00000000..f6f7c6d3 --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/ExcludeSchemaDocumentFilter.cs @@ -0,0 +1,20 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using Microsoft.OpenApi.Models; + using Swashbuckle.AspNetCore.SwaggerGen; + using System.Linq; + + internal class ExcludeSchemaDocumentFilter : IDocumentFilter + { + public void Apply(OpenApiDocument swaggerDoc, DocumentFilterContext context) + { + foreach (var kv in swaggerDoc.Components.Schemas.ToArray()) + { + if (kv.Value.Deprecated) + { + swaggerDoc.Components.Schemas.Remove(kv.Key); + } + } + } + } +} diff --git 
a/UET/Redpoint.CloudFramework/OpenApi/ExcludeSchemaFilter.cs b/UET/Redpoint.CloudFramework/OpenApi/ExcludeSchemaFilter.cs new file mode 100644 index 00000000..4a0b1a0a --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/ExcludeSchemaFilter.cs @@ -0,0 +1,17 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using Microsoft.OpenApi.Models; + using Swashbuckle.AspNetCore.SwaggerGen; + using System.Reflection; + + internal class ExcludeSchemaFilter : ISchemaFilter + { + public void Apply(OpenApiSchema schema, SchemaFilterContext context) + { + if (context.Type.GetCustomAttribute() != null) + { + schema.Deprecated = true; + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/FormFileOperationFilter.cs b/UET/Redpoint.CloudFramework/OpenApi/FormFileOperationFilter.cs new file mode 100644 index 00000000..df57675a --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/FormFileOperationFilter.cs @@ -0,0 +1,29 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using Microsoft.AspNetCore.Http; + using Microsoft.OpenApi.Models; + using Swashbuckle.AspNetCore.SwaggerGen; + using System; + using System.Linq; + + public class FormFileOperationFilter : IOperationFilter + { + public void Apply(OpenApiOperation operation, OperationFilterContext context) + { + ArgumentNullException.ThrowIfNull(operation); + ArgumentNullException.ThrowIfNull(context); + + var fileUploadMime = "multipart/form-data"; + if (operation.RequestBody == null || !operation.RequestBody.Content.Any(x => x.Key.Equals(fileUploadMime, StringComparison.OrdinalIgnoreCase))) + return; + + var fileParams = context.MethodInfo.GetParameters().Where(p => p.ParameterType == typeof(IFormFile)); + operation.RequestBody.Content[fileUploadMime].Schema.Properties = + fileParams.ToDictionary(k => k.Name ?? 
string.Empty, v => new OpenApiSchema() + { + Type = "string", + Format = "binary" + }); + } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/InstantJsonConverter.cs b/UET/Redpoint.CloudFramework/OpenApi/InstantJsonConverter.cs new file mode 100644 index 00000000..27dd8b3b --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/InstantJsonConverter.cs @@ -0,0 +1,23 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using NodaTime; + using NodaTime.Text; + using System; + using System.Text.Json; + using System.Text.Json.Serialization; + + public class InstantJsonConverter : JsonConverter + { + public override Instant Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + return OffsetDateTimePattern.Rfc3339.Parse(reader.GetString()!).Value.ToInstant(); + } + + public override void Write(Utf8JsonWriter writer, Instant value, JsonSerializerOptions options) + { + ArgumentNullException.ThrowIfNull(writer); + + writer.WriteStringValue(OffsetDateTimePattern.Rfc3339.Format(value.WithOffset(Offset.Zero))); + } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/OnlyApiMethodsFilter.cs b/UET/Redpoint.CloudFramework/OpenApi/OnlyApiMethodsFilter.cs new file mode 100644 index 00000000..10e46ebc --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/OnlyApiMethodsFilter.cs @@ -0,0 +1,24 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using Microsoft.AspNetCore.Mvc.Controllers; + using Microsoft.OpenApi.Models; + using Swashbuckle.AspNetCore.SwaggerGen; + + public class OnlyApiMethodsFilter : IDocumentFilter + { + public void Apply(OpenApiDocument swaggerDoc, DocumentFilterContext context) + { + ArgumentNullException.ThrowIfNull(swaggerDoc); + ArgumentNullException.ThrowIfNull(context); + + foreach (var apiDescription in context.ApiDescriptions) + { + if (((ControllerActionDescriptor)apiDescription.ActionDescriptor).MethodInfo.GetCustomAttributes(typeof(ApiAttribute), false).Length == 0) + { + var key = "/" + 
apiDescription.RelativePath?.TrimEnd('/'); + swaggerDoc.Paths.Remove(key); + } + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/PaginatedQueryCursorSchemaFilter.cs b/UET/Redpoint.CloudFramework/OpenApi/PaginatedQueryCursorSchemaFilter.cs new file mode 100644 index 00000000..d49ee4f6 --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/PaginatedQueryCursorSchemaFilter.cs @@ -0,0 +1,18 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using Microsoft.OpenApi.Models; + using Redpoint.CloudFramework.Repository.Pagination; + using Swashbuckle.AspNetCore.SwaggerGen; + + internal class PaginatedQueryCursorSchemaFilter : ISchemaFilter + { + public void Apply(OpenApiSchema schema, SchemaFilterContext context) + { + if (context.Type == typeof(PaginatedQueryCursor)) + { + schema.Properties = null; + schema.Type = "string"; + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/RedpointApplicationModelProvider.cs b/UET/Redpoint.CloudFramework/OpenApi/RedpointApplicationModelProvider.cs new file mode 100644 index 00000000..e9c063ee --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/RedpointApplicationModelProvider.cs @@ -0,0 +1,58 @@ +#nullable enable + +namespace Redpoint.CloudFramework.OpenApi +{ + using Microsoft.AspNetCore.Http; + using Microsoft.AspNetCore.Mvc; + using Microsoft.AspNetCore.Mvc.ApplicationModels; + using System.Threading.Tasks; + + public class RedpointApplicationModelProvider : IApplicationModelProvider + { + public int Order => 200; + + public void OnProvidersExecuted(ApplicationModelProviderContext context) + { + } + + public static System.Type NormalizePotentialAsyncType(System.Type type) + { + ArgumentNullException.ThrowIfNull(type); + + if (type.IsConstructedGenericType) + { + if (type.GetGenericTypeDefinition() == typeof(Task<>)) + { + return type.GetGenericArguments()[0]; + } + if (type.GetGenericTypeDefinition() == typeof(ValueTask<>)) + { + return type.GetGenericArguments()[0]; + } + } + return type; + } + + public 
void OnProvidersExecuting(ApplicationModelProviderContext context) + { + ArgumentNullException.ThrowIfNull(context); + + foreach (var controller in context.Result.Controllers) + { + foreach (var action in controller.Actions) + { + var returnType = NormalizePotentialAsyncType(action.ActionMethod.ReturnType); + + if (returnType.IsConstructedGenericType && + returnType.GetGenericTypeDefinition() == typeof(Errorable<>)) + { + action.Filters.Add(new ProducesResponseTypeAttribute(returnType, StatusCodes.Status200OK)); + action.Filters.Add(new ProducesResponseTypeAttribute(returnType, StatusCodes.Status400BadRequest)); + action.Filters.Add(new ProducesResponseTypeAttribute(returnType, StatusCodes.Status404NotFound)); + action.Filters.Add(new ProducesResponseTypeAttribute(returnType, StatusCodes.Status500InternalServerError)); + } + } + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/RequiredSchemaFilter.cs b/UET/Redpoint.CloudFramework/OpenApi/RequiredSchemaFilter.cs new file mode 100644 index 00000000..34de5685 --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/RequiredSchemaFilter.cs @@ -0,0 +1,27 @@ +namespace Redpoint.CloudFramework.OpenApi +{ + using Microsoft.OpenApi.Models; + using Swashbuckle.AspNetCore.SwaggerGen; + using System.Linq; + + internal class RequiredSchemaFilter : ISchemaFilter + { + public void Apply(OpenApiSchema schema, SchemaFilterContext context) + { + if (schema.Properties == null) + { + return; + } + + var notNullableProperties = schema + .Properties + .Where(x => !schema.Required.Contains(x.Key)) + .ToList(); + + foreach (var property in notNullableProperties) + { + schema.Required.Add(property.Key); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/OpenApi/ServiceCollectionExtensions.cs b/UET/Redpoint.CloudFramework/OpenApi/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..0bf7415b --- /dev/null +++ b/UET/Redpoint.CloudFramework/OpenApi/ServiceCollectionExtensions.cs @@ -0,0 +1,101 @@ +namespace 
Redpoint.CloudFramework.OpenApi +{ + using Microsoft.AspNetCore.Mvc.ApplicationModels; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.OpenApi.Models; + using NodaTime; + using System; + using System.IO; + using System.Reflection; + + public static class ServiceCollectionExtensions + { + public static IMvcBuilder AddCloudFrameworkCustomisation(this IMvcBuilder builder) + { + builder.AddJsonOptions(options => + { + options.JsonSerializerOptions.Converters.Add(new InstantJsonConverter()); + }); + return builder; + } + + public static void AddSwaggerGenForReactApp(this IServiceCollection services, string productName = "Internal API") + { + services.AddTransient(); + + services.AddSwaggerGen(options => + { + options.DocumentFilter(); + options.OperationFilter(); + options.SchemaFilter(); + options.SchemaFilter(); + options.SchemaFilter(); + options.DocumentFilter(); + options.MapType(() => + { + return new OpenApiSchema + { + Type = "string", + Format = "date-time", + }; + }); + + options.CustomSchemaIds(x => + { + if (x.IsConstructedGenericType && + x.GetGenericTypeDefinition() == typeof(Errorable<>)) + { + return "Errorable" + x.GetGenericArguments()[0].Name; + } + else if (x.IsConstructedGenericType) + { + var genericName = x.GetGenericTypeDefinition().Name; + return string.Concat(genericName.AsSpan(0, x.GetGenericTypeDefinition().Name.IndexOf('`', StringComparison.Ordinal)), "_", x.GetGenericArguments()[0].Name); + } + else + { + return x.Name; + } + }); + + options.SupportNonNullableReferenceTypes(); + options.UseAllOfToExtendReferenceSchemas(); + options.UseAllOfForInheritance(); + + options.CustomOperationIds(e => $"{e.ActionDescriptor.RouteValues["action"]}"); + + options.SwaggerDoc("v1", new OpenApiInfo + { + Version = "1.0.0", + Title = productName, + Description = $"Describes the API endpoints of {productName}, which are used by the frontend React components. Not for public use." 
+ }); + + options.DocInclusionPredicate((docName, description) => + { + return description.RelativePath != null && description.RelativePath.StartsWith("api/", StringComparison.Ordinal); + }); + + var xmlFilename = $"{Assembly.GetEntryAssembly()!.GetName().Name}.xml"; + if (System.IO.File.Exists(Path.Combine(AppContext.BaseDirectory, xmlFilename))) + { + options.IncludeXmlComments(Path.Combine(AppContext.BaseDirectory, xmlFilename)); + } + else + { +#pragma warning disable IL3000 // Avoid accessing Assembly file path when publishing as a single file + var assemblyLocation = Assembly.GetEntryAssembly()!.Location; +#pragma warning restore IL3000 // Avoid accessing Assembly file path when publishing as a single file + if (!string.IsNullOrEmpty(assemblyLocation)) + { + var altXmlFilename = Path.Combine(Path.GetDirectoryName(assemblyLocation)!, xmlFilename); + if (System.IO.File.Exists(altXmlFilename)) + { + options.IncludeXmlComments(altXmlFilename); + } + } + } + }); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Prefix/DefaultPrefix.cs b/UET/Redpoint.CloudFramework/Prefix/DefaultPrefix.cs new file mode 100644 index 00000000..a11c9efd --- /dev/null +++ b/UET/Redpoint.CloudFramework/Prefix/DefaultPrefix.cs @@ -0,0 +1,73 @@ +namespace Redpoint.CloudFramework.Prefix +{ + using System; + using System.Threading.Tasks; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + + public class DefaultPrefix : IPrefix + { + private readonly ICurrentTenantService _currentProjectService; + private readonly IGlobalPrefix _globalPrefix; + + public DefaultPrefix(ICurrentTenantService currentProjectService, IGlobalPrefix globalPrefix) + { + _currentProjectService = currentProjectService; + _globalPrefix = globalPrefix; + } + + public string Create(Key key) + { + return _globalPrefix.Create(key); + } + + public string CreateInternal(Key key) + { + return _globalPrefix.CreateInternal(key); + } + + public async Task Parse(string identifier) + { + var currentTenant 
= await _currentProjectService.GetTenant().ConfigureAwait(false); + if (currentTenant == null) + { + throw new InvalidOperationException("IPrefix can not be used without a tenant."); + } + var ns = currentTenant.DatastoreNamespace; + return _globalPrefix.Parse(ns, identifier); + } + + public async Task ParseInternal(string identifier) + { + var currentTenant = await _currentProjectService.GetTenant().ConfigureAwait(false); + if (currentTenant == null) + { + throw new InvalidOperationException("IPrefix can not be used without a tenant."); + } + var ns = currentTenant.DatastoreNamespace; + return _globalPrefix.ParseInternal(ns, identifier); + } + + public async Task ParseLimited(string identifier, string kind) + { + var currentTenant = await _currentProjectService.GetTenant().ConfigureAwait(false); + if (currentTenant == null) + { + throw new InvalidOperationException("IPrefix can not be used without a tenant."); + } + var ns = currentTenant.DatastoreNamespace; + return _globalPrefix.ParseLimited(ns, identifier, kind); + } + + public async Task ParseLimited(string identifier) where T : Model, new() + { + var currentTenant = await _currentProjectService.GetTenant().ConfigureAwait(false); + if (currentTenant == null) + { + throw new InvalidOperationException("IPrefix can not be used without a tenant."); + } + var ns = currentTenant.DatastoreNamespace; + return _globalPrefix.ParseLimited(ns, identifier); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Prefix/GlobalPrefix.cs b/UET/Redpoint.CloudFramework/Prefix/GlobalPrefix.cs new file mode 100644 index 00000000..49694481 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Prefix/GlobalPrefix.cs @@ -0,0 +1,361 @@ +namespace Redpoint.CloudFramework.Prefix +{ + using System; + using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; + using System.Globalization; + using System.Linq; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.GoogleInfrastructure; + using 
Redpoint.CloudFramework.Models; + + public class GlobalPrefix : IGlobalPrefix + { + private readonly IReadOnlyDictionary _prefixes; + private readonly IReadOnlyDictionary _reversePrefixes; + private readonly IGoogleServices _googleServices; + private static readonly char[] _dashSeparator = new[] { '-' }; + + private class GlobalPrefixRegistration : IPrefixRegistration + { + private readonly Dictionary _prefixes; + private readonly Dictionary _reversePrefixes; + + public GlobalPrefixRegistration() + { + _prefixes = new Dictionary(); + _reversePrefixes = new Dictionary(); + } + + public void RegisterPrefix<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>(string prefix) where T : Model, new() + { + var kind = new T().GetKind(); + + if (_prefixes.TryGetValue(prefix, out string? existingKind)) + { + throw new InvalidOperationException($"Prefix '{prefix}' is used by both '{existingKind}' and '{kind}'. Remove the prefix conflict."); + } + if (_reversePrefixes.TryGetValue(kind, out string? existingPrefix)) + { + throw new InvalidOperationException($"Model kind '{kind}' already has prefix '{existingPrefix}' assigned to it, so you can't also register '{prefix}'. Remove the prefix conflict."); + } + + _prefixes.Add(prefix, kind); + _reversePrefixes.Add(kind, prefix); + } + + public IReadOnlyDictionary Prefixes => _prefixes; + public IReadOnlyDictionary ReversePrefixes => _reversePrefixes; + } + + public GlobalPrefix( + IPrefixProvider[] prefixProviders, + IGoogleServices googleServices) + { + ArgumentNullException.ThrowIfNull(prefixProviders); + + _googleServices = googleServices; + + var registration = new GlobalPrefixRegistration(); + foreach (var provider in prefixProviders) + { + provider.RegisterPrefixes(registration); + } + + _prefixes = registration.Prefixes; + _reversePrefixes = registration.ReversePrefixes; + } + + /// + /// Parse a public identifier into a Google Datastore key object. 
+ /// + /// The datastore namespace of the resulting key. + /// The identifier to parse. + /// A key object. + public Key Parse(string datastoreNamespace, string identifier) + { + var prefix = ParsePathElement(identifier); + + var k = new Key + { + PartitionId = new PartitionId(_googleServices.ProjectId, datastoreNamespace) + }; + k.Path.Add(prefix); + return k; + } + + /// + /// Parse a public identifier into a Google Datastore key object and verify it's kind. + /// + /// The datastore namespace of the resulting key. + /// The identifier to parse. + /// The resulting kind that the key must match. + /// A key object. + public Key ParseLimited(string datastoreNamespace, string identifier, string kind) + { + if (string.IsNullOrWhiteSpace(kind)) + { + throw new ArgumentNullException(nameof(kind)); + } + + var result = Parse(datastoreNamespace, identifier); + if (result.Path.Last().Kind != kind) + { + throw new IdentifierWrongTypeException(identifier, kind); + } + + return result; + } + + /// + /// Parse a public identifier into a Google Datastore key object and verify it's kind. + /// + /// The datastore model that this must match. + /// The datastore namespace of the resulting key. + /// The identifier to parse. + /// A key object. + public Key ParseLimited(string datastoreNamespace, string identifier) where T : Model, new() + { + return ParseLimited(datastoreNamespace, identifier, new T().GetKind()); + } + + /// + /// Parse an internal or public identifier into a Google Datastore key. + /// + /// The datastore namespace of the resulting key. + /// The identifier to parse. + /// A key object. 
+ public Key ParseInternal(string datastoreNamespace, string identifier) + { + if (string.IsNullOrWhiteSpace(identifier)) + { + throw new ArgumentNullException(nameof(identifier)); + } + + var isNamespacedKey = false; + if (identifier.StartsWith('#')) + { + isNamespacedKey = true; + identifier = identifier.Substring(1); + } + + var identifiers = GlobalPrefix.ParsePipeSeperated(identifier); + var pathElements = new List(); + + string projectId; + string namespaceId; + int offset = 0; + + if (isNamespacedKey) + { + if (identifiers[0] != "v1") + { + throw new ArgumentException("Namespaced key is not a supported version"); + } + + projectId = identifiers[1]; + namespaceId = identifiers[2]; + + offset = 3; + } + else + { + projectId = _googleServices.ProjectId; + namespaceId = datastoreNamespace; + } + + for (var i = offset; i < identifiers.Length; i++) + { + var component = identifiers[i]; + var colonIndex = component.IndexOf(':', StringComparison.Ordinal); + if (colonIndex != -1) + { + var kind = component.Substring(0, colonIndex); + var ident = component.Substring(colonIndex + 1); + if (ident.StartsWith("id=", StringComparison.Ordinal)) + { + var id = long.Parse(ident.AsSpan("id=".Length), CultureInfo.InvariantCulture); + pathElements.Add(new Key.Types.PathElement(kind, id)); + } + else if (ident.StartsWith("name=", StringComparison.Ordinal)) + { + var name = ident.Substring("name=".Length); + pathElements.Add(new Key.Types.PathElement(kind, name)); + } + else + { + throw new InvalidOperationException("Unknown ID or name for identifier '" + component + "'"); + } + } + else + { + pathElements.Add(ParsePathElement(component)); + } + } + + var k = new Key + { + PartitionId = new PartitionId(projectId, namespaceId) + }; + k.Path.AddRange(pathElements); + return k; + } + + /// + /// Creates a public identifier from a Datastore key. + /// + /// The datastore key to create an identifier from. + /// The public identifier. 
+ public string Create(Key key) + { + ArgumentNullException.ThrowIfNull(key); + if (key.Path.Count == 0) + { + throw new InvalidOperationException("Datastore key does not have any path elements; can not generate public identifier"); + } + if (key.Path.Count > 1) + { + throw new InvalidOperationException("Datastore key has more than one path element (nested children), can not generate public identifier"); + } + return CreatePathElement(key.Path[0]); + } + + /// + /// Creates a public or internal identifier from a Datastore key. + /// + /// The datastore key to create an identifier from. + /// + /// The public or internal identifier. + public string CreateInternal(Key key, PathGenerationMode pathGenerationMode = PathGenerationMode.Default) + { + ArgumentNullException.ThrowIfNull(key); + + var keyComponents = new List + { + "v1", + key.PartitionId.ProjectId, + key.PartitionId.NamespaceId + }; + + for (var i = 0; i < key.Path.Count; i++) + { + var pathElement = key.Path[i]; + + if (!_reversePrefixes.ContainsKey(pathElement.Kind) || + pathGenerationMode == PathGenerationMode.NoShortPathComponents || + pathElement.IdTypeCase != Key.Types.PathElement.IdTypeOneofCase.Id) + { + if (pathElement.IdTypeCase == Key.Types.PathElement.IdTypeOneofCase.Id) + { + if (pathElement.Id <= 0) + { + throw new InvalidOperationException("Numeric component must be a positive value"); + } + } + + if (pathElement.IdTypeCase == Key.Types.PathElement.IdTypeOneofCase.Name) + { + keyComponents.Add(pathElement.Kind + ":name=" + pathElement.Name.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("|", "\\|", StringComparison.Ordinal)); + } + else + { + keyComponents.Add(pathElement.Kind + ":id=" + pathElement.Id); + } + } + else + { + keyComponents.Add(CreatePathElement(pathElement)); + } + } + + return "#" + string.Join("|", keyComponents); + } + + private static string[] ParsePipeSeperated(string value) + { + var results = new List(); + var buffer = string.Empty; + var isEscaped = false; + for 
(var v = 0; v < value.Length; v++) + { + if (isEscaped) + { + buffer += value[v]; + isEscaped = false; + } + else + { + if (value[v] == '\\') + { + isEscaped = true; + continue; + } + + if (value[v] == '|') + { + results.Add(buffer); + buffer = string.Empty; + continue; + } + + buffer += value[v]; + } + } + if (buffer.Length > 0) + { + results.Add(buffer); + } + return results.ToArray(); + } + + private Key.Types.PathElement ParsePathElement(string identifier) + { + ArgumentNullException.ThrowIfNull(identifier); + + var components = identifier.Split(_dashSeparator, 2); + if (components.Length != 2) + { + throw new IdentifierInvalidException(identifier, "Missing seperator in identifier"); + } + + var prefix = components[0].ToLowerInvariant(); + if (!_prefixes.TryGetValue(prefix, out string? value)) + { + throw new IdentifierInvalidException(identifier, "Unknown prefix in identifier"); + } + if (string.IsNullOrWhiteSpace(components[1])) + { + throw new IdentifierInvalidException(identifier, "Missing numeric component to identifier"); + } + var parsable = long.TryParse(components[1], NumberStyles.Any, CultureInfo.InvariantCulture, + out var numericIdentifier); + if (!parsable || numericIdentifier.ToString(CultureInfo.InvariantCulture) != components[1]) + { + throw new IdentifierInvalidException(identifier, "Badly formatted numeric component in identifier"); + } + + return new Key.Types.PathElement(value, numericIdentifier); + } + + private string CreatePathElement(Key.Types.PathElement pathElement) + { + if (string.IsNullOrWhiteSpace(pathElement.Kind)) + { + throw new ArgumentException("Kind property on datastore key is null or empty", nameof(pathElement)); + } + if (!_reversePrefixes.TryGetValue(pathElement.Kind, out string? 
value)) + { + throw new ArgumentException("No prefix for object kind: " + pathElement.Kind, nameof(pathElement)); + } + if (pathElement.IdTypeCase != Key.Types.PathElement.IdTypeOneofCase.Id) + { + throw new ArgumentException("Only numeric based Datastore keys can be publicly prefixed", nameof(pathElement)); + } + if (pathElement.Id < 0) + { + throw new ArgumentException("Numeric component must be a positive value", nameof(pathElement)); + } + return value + "-" + pathElement.Id; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Prefix/IGlobalPrefix.cs b/UET/Redpoint.CloudFramework/Prefix/IGlobalPrefix.cs new file mode 100644 index 00000000..7cf7cd1f --- /dev/null +++ b/UET/Redpoint.CloudFramework/Prefix/IGlobalPrefix.cs @@ -0,0 +1,15 @@ +namespace Redpoint.CloudFramework.Prefix +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + + public interface IGlobalPrefix + { + string Create(Key key); + string CreateInternal(Key key, PathGenerationMode pathGenerationMode = PathGenerationMode.Default); + Key Parse(string datastoreNamespace, string identifier); + Key ParseInternal(string datastoreNamespace, string identifier); + Key ParseLimited(string datastoreNamespace, string identifier, string kind); + Key ParseLimited(string datastoreNamespace, string identifier) where T : Model, new(); + } +} diff --git a/UET/Redpoint.CloudFramework/Prefix/IPrefix.cs b/UET/Redpoint.CloudFramework/Prefix/IPrefix.cs new file mode 100644 index 00000000..aa60b50e --- /dev/null +++ b/UET/Redpoint.CloudFramework/Prefix/IPrefix.cs @@ -0,0 +1,16 @@ +namespace Redpoint.CloudFramework.Prefix +{ + using System.Threading.Tasks; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + + public interface IPrefix + { + string Create(Key key); + string CreateInternal(Key key); + Task Parse(string identifier); + Task ParseInternal(string identifier); + Task ParseLimited(string identifier, string kind); + Task ParseLimited(string identifier) where T : 
Model, new(); + } +} diff --git a/UET/Redpoint.CloudFramework/Prefix/IPrefixProvider.cs b/UET/Redpoint.CloudFramework/Prefix/IPrefixProvider.cs new file mode 100644 index 00000000..a5515ffc --- /dev/null +++ b/UET/Redpoint.CloudFramework/Prefix/IPrefixProvider.cs @@ -0,0 +1,14 @@ +namespace Redpoint.CloudFramework.Prefix +{ + /// + /// Maps shortened prefixes like 'u' to kinds of models like 'user'. + /// + public interface IPrefixProvider + { + /// + /// Called by to perform prefix registration. + /// + /// The interface that can be used by the provider to register prefixes. + void RegisterPrefixes(IPrefixRegistration registration); + } +} diff --git a/UET/Redpoint.CloudFramework/Prefix/IPrefixRegistration.cs b/UET/Redpoint.CloudFramework/Prefix/IPrefixRegistration.cs new file mode 100644 index 00000000..3bcc3002 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Prefix/IPrefixRegistration.cs @@ -0,0 +1,15 @@ +namespace Redpoint.CloudFramework.Prefix +{ + using Redpoint.CloudFramework.Models; + using System.Diagnostics.CodeAnalysis; + + public interface IPrefixRegistration + { + /// + /// Register the model to use the specified prefix. + /// + /// The model type. + /// The prefix to use for the model. 
+ void RegisterPrefix<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>(string prefix) where T : Model, new(); + } +} diff --git a/UET/Redpoint.CloudFramework/Prefix/IdentifierInvalidException.cs b/UET/Redpoint.CloudFramework/Prefix/IdentifierInvalidException.cs new file mode 100644 index 00000000..ae6cf84f --- /dev/null +++ b/UET/Redpoint.CloudFramework/Prefix/IdentifierInvalidException.cs @@ -0,0 +1,17 @@ +namespace Redpoint.CloudFramework.Prefix +{ + using System; + + public class IdentifierInvalidException : Exception + { + public IdentifierInvalidException(string identifier, string reason) + : base("Identifier invalid: " + identifier + ", " + reason) + { + Identifier = identifier; + Reason = reason; + } + + public string Identifier { get; } + public string Reason { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Prefix/IdentifierWrongTypeException.cs b/UET/Redpoint.CloudFramework/Prefix/IdentifierWrongTypeException.cs new file mode 100644 index 00000000..66470f95 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Prefix/IdentifierWrongTypeException.cs @@ -0,0 +1,17 @@ +namespace Redpoint.CloudFramework.Prefix +{ + using System; + + public class IdentifierWrongTypeException : Exception + { + public IdentifierWrongTypeException(string identifier, string expectedKind) + : base("Identifier wrong type: " + identifier + ", expected " + expectedKind) + { + Identifier = identifier; + ExpectedKind = expectedKind; + } + + public string Identifier { get; } + public string ExpectedKind { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Prefix/PathGenerationMode.cs b/UET/Redpoint.CloudFramework/Prefix/PathGenerationMode.cs new file mode 100644 index 00000000..bb946f8f --- /dev/null +++ b/UET/Redpoint.CloudFramework/Prefix/PathGenerationMode.cs @@ -0,0 +1,8 @@ +namespace Redpoint.CloudFramework.Prefix +{ + public enum PathGenerationMode + { + Default, + NoShortPathComponents, + } +} diff --git 
a/UET/Redpoint.CloudFramework/Processor/ContinuousProcessorHostedService.cs b/UET/Redpoint.CloudFramework/Processor/ContinuousProcessorHostedService.cs new file mode 100644 index 00000000..5a824fa2 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Processor/ContinuousProcessorHostedService.cs @@ -0,0 +1,82 @@ +namespace Redpoint.CloudFramework.Processor +{ + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using System.Threading.Tasks; + + internal class ContinuousProcessorHostedService : IHostedService, IAsyncDisposable where T : IContinuousProcessor + { + private readonly IServiceProvider _serviceProvider; + private readonly ILogger> _logger; + + private CancellationTokenSource? _cancellationTokenSource = null; + private Task? _runningTask = null; + + public ContinuousProcessorHostedService( + IServiceProvider serviceProvider, + ILogger> logger) + { + _serviceProvider = serviceProvider; + _logger = logger; + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + await StopInternalAsync(cancellationToken).ConfigureAwait(false); + + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StartAsync: Creating new CTS."); + var cancellationTokenSource = _cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); + + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StartAsync: Starting the running task via Task.Run."); + _runningTask = Task.Run(() => + { + var instance = _serviceProvider.GetRequiredService(); + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StartAsync: Calling ExecuteAsync inside Task.Run."); + return instance.ExecuteAsync(cancellationTokenSource.Token); + }, cancellationTokenSource.Token); + } + + private async Task StopInternalAsync(CancellationToken cancellationToken) + { + if (_cancellationTokenSource != null) + { + if (_runningTask != null) 
+ { + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StopInternalAsync: Cancelling CTS."); + _cancellationTokenSource.Cancel(); + try + { + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StopInternalAsync: Awaiting the running task to allow it to gracefully stop..."); + await _runningTask.ConfigureAwait(false); + } + catch (OperationCanceledException) + { + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StopInternalAsync: Awaited task threw OperationCanceledException (this is normal)."); + } + finally + { + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StopInternalAsync: Clearing _runningTask to null."); + _runningTask = null; + } + } + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StopInternalAsync: Disposing CTS."); + _cancellationTokenSource.Dispose(); + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StopInternalAsync: Clearing CTS to null."); + _cancellationTokenSource = null; + } + } + + public async Task StopAsync(CancellationToken cancellationToken) + { + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.StopAsync: Deferring to StopInternalAsync."); + await StopInternalAsync(cancellationToken).ConfigureAwait(false); + } + + public async ValueTask DisposeAsync() + { + _logger.LogInformation($"ContinuousProcessorHostedService<{typeof(T).Name}>.DisposeAsync: Deferring to StopAsync."); + await StopAsync(CancellationToken.None).ConfigureAwait(false); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Processor/IContinuousProcessor.cs b/UET/Redpoint.CloudFramework/Processor/IContinuousProcessor.cs new file mode 100644 index 00000000..099024b0 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Processor/IContinuousProcessor.cs @@ -0,0 +1,13 @@ +namespace Redpoint.CloudFramework.Processor +{ + using Microsoft.Extensions.DependencyInjection; + using 
Microsoft.Extensions.Hosting; + using System.Threading.Tasks; + + public interface IContinuousProcessor + { + static abstract string RoleName { get; } + + Task ExecuteAsync(CancellationToken shutdownCancellationToken); + } +} diff --git a/UET/Redpoint.CloudFramework/Processor/IQuartzScheduledProcessorBinding.cs b/UET/Redpoint.CloudFramework/Processor/IQuartzScheduledProcessorBinding.cs new file mode 100644 index 00000000..3ed3405a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Processor/IQuartzScheduledProcessorBinding.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.Processor +{ + using Quartz; + + internal interface IQuartzScheduledProcessorBinding + { + void Bind(QuartzOptions options); + } +} diff --git a/UET/Redpoint.CloudFramework/Processor/IScheduledProcessor.cs b/UET/Redpoint.CloudFramework/Processor/IScheduledProcessor.cs new file mode 100644 index 00000000..c2b6cd5a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Processor/IScheduledProcessor.cs @@ -0,0 +1,12 @@ +namespace Redpoint.CloudFramework.Processor +{ + using Quartz; + using System.Threading.Tasks; + + public interface IScheduledProcessor + { + static abstract string RoleName { get; } + + Task ExecuteAsync(IJobExecutionContext context); + } +} diff --git a/UET/Redpoint.CloudFramework/Processor/QuartzCloudFrameworkPostConfigureOptions.cs b/UET/Redpoint.CloudFramework/Processor/QuartzCloudFrameworkPostConfigureOptions.cs new file mode 100644 index 00000000..3c9266f4 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Processor/QuartzCloudFrameworkPostConfigureOptions.cs @@ -0,0 +1,24 @@ +namespace Redpoint.CloudFramework.Processor +{ + using Microsoft.Extensions.Options; + using Quartz; + + internal class QuartzCloudFrameworkPostConfigureOptions : IPostConfigureOptions + { + private readonly IEnumerable _bindings; + + public QuartzCloudFrameworkPostConfigureOptions( + IEnumerable bindings) + { + _bindings = bindings; + } + + public void PostConfigure(string? 
name, QuartzOptions options) + { + foreach (var binding in _bindings) + { + binding.Bind(options); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Processor/QuartzJobMapping.cs b/UET/Redpoint.CloudFramework/Processor/QuartzJobMapping.cs new file mode 100644 index 00000000..63f6102a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Processor/QuartzJobMapping.cs @@ -0,0 +1,22 @@ +namespace Redpoint.CloudFramework.Processor +{ + using Microsoft.Extensions.DependencyInjection; + using Quartz; + using System.Threading.Tasks; + + internal class QuartzJobMapping : IJob where TProcessor : IScheduledProcessor + { + private readonly IServiceProvider _serviceProvider; + + public QuartzJobMapping(IServiceProvider serviceProvider) + { + _serviceProvider = serviceProvider; + } + + public Task Execute(IJobExecutionContext context) + { + var instance = _serviceProvider.GetRequiredService(); + return instance.ExecuteAsync(context); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Processor/QuartzScheduledProcessorBinding.cs b/UET/Redpoint.CloudFramework/Processor/QuartzScheduledProcessorBinding.cs new file mode 100644 index 00000000..bae66c3d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Processor/QuartzScheduledProcessorBinding.cs @@ -0,0 +1,36 @@ +namespace Redpoint.CloudFramework.Processor +{ + using Quartz; + + internal class QuartzScheduledProcessorBinding : IQuartzScheduledProcessorBinding where T : IScheduledProcessor + { + private readonly string _datastoreIdentifier; + private readonly Action _triggerBuilder; + + public QuartzScheduledProcessorBinding( + string datastoreIdentifier, + Action triggerBuilder) + { + _datastoreIdentifier = datastoreIdentifier; + _triggerBuilder = triggerBuilder; + } + + public void Bind(QuartzOptions options) + { + options.AddJob>(configure => + { + configure.DisallowConcurrentExecution(true) + .PersistJobDataAfterExecution(false) + .RequestRecovery(false) + .StoreDurably(false) + .WithIdentity(_datastoreIdentifier); + }); + 
options.AddTrigger(configure => + { + _triggerBuilder(configure); + configure.ForJob(_datastoreIdentifier) + .WithIdentity(_datastoreIdentifier); + }); + } + } +} diff --git a/UET/Redpoint.CloudFramework/README.md b/UET/Redpoint.CloudFramework/README.md new file mode 100644 index 00000000..c1acd4cd --- /dev/null +++ b/UET/Redpoint.CloudFramework/README.md @@ -0,0 +1,9 @@ +# Redpoint.CloudFramework + +A framework for building ASP.NET Core applications on top of Google Cloud Firestore in Datastore mode. Not only does this framework provide a **model-based API for interacting with Google Cloud Firestore**, it contains useful implementations of things like **database migrations**, **distributed locks**, **geographic indexes** and **sharded counters**. + +This framework is provided as-is and has frequent API changes as we adapt it for our own needs. + +## License + +This software is licensed under the MIT license. There's absolutely no support for this software. \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework/React/ApplicationBuilderExtensions.cs b/UET/Redpoint.CloudFramework/React/ApplicationBuilderExtensions.cs new file mode 100644 index 00000000..13a58e9f --- /dev/null +++ b/UET/Redpoint.CloudFramework/React/ApplicationBuilderExtensions.cs @@ -0,0 +1,47 @@ +namespace Redpoint.CloudFramework.React +{ + using global::React.AspNet; + using Microsoft.AspNetCore.Builder; + using Microsoft.Extensions.DependencyInjection; + using Newtonsoft.Json.Converters; + + public static class ApplicationBuilderExtensions + { + public static void UseReactAppWithOpenApi(this IApplicationBuilder app, bool enableReact18 = false) + { + app.UseSwagger(options => + { + options.RouteTemplate = "/api/{documentName}/openapi.json"; + }); + + app.UseSwaggerUI(options => + { + options.SwaggerEndpoint("/api/v1/openapi.json", "v1"); + options.InjectStylesheet("/css/swagger.css"); + }); + + app.UseReact(config => + { + config + .SetReuseJavaScriptEngines(true) + 
.SetReactAppBuildPath("~/dist"); + if (enableReact18) + { + config.EnableReact18RootAPI(); + } + + // Ensure React initialization uses the same enum encoding as System.Text.Json. + config.JsonSerializerSettings.Converters.Add(new StringEnumConverter()); + + // Do not perform camel-casing automatically. + config.JsonSerializerSettings.ContractResolver = null; + + var resourceFilter = app.ApplicationServices.GetService(); + if (resourceFilter != null) + { + config.FilterResource = x => resourceFilter.ShouldIncludeResource(x); + } + }); + } + } +} diff --git a/UET/Redpoint.CloudFramework/React/IWebpackResourceFilter.cs b/UET/Redpoint.CloudFramework/React/IWebpackResourceFilter.cs new file mode 100644 index 00000000..b9fbe178 --- /dev/null +++ b/UET/Redpoint.CloudFramework/React/IWebpackResourceFilter.cs @@ -0,0 +1,7 @@ +namespace Redpoint.CloudFramework.React +{ + public interface IWebpackResourceFilter + { + bool ShouldIncludeResource(string path); + } +} diff --git a/UET/Redpoint.CloudFramework/React/JsonHelperExtensions.cs b/UET/Redpoint.CloudFramework/React/JsonHelperExtensions.cs new file mode 100644 index 00000000..53ac81b8 --- /dev/null +++ b/UET/Redpoint.CloudFramework/React/JsonHelperExtensions.cs @@ -0,0 +1,23 @@ +namespace Redpoint.CloudFramework.React +{ + using Microsoft.AspNetCore.Mvc.Rendering; + using Newtonsoft.Json; + using System.IO; + using System.Text.Encodings.Web; + + public static class JsonHelperExtensions + { + public static object? 
SerializeForReact(this IJsonHelper Json, HtmlEncoder encoder, object input) + { + ArgumentNullException.ThrowIfNull(Json); + + string json; + using (var writer = new StringWriter()) + { + Json.Serialize(input).WriteTo(writer, encoder); + json = writer.ToString(); + } + return JsonConvert.DeserializeObject(json); + } + } +} diff --git a/UET/Redpoint.CloudFramework/React/ServiceCollectionExtensions.cs b/UET/Redpoint.CloudFramework/React/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..0567d51f --- /dev/null +++ b/UET/Redpoint.CloudFramework/React/ServiceCollectionExtensions.cs @@ -0,0 +1,32 @@ +namespace Redpoint.CloudFramework.React +{ + using global::React.AspNet; + using JavaScriptEngineSwitcher.Extensions.MsDependencyInjection; + using JavaScriptEngineSwitcher.V8; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Redpoint.CloudFramework.OpenApi; + using System.Diagnostics; + + public static class ServiceCollectionExtensions + { + public static void AddReactAppWithOpenApi(this IServiceCollection services, IHostEnvironment hostEnvironment) + { + // Add React services. 
+ services.AddJsEngineSwitcher(options => options.DefaultEngineName = V8JsEngine.EngineName) + .AddV8(); + services.AddReact(); + + services.AddSwaggerGenForReactApp(); + services.AddWebpackDevWatchForReactAppInDevelopment(hostEnvironment); + } + + public static void AddWebpackDevWatchForReactAppInDevelopment(this IServiceCollection services, IHostEnvironment hostEnvironment) + { + if (hostEnvironment.IsDevelopment() && Debugger.IsAttached) + { + services.AddSingleton(); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/React/WebpackDevWatchHostedService.cs b/UET/Redpoint.CloudFramework/React/WebpackDevWatchHostedService.cs new file mode 100644 index 00000000..eb813120 --- /dev/null +++ b/UET/Redpoint.CloudFramework/React/WebpackDevWatchHostedService.cs @@ -0,0 +1,129 @@ +namespace Redpoint.CloudFramework.React +{ + using Microsoft.AspNetCore.Hosting; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using System; + using System.Diagnostics; + using System.IO; + using System.Threading; + using System.Threading.Tasks; + + /// + /// This hosted service runs "webpack --watch --mode development" while + /// the app is being run under a debugger in development. This is because + /// the built-in ASP.NET Core functionality surrounding webpack is all designed + /// around SPAs and proxying requests, but we really just need to update the + /// built content on disk and then get the web browser to reload the page. + /// + public sealed class WebpackDevWatchHostedService : IHostedService, IDisposable + { + private readonly IWebHostEnvironment _webHostEnvironment; + private readonly ILogger _logger; + + private Process? 
_webpack; + private bool _expectExit; + + public WebpackDevWatchHostedService( + IWebHostEnvironment webHostEnvironment, + ILogger logger) + { + _webHostEnvironment = webHostEnvironment; + _logger = logger; + _expectExit = false; + } + + public Task StartAsync(CancellationToken cancellationToken) + { + var startInfo = new ProcessStartInfo + { + UseShellExecute = false, + FileName = Path.Combine(_webHostEnvironment.ContentRootPath, "ClientApp", "node_modules", ".bin", "webpack.cmd"), + ArgumentList = + { + "--mode", + "development", + "--watch" + }, + WorkingDirectory = Path.Combine(_webHostEnvironment.ContentRootPath, "ClientApp"), + RedirectStandardOutput = true, + RedirectStandardError = true, + CreateNoWindow = true, + }; + if (File.Exists(Path.Combine(_webHostEnvironment.ContentRootPath, "ClientApp", "tsconfig.webpack.json"))) + { + startInfo.EnvironmentVariables.Add("TS_NODE_PROJECT", "tsconfig.webpack.json"); + } + + _webpack = new Process(); + _webpack.StartInfo = startInfo; + _webpack.Exited += OnWebpackExited; + _webpack.OutputDataReceived += OnWebpackOutput; + _webpack.ErrorDataReceived += OnWebpackError; + _webpack.EnableRaisingEvents = true; + _webpack.Start(); + _webpack.BeginOutputReadLine(); + _webpack.BeginErrorReadLine(); + + return Task.CompletedTask; + } + + private void OnWebpackError(object sender, DataReceivedEventArgs e) + { + if (!string.IsNullOrWhiteSpace(e.Data)) + { + _logger.LogInformation(e.Data.Trim()); + } + } + + private void OnWebpackOutput(object sender, DataReceivedEventArgs e) + { + if (!string.IsNullOrWhiteSpace(e.Data)) + { + _logger.LogInformation(e.Data.Trim()); + } + } + + private async void OnWebpackExited(object? 
sender, EventArgs e) + { + if (_expectExit) + { + return; + } + + _logger.LogInformation("webpack --watch exited unexpectedly, restarting in 1000ms."); + _webpack = null; + await Task.Delay(1000).ConfigureAwait(false); + if (_expectExit) + { + return; + } + + await StartAsync(CancellationToken.None).ConfigureAwait(false); + } + + public Task StopAsync(CancellationToken cancellationToken) + { + _expectExit = true; + if (_webpack != null) + { + _webpack.Kill(); + _webpack.Dispose(); + _webpack = null; + } + + return Task.CompletedTask; + } + + public void Dispose() + { + _expectExit = true; + if (_webpack != null) + { + _webpack.Kill(); + _webpack.Dispose(); + _webpack = null; + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Redpoint.CloudFramework.csproj b/UET/Redpoint.CloudFramework/Redpoint.CloudFramework.csproj new file mode 100644 index 00000000..b2440988 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Redpoint.CloudFramework.csproj @@ -0,0 +1,57 @@ + + + + + + + A framework for building ASP.NET Core applications on top of Google Cloud Firestore in Datastore mode. Not only does this framework provide a model-based API for interacting with Google Cloud Firestore, it contains useful implementations of things like database migrations, distributed locks, geographic indexes and sharded counters. 
+ cloud, datastore, asp.net, asp.net core, framework, distributed counters, sharded counters, geographic queries + + + + false + + + + $(NoWarn);1591 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + RDCommandLine + + + + + + + + + + + + diff --git a/UET/Redpoint.CloudFramework/Repository/Contention/DefaultDatastoreContentionRetry.cs b/UET/Redpoint.CloudFramework/Repository/Contention/DefaultDatastoreContentionRetry.cs new file mode 100644 index 00000000..866495b1 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Contention/DefaultDatastoreContentionRetry.cs @@ -0,0 +1,43 @@ +namespace Redpoint.CloudFramework.Repository.Contention +{ + using Grpc.Core; + using System; + using System.Threading.Tasks; + + internal class DefaultDatastoreContentionRetry : IDatastoreContentionRetry + { + public async Task RunWithContentionRetryAsync( + Func logic, + CancellationToken cancellationToken) + { + retry: + cancellationToken.ThrowIfCancellationRequested(); + try + { + await logic().ConfigureAwait(false); + } + catch (RpcException ex) when (ex.IsContentionException()) + { + await Task.Delay(1000, cancellationToken).ConfigureAwait(false); + goto retry; + } + } + + public async Task RunWithContentionRetryAsync( + Func> logic, + CancellationToken cancellationToken) + { + retry: + cancellationToken.ThrowIfCancellationRequested(); + try + { + return await logic().ConfigureAwait(false); + } + catch (RpcException ex) when (ex.IsContentionException()) + { + await Task.Delay(1000, cancellationToken).ConfigureAwait(false); + goto retry; + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Contention/IDatastoreContentionRetry.cs b/UET/Redpoint.CloudFramework/Repository/Contention/IDatastoreContentionRetry.cs new file mode 100644 index 00000000..a618801b --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Contention/IDatastoreContentionRetry.cs @@ -0,0 +1,16 @@ +namespace Redpoint.CloudFramework.Repository.Contention +{ + using System; + using 
System.Threading.Tasks; + + public interface IDatastoreContentionRetry + { + Task RunWithContentionRetryAsync( + Func logic, + CancellationToken cancellationToken); + + Task RunWithContentionRetryAsync( + Func> logic, + CancellationToken cancellationToken); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Expression/DefaultExpressionConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Expression/DefaultExpressionConverter.cs new file mode 100644 index 00000000..4d49cfa0 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Expression/DefaultExpressionConverter.cs @@ -0,0 +1,426 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Expression +{ + using Google.Cloud.Datastore.V1; + using Google.Type; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Converters.Timestamp; + using Redpoint.StringEnum; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Linq.Expressions; + using System.Reflection; + + // @note: This implementation has to be completely reworked, since it doesn't use IValueConverters yet. 
+ + internal class DefaultExpressionConverter : IExpressionConverter + { + private readonly IInstantTimestampConverter _instantTimestampConverter; + private readonly Dictionary _valueConverters; + + public DefaultExpressionConverter( + IInstantTimestampConverter instantTimestampConverter) + { + _instantTimestampConverter = instantTimestampConverter; + + _valueConverters = new Dictionary(); + foreach (var converter in typeof(Value).GetMethods(BindingFlags.Public | BindingFlags.Static).Where(x => x.Name == "op_Implicit" && x.GetParameters().Length == 1 && x.ReturnType == typeof(Value))) + { + _valueConverters.Add(converter.GetParameters()[0].ParameterType, converter); + } + } + + private string GetFieldReferencedInExpression(Expression expression, ParameterExpression modelExpression, T referenceModel) where T : Model + { + if (expression.NodeType == ExpressionType.MemberAccess) + { + var access = (MemberExpression)expression; + if (access.Expression != modelExpression) + { + // Support for embedded entity queries. + if (access.Expression!.NodeType == ExpressionType.Call) + { + var callAccess = (MethodCallExpression)access.Expression; + if (callAccess.Method == typeof(Entity).GetMethod("get_Item", BindingFlags.Public | BindingFlags.Instance)) + { + // This is sub-access on an entity. + var subpropertyName = Expression.Lambda>(callAccess.Arguments[0]).Compile()(); + return GetFieldReferencedInExpression( + callAccess.Object!, + modelExpression, + referenceModel) + "." + subpropertyName; + } + } + + throw new InvalidOperationException($"Expression must be a member access operation, and the expression that the member access is being performed on must be the model parameter expression. 
It was a '{access.Expression.NodeType}' type expression instead."); + } + + if (access.Member.Name == nameof(Model.Key)) + { + throw new InvalidOperationException($"The 'Key' property can only have the 'HasAncestor' extension method called on it; it can not be used in a comparison."); + } + else + { + if (access.Member.Name == nameof(Model.dateCreatedUtc) || + access.Member.Name == nameof(Model.dateModifiedUtc) || + (referenceModel.GetIndexes().Contains(access.Member.Name) && + referenceModel.GetTypes().ContainsKey(access.Member.Name))) + { + return access.Member.Name; + } + + if (access.Member.Name == nameof(Model.schemaVersion)) + { + return nameof(Model.schemaVersion); + } + } + + throw new InvalidOperationException($"Expression must be a member access operation, and the member being access must be an indexed field or the 'Key' property. It was '{access.Member.Name}' instead, which is not an indexed field (as per GetIndexes())."); + } + else + { + throw new InvalidOperationException($"Expression must be a member access operation (like 'x.field'). It was an '{expression.NodeType}' expression instead."); + } + } + + private Value? EvaluateExpressionRHSToValue(Expression toEvaluate) + { + var valueRaw = Expression.Lambda>(Expression.Convert(toEvaluate, typeof(object))).Compile()(); + + if (valueRaw == null) + { + return null; + } + + var valueType = valueRaw.GetType(); + + if (valueType == typeof(NodaTime.Instant)) + { + return _instantTimestampConverter.FromNodaTimeInstantToDatastoreValue((NodaTime.Instant)valueRaw, false); + } + else if (valueType == typeof(ulong)) + { + return unchecked((long)(ulong)valueRaw); + } + else if (valueType == typeof(ulong?)) + { + return unchecked((long?)(ulong?)valueRaw); + } + else if (valueType.IsConstructedGenericType && + valueType.GetGenericTypeDefinition() == typeof(StringEnumValue<>)) + { + return new Value + { + // We know ToString() for StringEnumValue<> gives us the enumeration value + // for Datastore. 
+ StringValue = valueRaw.ToString(), + }; + } + else if (_valueConverters.TryGetValue(valueType, out MethodInfo? converterMethod)) + { + return (Value)converterMethod.Invoke(null, new object[] { valueRaw })!; + } + else + { + throw new InvalidOperationException($"The RHS expression '{toEvaluate}' evaluates to a value with a type of '{valueRaw.GetType()}', which can not be converted into a Datastore value."); + } + } + + public Filter? ConvertExpressionToFilter(Expression expression, ParameterExpression modelExpression, T referenceModel, ref GeoQueryParameters? geoParameters, ref bool hasAncestorQuery) where T : Model + { + if (expression.NodeType == ExpressionType.Constant && ((ConstantExpression)expression).Type == typeof(bool) && (bool)((ConstantExpression)expression).Value! == true) + { + // Match everything. + return null; + } + else if (expression.NodeType == ExpressionType.AndAlso) + { + // We check for nulls here, since geopoint filters will return null (there's no direct mapping of a + // geopoint filter onto the Filter class). + var binaryExpression = (BinaryExpression)expression; + var lhs = ConvertExpressionToFilter(binaryExpression.Left, modelExpression, referenceModel, ref geoParameters, ref hasAncestorQuery); + var rhs = ConvertExpressionToFilter(binaryExpression.Right, modelExpression, referenceModel, ref geoParameters, ref hasAncestorQuery); + if (lhs == null && rhs == null) + { + throw new ArgumentNullException(nameof(expression), "Expected at least one side of an && expression to have a non-geo filter."); + } + if (lhs == null) return rhs; + if (rhs == null) return lhs; + return Filter.And(lhs, rhs); + } + else if (expression.NodeType == ExpressionType.Call) + { + var callExpression = (MethodCallExpression)expression; + + if (callExpression.Object != null) + { + // Not an extension method call. 
+ throw new InvalidOperationException($"You can only call supported extension methods on specific field types in queries; invoke unsupported '{callExpression.Method.Name}' method."); + } + + var targetExpression = callExpression.Arguments[0]; + var parentExpression = callExpression.Arguments[1]; + + if (targetExpression.NodeType != ExpressionType.MemberAccess) + { + throw new InvalidOperationException($"The only supported method call expressions must be made directly on a model's property or Key. Attempted to invoke a method call on expression with type '{targetExpression.NodeType}'."); + } + + var targetMemberAccess = (MemberExpression)targetExpression; + if (targetMemberAccess.Expression != modelExpression || + !(targetMemberAccess.Member is PropertyInfo)) + { + throw new InvalidOperationException($"The only supported method call expressions must be made directly on a model's property or Key. Attempted to use member access on base expression of type '{targetMemberAccess.Expression?.NodeType}'."); + } + + var propertyInfo = (PropertyInfo)targetMemberAccess.Member; + + if (callExpression.Method == typeof(RepositoryExtensions).GetMethod(nameof(RepositoryExtensions.HasAncestor), BindingFlags.Static | BindingFlags.Public) && + callExpression.Arguments.Count == 2) + { + if (propertyInfo.Name != nameof(Model.Key)) + { + throw new InvalidOperationException($"You can only use 'HasAncestor' on the primary Key and not key properties. 
Attempted to use member access on property named '{propertyInfo.Name}'."); + } + + hasAncestorQuery = true; + return Filter.HasAncestor(EvaluateExpressionRHSToValue(parentExpression)?.KeyValue); + } + else if ( + callExpression.Method == typeof(GeoExtensions).GetMethod(nameof(GeoExtensions.WithinKilometers), BindingFlags.Static | BindingFlags.Public) && + callExpression.Arguments.Count == 3) + { + if (propertyInfo.PropertyType != typeof(LatLng)) + { + throw new InvalidOperationException($"You can only use 'WithinKilometers' on geopoint properties. Attempted to use member access on property named '{propertyInfo.Name}'."); + } + + if (geoParameters != null) + { + throw new InvalidOperationException($"You can only use a single geopoint filter in a query (you can only call 'WithinKilometers' once in the expression)."); + } + + var centerLatLng = Expression.Lambda>(callExpression.Arguments[1]).Compile()(); + var distanceKilometers = Expression.Lambda>(callExpression.Arguments[2]).Compile()(); + var serverSideFilter = Expression.Lambda>(callExpression, modelExpression).Compile(); + var serverSideAccessor = Expression.Lambda>(targetExpression, modelExpression).Compile(); + + geoParameters = new GeoQueryParameters + { + GeoFieldName = propertyInfo.Name, + CenterPoint = centerLatLng, + MinPoint = GeoExtensions.GetRectangularMinPoint(centerLatLng, distanceKilometers), + MaxPoint = GeoExtensions.GetRectangularMaxPoint(centerLatLng, distanceKilometers), + ServerSideFilter = serverSideFilter, + ServerSideAccessor = serverSideAccessor, + DistanceKm = distanceKilometers, + }; + return null; + } + else if (callExpression.Method == typeof(RepositoryExtensions).GetMethod(nameof(RepositoryExtensions.IsAnyString), BindingFlags.Static | BindingFlags.Public) && + callExpression.Arguments.Count == 2) + { + var targetValue = Expression.Lambda>(callExpression.Arguments[1]).Compile()(); + return Filter.Equal(propertyInfo.Name, targetValue); + } + else + { + throw new 
InvalidOperationException($"The only supported method call expressions are calling 'HasAncestor' on the primary Key and calling 'WithinKilometers' on geopoint properties. Attempted to invoke unsupported '{callExpression.Method.Name}' method."); + } + } + else if ( + expression.NodeType == ExpressionType.Equal || + expression.NodeType == ExpressionType.LessThan || + expression.NodeType == ExpressionType.LessThanOrEqual || + expression.NodeType == ExpressionType.GreaterThan || + expression.NodeType == ExpressionType.GreaterThanOrEqual) + { + var binaryExpression = (BinaryExpression)expression; + var field = GetFieldReferencedInExpression(binaryExpression.Left, modelExpression, referenceModel); + var value = EvaluateExpressionRHSToValue(binaryExpression.Right); + + switch (expression.NodeType) + { + case ExpressionType.Equal: + return Filter.Property(field, value, PropertyFilter.Types.Operator.Equal); + case ExpressionType.GreaterThan: + return Filter.Property(field, value, PropertyFilter.Types.Operator.GreaterThan); + case ExpressionType.GreaterThanOrEqual: + return Filter.Property(field, value, PropertyFilter.Types.Operator.GreaterThanOrEqual); + case ExpressionType.LessThan: + return Filter.Property(field, value, PropertyFilter.Types.Operator.LessThan); + case ExpressionType.LessThanOrEqual: + return Filter.Property(field, value, PropertyFilter.Types.Operator.LessThanOrEqual); + } + } + else if (expression.NodeType == ExpressionType.MemberAccess && + ((MemberExpression)expression).Member.MemberType == MemberTypes.Property && + (((PropertyInfo)((MemberExpression)expression).Member).PropertyType == typeof(bool) || + ((PropertyInfo)((MemberExpression)expression).Member).PropertyType == typeof(bool?))) + { + // Same as memberAccess == true. 
+ var field = GetFieldReferencedInExpression(expression, modelExpression, referenceModel); + return Filter.Property(field, true, PropertyFilter.Types.Operator.Equal); + } + else if (expression.NodeType == ExpressionType.Not && + ((UnaryExpression)expression).Operand.NodeType == ExpressionType.MemberAccess && + ((MemberExpression)((UnaryExpression)expression).Operand).Member.MemberType == + MemberTypes.Property && + (((PropertyInfo)((MemberExpression)((UnaryExpression)expression).Operand).Member).PropertyType == + typeof(bool) || + ((PropertyInfo)((MemberExpression)((UnaryExpression)expression).Operand).Member).PropertyType == + typeof(bool?))) + { + // Same as memberAccess == false. + var field = GetFieldReferencedInExpression((MemberExpression)((UnaryExpression)expression).Operand, modelExpression, referenceModel); + return Filter.Property(field, false, PropertyFilter.Types.Operator.Equal); + } + + throw new InvalidOperationException($"Expression of type '{expression.NodeType}' is not supported in QueryAsync calls."); + } + + public IEnumerable? ConvertExpressionToOrder(Expression expression, ParameterExpression modelExpression, T referenceModel, ref GeoQueryParameters? 
geoParameters) where T : Model + { + if (expression.NodeType == ExpressionType.Or) + { + if (geoParameters != null) + { + throw new InvalidOperationException("Geographic queries can only sort by the geographic field at the top level; you can not order by multiple properties in a geographic query."); + } + + var binaryExpression = (BinaryExpression)expression; + return + ConvertExpressionToOrder(binaryExpression.Left, modelExpression, referenceModel, ref geoParameters)!.Concat( + ConvertExpressionToOrder(binaryExpression.Right, modelExpression, referenceModel, ref geoParameters)!); + } + else if (expression.NodeType == ExpressionType.Call) + { + var callExpression = (MethodCallExpression)expression; + + if (callExpression.Object != null || + callExpression.Arguments.Count != 1) + { + // Not an extension method call. + throw new InvalidOperationException($"You can only call supported extension methods on specific field types in queries; invoke unsupported '{callExpression.Method.Name}' method."); + } + + var targetExpression = callExpression.Arguments[0]; + + if (targetExpression.NodeType != ExpressionType.MemberAccess) + { + throw new InvalidOperationException($"The only supported method call expressions must be made directly on a model's property. Attempted to invoke a method call on expression with type '{targetExpression.NodeType}'."); + } + + var targetMemberAccess = (MemberExpression)targetExpression; + if (targetMemberAccess.Expression != modelExpression || + !(targetMemberAccess.Member is PropertyInfo)) + { + throw new InvalidOperationException($"The only supported method call expressions must be made directly on a model's property. 
Attempted to use member access on base expression of type '{targetMemberAccess.Expression?.NodeType}'."); + } + + var propertyInfo = (PropertyInfo)targetMemberAccess.Member; + + if (callExpression.Method == typeof(GeoExtensions).GetMethod(nameof(GeoExtensions.Nearest), BindingFlags.Static | BindingFlags.Public) || + callExpression.Method == typeof(GeoExtensions).GetMethod(nameof(GeoExtensions.Furthest), BindingFlags.Static | BindingFlags.Public)) + { + if (propertyInfo.PropertyType != typeof(LatLng)) + { + throw new InvalidOperationException($"You can only use 'Nearest' and 'Furthest' on geopoint properties in sort expressions. Attempted to use member access on property named '{propertyInfo.Name}'."); + } + + if (propertyInfo.Name != geoParameters?.GeoFieldName) + { + throw new InvalidOperationException($"You can only sort by geographic properties if you are also filtering on them with 'WithinKilometers'."); + } + else if (geoParameters.SortDirection.HasValue) + { + throw new InvalidOperationException($"You can only specify a geographic field once in a sort expression."); + } + + geoParameters.SortDirection = callExpression.Method == typeof(GeoExtensions).GetMethod(nameof(GeoExtensions.Nearest), BindingFlags.Static | BindingFlags.Public) ? PropertyOrder.Types.Direction.Ascending : PropertyOrder.Types.Direction.Descending; + return null; + } + else + { + throw new InvalidOperationException($"The only supported method call expressions are calling 'Nearest' or 'Furthest' on geopoint properties in sort expressions. 
Attempted to invoke unsupported '{callExpression.Method.Name}' method."); + } + } + else if (expression.NodeType == ExpressionType.LessThan || + expression.NodeType == ExpressionType.GreaterThan) + { + var leftField = GetFieldReferencedInExpression(((BinaryExpression)expression).Left, modelExpression, referenceModel); + var rightField = GetFieldReferencedInExpression(((BinaryExpression)expression).Right, modelExpression, referenceModel); + if (leftField != rightField) + { + throw new InvalidOperationException($"Individual order expressions must be of the form 'x.prop > x.prop' or 'x.prop < x.prop', and the property name must be the same. The left field was '{leftField}' and the right field was '{rightField}'."); + } + + return new PropertyOrder[1] + { + new PropertyOrder + { + Property = new PropertyReference(leftField), + Direction = expression.NodeType == ExpressionType.LessThan ? PropertyOrder.Types.Direction.Ascending : PropertyOrder.Types.Direction.Descending, + } + }; + } + else + { + throw new InvalidOperationException($"Overall order expressions must be of the form 'x.prop > x.prop | x.prop2 < x.prop2'. The expression type was '{expression.NodeType}'."); + } + } + + private class SimplifyFilterStackValue + { + public CompositeFilter _filter; + public int _index; + public SimplifyFilterStackValue(CompositeFilter filter, int index) + { + _filter = filter; + _index = index; + } + } + + public Filter? SimplifyFilter(Filter? filter) + { + if (filter == null) + { + return null; + } + + if (filter.FilterTypeCase == Filter.FilterTypeOneofCase.PropertyFilter) + { + return filter; + } + + // Otherwise, recursively expand all of the composite filters so we can have + // a single top-level composite filter. 
+ var newFilters = new List(); + var filterStack = new Stack(); + filterStack.Push(new SimplifyFilterStackValue(filter.CompositeFilter, 0)); + while (filterStack.Count > 0) + { + var stackValue = filterStack.Peek(); + if (stackValue._index >= stackValue._filter.Filters.Count) + { + filterStack.Pop(); + continue; + } + + var nextFilter = stackValue._filter.Filters[stackValue._index]; + stackValue._index++; + + if (nextFilter.FilterTypeCase == Filter.FilterTypeOneofCase.PropertyFilter) + { + newFilters.Add(nextFilter); + } + else + { + filterStack.Push(new SimplifyFilterStackValue(nextFilter.CompositeFilter, 0)); + } + } + + return Filter.And(newFilters); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Expression/GeoQueryParameters.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Expression/GeoQueryParameters.cs new file mode 100644 index 00000000..56c7b239 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Expression/GeoQueryParameters.cs @@ -0,0 +1,19 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Expression +{ + using Google.Cloud.Datastore.V1; + using Google.Type; + using Redpoint.CloudFramework.Models; + using System; + + internal class GeoQueryParameters where T : Model + { + public required string GeoFieldName { get; set; } + public required LatLng MinPoint { get; set; } + public required LatLng MaxPoint { get; set; } + public required LatLng CenterPoint { get; set; } + public required Func ServerSideFilter { get; set; } + public required Func ServerSideAccessor { get; set; } + public required float DistanceKm { get; set; } + public PropertyOrder.Types.Direction? 
SortDirection { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Expression/IExpressionConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Expression/IExpressionConverter.cs new file mode 100644 index 00000000..24181e1f --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Expression/IExpressionConverter.cs @@ -0,0 +1,16 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Expression +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + using System.Collections.Generic; + using System.Linq.Expressions; + + internal interface IExpressionConverter + { + Filter? ConvertExpressionToFilter(Expression expression, ParameterExpression modelExpression, T referenceModel, ref GeoQueryParameters? geoParameters, ref bool hasAncestorQuery) where T : Model; + + IEnumerable? ConvertExpressionToOrder(Expression expression, ParameterExpression modelExpression, T referenceModel, ref GeoQueryParameters? geoParameters) where T : Model; + + Filter? SimplifyFilter(Filter? filter); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/JsonHelpers/NamedEnumConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/JsonHelpers/NamedEnumConverter.cs new file mode 100644 index 00000000..a28bce9b --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/JsonHelpers/NamedEnumConverter.cs @@ -0,0 +1,93 @@ +namespace Redpoint.CloudFramework.Repository.Converters.JsonHelpers +{ + using Newtonsoft.Json; + using Redpoint.CloudFramework.Infrastructure; + using System; + using System.Reflection; + + internal class NamedEnumConverter : JsonConverter + { + public override bool CanConvert(Type objectType) + { + var t = (objectType.IsValueType && objectType.IsGenericType && objectType.GetGenericTypeDefinition() == typeof(Nullable<>)) + ? Nullable.GetUnderlyingType(objectType)! + : objectType; + + return t.IsEnum; + } + + public override object? 
ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer) + { + if (reader.TokenType == JsonToken.Null) + { + if (!(objectType.IsValueType && objectType.IsGenericType && objectType.GetGenericTypeDefinition() == typeof(Nullable<>))) + { + throw new JsonSerializationException("Cannot convert null value to " + objectType.Name); + } + + return null; + } + + var enumType = (objectType.IsValueType && objectType.IsGenericType && objectType.GetGenericTypeDefinition() == typeof(Nullable<>)) + ? Nullable.GetUnderlyingType(objectType)! + : objectType; + + var enumTypeNamedAttribute = enumType.GetCustomAttributes(typeof(INamedEnum)).Cast().FirstOrDefault(); + if (enumTypeNamedAttribute == null || enumTypeNamedAttribute.EnumType != enumType) + { + throw new JsonSerializationException($"{enumType.FullName} is missing the [NamedEnum<{enumType.Name}>] attribute, or its type parameter is incorrect."); + } + + if (reader.TokenType == JsonToken.String) + { + var memberInfos = enumTypeNamedAttribute.EnumType.GetFields(BindingFlags.Public | BindingFlags.Static); + foreach (var memberInfo in memberInfos) + { + var attributes = memberInfo.GetCustomAttributes(typeof(NamedEnumValueAttribute), false); + var namedValue = ((NamedEnumValueAttribute)attributes[0]).Name; + + if (namedValue == reader.Value!.ToString()) + { + return Enum.Parse(enumTypeNamedAttribute.EnumType, memberInfo.Name); + } + } + + throw new JsonSerializationException("Unable to find mapped value for '" + reader.Value!.ToString() + "'."); + } + + throw new JsonSerializationException("Unexpected token when parsing enum."); + } + + public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer) + { + if (value == null) + { + writer.WriteNull(); + return; + } + + var objectType = value.GetType(); + var enumType = (objectType.IsValueType && objectType.IsGenericType && objectType.GetGenericTypeDefinition() == typeof(Nullable<>)) + ? 
Nullable.GetUnderlyingType(objectType) + : objectType; + if (enumType == null) + { + writer.WriteNull(); + return; + } + + var fieldName = Enum.GetName(enumType, value); + if (fieldName == null) + { + writer.WriteNull(); + return; + } + + var memberInfo = enumType.GetMember(fieldName); + var attributes = memberInfo[0].GetCustomAttributes(typeof(NamedEnumValueAttribute), false); + var namedValue = ((NamedEnumValueAttribute)attributes[0]).Name; + + writer.WriteValue(namedValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/JsonHelpers/NodaTimeInstantJsonConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/JsonHelpers/NodaTimeInstantJsonConverter.cs new file mode 100644 index 00000000..d3838b4c --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/JsonHelpers/NodaTimeInstantJsonConverter.cs @@ -0,0 +1,45 @@ +namespace Redpoint.CloudFramework.Repository.Converters.JsonHelpers +{ + using Newtonsoft.Json; + using Newtonsoft.Json.Linq; + using NodaTime; + using System; + + internal class NodaTimeInstantJsonConverter : JsonConverter + { + public override bool CanConvert(Type objectType) + { + return objectType == typeof(Instant); + } + + public override object? ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer) + { + var obj = JToken.ReadFrom(reader); + + if (obj == null || obj?.Type == JTokenType.Null) + { + return null; + } + + return Instant.FromUnixTimeSeconds(obj!["seconds"]?.Value() ?? 0).PlusNanoseconds(obj!["nanos"]?.Value() ?? 0); + } + + public override void WriteJson(JsonWriter writer, object? 
value, JsonSerializer serializer) + { + var instant = (Instant?)value; + + if (instant == null) + { + JValue.CreateNull().WriteTo(writer); + return; + } + + var seconds = instant.Value.ToUnixTimeSeconds(); + var nanos = (instant.Value - Instant.FromUnixTimeSeconds(instant.Value.ToUnixTimeSeconds())).SubsecondNanoseconds; + var obj = new JObject(); + obj["seconds"] = (long)seconds; + obj["nanos"] = (long)nanos; + obj.WriteTo(writer); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/JsonHelpers/VersionedJsonConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/JsonHelpers/VersionedJsonConverter.cs new file mode 100644 index 00000000..f2cdc72a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/JsonHelpers/VersionedJsonConverter.cs @@ -0,0 +1,89 @@ +namespace Redpoint.CloudFramework.Repository.Converters.JsonHelpers +{ + using Newtonsoft.Json; + using Newtonsoft.Json.Linq; + using Redpoint.CloudFramework.Models; + using System; + using System.Linq; + using System.Reflection; + + internal class VersionedJsonConverter : JsonConverter + { + public override bool CanConvert(Type objectType) + { + // Check if the type has a SchemaVersion attribute. + var schemaVersionAttributes = objectType.GetCustomAttributes(typeof(SchemaVersionAttribute), false).OfType().ToArray(); + if (schemaVersionAttributes.Length == 0) + { + return false; + } + else + { + // Check that the code has a deserializer for every version. + var schemaVersion = schemaVersionAttributes[0].SchemaVersion; + for (var version = 1u; version < schemaVersion; version++) + { + if (objectType.GetMethod("DeserializeFromVersion" + version, BindingFlags.Public | BindingFlags.Static) == null) + { + return false; + } + } + + return true; + } + } + + public override object? ReadJson(JsonReader reader, Type objectType, object? 
existingValue, JsonSerializer serializer) + { + var obj = JToken.ReadFrom(reader); + + var codeVersion = objectType.GetCustomAttributes(typeof(SchemaVersionAttribute), false).OfType().First().SchemaVersion; + + if (obj.Type != JTokenType.Object) + { + return obj.ToObject(objectType); + } + + var schemaVersion = obj["$rcf$schemaVersion"]; + if (schemaVersion == null) + { + return obj.ToObject(objectType); + } + + var dataVersion = schemaVersion.ToObject(); + + if (codeVersion == dataVersion) + { + return obj.ToObject(objectType); + } + + // Schema version doesn't match, ask to code to deserialize from older version. + var deserializer = objectType.GetMethod("DeserializeFromVersion" + dataVersion, BindingFlags.Public | BindingFlags.Static)!; + return deserializer.Invoke(null, new[] { obj.ToString() }); + } + + public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer) + { + if (value == null) + { + writer.WriteNull(); + return; + } + + var codeVersion = value.GetType().GetCustomAttributes(typeof(SchemaVersionAttribute), false).OfType().First().SchemaVersion; + + var token = JToken.FromObject(value); + + if (token.Type != JTokenType.Object) + { + token.WriteTo(writer); + } + else + { + var obj = (JObject)token; + obj.AddFirst(new JProperty("$rcf$schemaVersion", codeVersion)); + obj.WriteTo(writer); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Model/EntityModelConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Model/EntityModelConverter.cs new file mode 100644 index 00000000..6eea847e --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Model/EntityModelConverter.cs @@ -0,0 +1,201 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Model +{ + using Google.Cloud.Datastore.V1; + using Microsoft.Extensions.Logging; + using NodaTime; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Converters.Timestamp; + using 
Redpoint.CloudFramework.Repository.Converters.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + using System; + using System.Collections.Generic; + using System.Reflection; + using Value = Google.Cloud.Datastore.V1.Value; + + internal class EntityModelConverter : IModelConverter + { + private readonly ILogger _logger; + private readonly IInstantTimestampConverter _instantTimestampConversion; + private readonly IValueConverterProvider _valueConverterProvider; + + public EntityModelConverter( + ILogger logger, + IInstantTimestampConverter instantTimestampConversion, + IValueConverterProvider valueConverterProvider) + { + _logger = logger; + _instantTimestampConversion = instantTimestampConversion; + _valueConverterProvider = valueConverterProvider; + } + + public T From(string @namespace, Entity entity) where T : Model, new() + { + var @ref = new T(); + @ref._originalData = new Dictionary(); + + var delayedLoads = new List>(); + + var conversionContext = new DatastoreValueConvertFromContext + { + ModelNamespace = @namespace, + }; + + var defaults = @ref.GetDefaultValues(); + var types = @ref.GetTypes(); + foreach (var kv in types) + { + var typeInfo = @ref.GetType(); + var propInfo = typeInfo.GetProperty(kv.Key, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (propInfo == null) + { + _logger.LogWarning($"Model {typeof(T).FullName} declares property {kv.Key} but is missing C# declaration"); + continue; + } + + var converter = _valueConverterProvider.GetConverter(kv.Value, propInfo.PropertyType); + + object? value; + if (entity[kv.Key]?.IsNull ?? true) + { + // Preserve null. 
+ value = null; + } + else + { + value = converter.ConvertFromDatastoreValue( + conversionContext, + kv.Key, + propInfo.PropertyType, + entity[kv.Key], + (callback) => + { + delayedLoads.Add((localNamespace) => + { + var delayedValue = callback(localNamespace); + propInfo.SetValue(@ref, delayedValue); + @ref._originalData[kv.Key] = delayedValue; + }); + }); + } + + if (value == null && + defaults != null && + defaults.TryGetValue(kv.Key, out object? defaultValue)) + { + value = converter.ConvertFromClrDefaultValue( + conversionContext, + kv.Key, + propInfo.PropertyType, + defaultValue); + } + + propInfo.SetValue(@ref, value); + + @ref._originalData[kv.Key] = value; + } + + @ref.dateCreatedUtc = _instantTimestampConversion.FromDatastoreValueToNodaTimeInstant(entity["dateCreatedUtc"]); + @ref.dateModifiedUtc = _instantTimestampConversion.FromDatastoreValueToNodaTimeInstant(entity["dateModifiedUtc"]); + @ref.Key = entity.Key; + if (entity["schemaVersion"]?.IsNull ?? true || entity["schemaVersion"].ValueTypeCase != Value.ValueTypeOneofCase.IntegerValue) + { + @ref.schemaVersion = null; + } + else + { + @ref.schemaVersion = entity["schemaVersion"].IntegerValue; + } + + // If we have any delayed local key assignments, run them now (before migrations, in case + // migrations want to handle local-key properties). + if (delayedLoads.Count > 0) + { + var localNamespace = @ref.GetDatastoreNamespaceForLocalKeys(); + foreach (var delayedLoad in delayedLoads) + { + delayedLoad(localNamespace); + } + } + + return @ref; + } + + public Entity To(string @namespace, T? model, bool isCreateContext, Func? 
incompleteKeyFactory) where T : Model, new() + { + var entity = new Entity(); + + ArgumentNullException.ThrowIfNull(model); + + var conversionContext = new DatastoreValueConvertToContext + { + ModelNamespace = @namespace, + Model = model, + Entity = entity, + }; + + var defaults = model.GetDefaultValues(); + var types = model.GetTypes(); + var indexes = model.GetIndexes(); + foreach (var kv in types) + { + var typeInfo = model.GetType(); + var propInfo = typeInfo.GetProperty(kv.Key, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (propInfo == null) + { + throw new InvalidOperationException($"The property '{kv.Key}' could not be found on '{typeInfo.FullName}'. Ensure the datastore type declarations are correct."); + } + var value = propInfo.GetValue(model); + + var converter = _valueConverterProvider.GetConverter(kv.Value, propInfo.PropertyType); + + if (value == null && + defaults != null && + defaults.TryGetValue(kv.Key, out object? defaultValue)) + { + value = converter.ConvertFromClrDefaultValue( + conversionContext, + kv.Key, + propInfo.PropertyType, + defaultValue); + } + + entity[kv.Key] = converter.ConvertToDatastoreValue( + conversionContext, + kv.Key, + propInfo.PropertyType, + value, + indexes.Contains(kv.Key)); + } + + if (model.Key == null) + { + // @note: This used to call CreateIncompleteKey for the caller, but since the database context + // isn't available here, it's now a callback instead. 
+ ArgumentNullException.ThrowIfNull(incompleteKeyFactory); + entity.Key = incompleteKeyFactory(model); + } + else + { + entity.Key = model.Key; + } + + var now = SystemClock.Instance.GetCurrentInstant(); + if (isCreateContext || model.dateCreatedUtc == null) + { + model.dateCreatedUtc = now; + } + + model.dateModifiedUtc = now; + model.schemaVersion = model.GetSchemaVersion(); + + entity["dateCreatedUtc"] = _instantTimestampConversion.FromNodaTimeInstantToDatastoreValue(model.dateCreatedUtc, false); + entity["dateModifiedUtc"] = _instantTimestampConversion.FromNodaTimeInstantToDatastoreValue(model.dateModifiedUtc, false); + entity["schemaVersion"] = model.schemaVersion; + + // hasImplicitMigrationsApplied is only for runtime checks so application code can see + // if an entity was implicitly modified by migrations. + + return entity; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Model/IModelConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Model/IModelConverter.cs new file mode 100644 index 00000000..94112c4c --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Model/IModelConverter.cs @@ -0,0 +1,13 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Model +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + using System; + + internal interface IModelConverter + { + T? From(string @namespace, TOther data) where T : Model, new(); + + TOther To(string @namespace, T? model, bool isCreateContext, Func? 
incompleteKeyFactory) where T : Model, new(); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Model/JsonModelConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Model/JsonModelConverter.cs new file mode 100644 index 00000000..feebdf81 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Model/JsonModelConverter.cs @@ -0,0 +1,204 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Model +{ + using Redpoint.CloudFramework.Models; + using System; + using System.Collections.Generic; + using Google.Cloud.Datastore.V1; + using Microsoft.Extensions.Logging; + using Newtonsoft.Json.Linq; + using Redpoint.CloudFramework.Prefix; + using System.Reflection; + using Redpoint.CloudFramework.Repository.Converters.Timestamp; + using Redpoint.CloudFramework.Repository.Converters.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class JsonModelConverter : IModelConverter + { + private readonly IGlobalPrefix _globalPrefix; + private readonly ILogger _logger; + private readonly IInstantTimestampJsonConverter _instantTimestampJsonConverter; + private readonly IValueConverterProvider _valueConverterProvider; + + public JsonModelConverter( + IGlobalPrefix globalPrefix, + ILogger logger, + IInstantTimestampJsonConverter instantTimestampJsonConverter, + IValueConverterProvider valueConverterProvider) + { + _globalPrefix = globalPrefix; + _logger = logger; + _instantTimestampJsonConverter = instantTimestampJsonConverter; + _valueConverterProvider = valueConverterProvider; + } + + public T? From(string @namespace, string jsonCache) where T : Model, new() + { + var model = new T(); + model._originalData = new Dictionary(); + + var hashset = JObject.Parse(jsonCache); + + if (hashset["_isnull"]?.Value() ?? false) + { + // The object does not exist (and we've cached the non-existence of it + // during a previous load). 
+ return null; + } + + var delayedLoads = new List>(); + + var conversionContext = new JsonValueConvertFromContext + { + ModelNamespace = @namespace, + }; + + var defaults = model.GetDefaultValues(); + var types = model.GetTypes(); + foreach (var kv in types) + { + var typeInfo = model.GetType(); + var propInfo = typeInfo.GetProperty(kv.Key, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (propInfo == null) + { + _logger.LogWarning($"Model {typeof(T).FullName} declares property {kv.Key} but is missing C# declaration"); + continue; + } + + var converter = _valueConverterProvider.GetConverter(kv.Value, propInfo.PropertyType); + + object? value; + if (!hashset.ContainsKey(kv.Key) || + hashset[kv.Key] == null || + hashset[kv.Key]!.Type == JTokenType.Null) + { + // Preserve null. + value = null; + } + else + { + value = converter.ConvertFromJsonToken( + conversionContext, + kv.Key, + propInfo.PropertyType, + hashset[kv.Key]!, + (callback) => + { + delayedLoads.Add((localNamespace) => + { + var delayedValue = callback(localNamespace); + propInfo.SetValue(model, delayedValue); + model._originalData[kv.Key] = delayedValue; + }); + }); + } + + if (value == null && + defaults != null && + defaults.TryGetValue(kv.Key, out object? 
defaultValue)) + { + value = converter.ConvertFromClrDefaultValue( + conversionContext, + kv.Key, + propInfo.PropertyType, + defaultValue); + } + + propInfo.SetValue(model, value); + + model._originalData[kv.Key] = value; + } + + var keyStr = hashset["_key"]?.Value(); + if (keyStr == null) + { + throw new InvalidOperationException("JSON entity in cache has incorrect _key property!"); + } + model.Key = _globalPrefix.ParseInternal(@namespace, keyStr); + model.dateCreatedUtc = _instantTimestampJsonConverter.FromJsonCacheToNodaTimeInstant(hashset["_dateCreatedUtc"]); + model.dateModifiedUtc = _instantTimestampJsonConverter.FromJsonCacheToNodaTimeInstant(hashset["_dateModifiedUtc"]); + model.schemaVersion = hashset["_schemaVersion"]?.Value(); + + // If we have any delayed local key assignments, run them now (before migrations, in case + // migrations want to handle local-key properties). + if (delayedLoads.Count > 0) + { + var localNamespace = model.GetDatastoreNamespaceForLocalKeys(); + foreach (var delayedLoad in delayedLoads) + { + delayedLoad(localNamespace); + } + } + + return model; + } + + public string To(string @namespace, T? model, bool isCreateContext, Func? 
incompleteKeyFactory) where T : Model, new() + { + var hashset = new JObject(); + + if (model == null) + { + hashset.Add("_isnull", true); + } + else + { + var conversionContext = new JsonValueConvertToContext + { + ModelNamespace = @namespace, + Model = model, + }; + + var defaults = model.GetDefaultValues(); + var types = model.GetTypes(); + foreach (var kv in types) + { + var typeInfo = model.GetType(); + var propInfo = typeInfo.GetProperty(kv.Key, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (propInfo == null) + { + _logger.LogWarning($"Model {typeof(T).FullName} declares property {kv.Key} but is missing C# declaration"); + continue; + } + + var value = propInfo.GetValue(model); + + var converter = _valueConverterProvider.GetConverter(kv.Value, propInfo.PropertyType); + + if (value == null && + defaults != null && + defaults.TryGetValue(kv.Key, out object? defaultValue)) + { + value = converter.ConvertFromClrDefaultValue( + conversionContext, + kv.Key, + propInfo.PropertyType, + defaultValue); + } + + if (value == null) + { + hashset.Add(kv.Key, JValue.CreateNull()); + } + else + { + hashset.Add( + kv.Key, + converter.ConvertToJsonToken( + conversionContext, + kv.Key, + propInfo.PropertyType, + value)); + } + } + + hashset.Add("_key", _globalPrefix.CreateInternal(model.Key)); + hashset.Add("_dateCreatedUtc", _instantTimestampJsonConverter.FromNodaTimeInstantToJsonCache(model.dateCreatedUtc)); + hashset.Add("_dateModifiedUtc", _instantTimestampJsonConverter.FromNodaTimeInstantToJsonCache(model.dateModifiedUtc)); + hashset.Add("_schemaVersion", model.schemaVersion); + } + + return hashset.ToString(); + } + + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/DefaultInstantTimestampConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/DefaultInstantTimestampConverter.cs new file mode 100644 index 00000000..d1a27f12 --- /dev/null +++ 
b/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/DefaultInstantTimestampConverter.cs @@ -0,0 +1,37 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Timestamp +{ + using Google.Protobuf.WellKnownTypes; + using NodaTime; + using Value = Google.Cloud.Datastore.V1.Value; + + internal class DefaultInstantTimestampConverter : IInstantTimestampConverter + { + public Instant? FromDatastoreValueToNodaTimeInstant(Value value) + { + if (value?.TimestampValue == null) + { + return null; + } + + return Instant.FromUnixTimeSeconds(value.TimestampValue.Seconds) + NodaTime.Duration.FromNanoseconds(value.TimestampValue.Nanos); + } + + public Value FromNodaTimeInstantToDatastoreValue(Instant? instant, bool excludeFromIndexes) + { + if (instant == null) + { + return Value.ForNull(); + } + + return new Value + { + TimestampValue = new Timestamp + { + Seconds = instant.Value.ToUnixTimeSeconds(), + Nanos = (instant.Value - Instant.FromUnixTimeSeconds(instant.Value.ToUnixTimeSeconds())).SubsecondNanoseconds, + }, + ExcludeFromIndexes = excludeFromIndexes, + }; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/DefaultInstantTimestampJsonConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/DefaultInstantTimestampJsonConverter.cs new file mode 100644 index 00000000..ef1f7966 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/DefaultInstantTimestampJsonConverter.cs @@ -0,0 +1,33 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Timestamp +{ + using Newtonsoft.Json.Linq; + using NodaTime; + + internal class DefaultInstantTimestampJsonConverter : IInstantTimestampJsonConverter + { + public Instant? FromJsonCacheToNodaTimeInstant(JToken? obj) + { + if (obj == null || obj?.Type == JTokenType.Null) + { + return null; + } + + return Instant.FromUnixTimeSeconds(obj!["seconds"]?.Value() ?? 0).PlusNanoseconds(obj!["nanos"]?.Value() ?? 
0); + } + + public JToken FromNodaTimeInstantToJsonCache(Instant? instant) + { + if (instant == null) + { + return JValue.CreateNull(); + } + + var seconds = instant.Value.ToUnixTimeSeconds(); + var nanos = (instant.Value - Instant.FromUnixTimeSeconds(instant.Value.ToUnixTimeSeconds())).SubsecondNanoseconds; + var obj = new JObject(); + obj["seconds"] = (long)seconds; + obj["nanos"] = (long)nanos; + return obj; + } + } +} \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/IInstantTimestampConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/IInstantTimestampConverter.cs new file mode 100644 index 00000000..65ac36d2 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/IInstantTimestampConverter.cs @@ -0,0 +1,11 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Timestamp +{ + using Google.Cloud.Datastore.V1; + using NodaTime; + + public interface IInstantTimestampConverter + { + Instant? FromDatastoreValueToNodaTimeInstant(Value value); + Value FromNodaTimeInstantToDatastoreValue(Instant? instant, bool excludeFromIndexes); + } +} \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/IInstantTimestampJsonConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/IInstantTimestampJsonConverter.cs new file mode 100644 index 00000000..fecfaa1e --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Timestamp/IInstantTimestampJsonConverter.cs @@ -0,0 +1,11 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Timestamp +{ + using Newtonsoft.Json.Linq; + using NodaTime; + + internal interface IInstantTimestampJsonConverter + { + Instant? FromJsonCacheToNodaTimeInstant(JToken? obj); + JToken FromNodaTimeInstantToJsonCache(Instant? 
instant); + } +} \ No newline at end of file diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/BaseArrayValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/BaseArrayValueConverter.cs new file mode 100644 index 00000000..5c41dd22 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/BaseArrayValueConverter.cs @@ -0,0 +1,190 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + using System.Collections; + using System.Collections.Generic; + + internal abstract class BaseArrayValueConverter : IValueConverter + { + public abstract FieldType GetFieldType(); + + public abstract bool IsConverterForClrType(Type propertyClrArrayType); + + protected abstract Type GetElementType(Type propertyClrArrayType); + + protected abstract object ConstructClrValueFromArrayList(Type propertyClrArrayType, ArrayList arrayList); + + protected abstract object? ConvertFromClrDefaultElementValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrDefaultElementValue); + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + var result = new ArrayList(); + foreach (var element in (IEnumerable)propertyClrDefaultValue) + { + if (element != null) + { + result.Add(ConvertFromClrDefaultElementValue( + context, + propertyName, + GetElementType(propertyClrType), + element)); + } + } + return ConstructClrValueFromArrayList( + propertyClrType, + result); + } + + protected abstract object? 
ConvertFromDatastoreElementValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + Value propertyNonNullDatastoreElementValue); + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var result = new ArrayList(); + foreach (var element in propertyNonNullDatastoreValue.ArrayValue.Values) + { + if (!element.IsNull) + { + result.Add(ConvertFromDatastoreElementValue( + context, + propertyName, + GetElementType(propertyClrType), + element)); + } + } + return ConstructClrValueFromArrayList( + propertyClrType, + result); + } + + protected abstract Value ConvertToDatastoreElementValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue, + bool propertyIndexed); + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + if (propertyClrValue == null) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + + ArrayValue datastoreValue = new ArrayValue(); + foreach (var clrElementValue in (IEnumerable)propertyClrValue) + { + if (clrElementValue != null) + { + datastoreValue.Values.Add(ConvertToDatastoreElementValue( + context, + propertyName, + GetElementType(propertyClrType), + clrElementValue, + propertyIndexed)); + } + } + return new Value + { + ArrayValue = datastoreValue, + // @note: This is apparently not permitted according to the Datastore Emulator. + // ExcludeFromIndexes = !propertyIndexed, + }; + } + + protected abstract object? 
ConvertFromJsonElementToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + JToken propertyNonNullJsonElementToken); + + public object? ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var result = new ArrayList(); + + var array = propertyNonNullJsonToken.Value(); + // @note: Guards against JSON cache tokens not being array values. + if (array != null) + { + foreach (var token in array) + { + if (token.Type != JTokenType.Null) + { + result.Add(ConvertFromJsonElementToken( + context, + propertyName, + GetElementType(propertyClrType), + token)); + } + } + } + + return ConstructClrValueFromArrayList( + propertyClrType, + result); + } + + protected abstract JToken ConvertFromJsonElementValue( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue); + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + var jsonElementTokens = new List(); + foreach (var clrElementValue in (IEnumerable)propertyNonNullClrValue) + { + if (clrElementValue != null) + { + jsonElementTokens.Add(ConvertFromJsonElementValue( + context, + propertyName, + GetElementType(propertyClrType), + clrElementValue)); + } + } + return new JArray(jsonElementTokens); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/BooleanValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/BooleanValueConverter.cs new file mode 100644 index 00000000..391b8e94 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/BooleanValueConverter.cs @@ -0,0 +1,87 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using 
Newtonsoft.Json.Linq; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class BooleanValueConverter : IValueConverter + { + public FieldType GetFieldType() + { + return FieldType.Boolean; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(bool) || + clrType == typeof(bool?); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + return propertyClrDefaultValue; + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullDatastoreValue.BooleanValue; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + var nullable = (bool?)propertyClrValue; + if (!nullable.HasValue) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + return new Value + { + BooleanValue = nullable.Value, + ExcludeFromIndexes = !propertyIndexed, + }; + } + } + + public object? 
ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullJsonToken.Value(); + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return new JValue((bool)propertyNonNullClrValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/AddConvertFromDelayedLoad.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/AddConvertFromDelayedLoad.cs new file mode 100644 index 00000000..f71fce1d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/AddConvertFromDelayedLoad.cs @@ -0,0 +1,8 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value.Context +{ + /// + /// This callback is provided to conversion methods so they can register delayed loads. + /// + /// The delayed load to invoke after all other fields are loaded. + internal delegate void AddConvertFromDelayedLoad(ConvertFromDelayedLoad convertFromDelayedLoad); +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/ClrValueConvertFromContext.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/ClrValueConvertFromContext.cs new file mode 100644 index 00000000..48c1c41e --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/ClrValueConvertFromContext.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value.Context +{ + /// + /// Provides additional context when converting constant expressions from [Default] attributes into full CLR values. 
+ /// + internal class ClrValueConvertFromContext + { + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/ConvertFromDelayedLoad.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/ConvertFromDelayedLoad.cs new file mode 100644 index 00000000..f63a947f --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/ConvertFromDelayedLoad.cs @@ -0,0 +1,13 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value.Context +{ + using Redpoint.CloudFramework.Models; + using Type = Type; + + /// + /// A delegate which is called by the conversion after all other fields are loaded, with the + /// local namespace value provided by . + /// + /// The local namespace value provided by . + /// The CLR value that would normally have been directly returned from or . + internal delegate object? ConvertFromDelayedLoad(string localNamespace); +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/DatastoreValueConvertFromContext.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/DatastoreValueConvertFromContext.cs new file mode 100644 index 00000000..659342e1 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/DatastoreValueConvertFromContext.cs @@ -0,0 +1,10 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value.Context +{ + /// + /// Provides additional context when converting a Datastore value into a CLR value. 
+ /// + internal class DatastoreValueConvertFromContext : ClrValueConvertFromContext + { + public required string ModelNamespace { get; init; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/DatastoreValueConvertToContext.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/DatastoreValueConvertToContext.cs new file mode 100644 index 00000000..d58f48a3 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/DatastoreValueConvertToContext.cs @@ -0,0 +1,17 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value.Context +{ + using Redpoint.CloudFramework.Models; + using Google.Cloud.Datastore.V1; + + /// + /// Provides additional context when a CLR value into a Datastore value. + /// + internal class DatastoreValueConvertToContext : ClrValueConvertFromContext + { + public required string ModelNamespace { get; init; } + + public required Model Model { get; init; } + + public required Entity Entity { get; init; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/JsonValueConvertFromContext.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/JsonValueConvertFromContext.cs new file mode 100644 index 00000000..b5b5a362 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/JsonValueConvertFromContext.cs @@ -0,0 +1,10 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value.Context +{ + /// + /// Provides additional context when converting a JSON value into a CLR value. 
+ /// + internal class JsonValueConvertFromContext : ClrValueConvertFromContext + { + public required string ModelNamespace { get; init; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/JsonValueConvertToContext.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/JsonValueConvertToContext.cs new file mode 100644 index 00000000..d65ee3b0 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/Context/JsonValueConvertToContext.cs @@ -0,0 +1,14 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value.Context +{ + using Redpoint.CloudFramework.Models; + + /// + /// Provides additional context when a CLR value into a JSON value. + /// + internal class JsonValueConvertToContext : ClrValueConvertFromContext + { + public required string ModelNamespace { get; init; } + + public required Model Model { get; init; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/DefaultValueConverterProvider.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/DefaultValueConverterProvider.cs new file mode 100644 index 00000000..616322ee --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/DefaultValueConverterProvider.cs @@ -0,0 +1,50 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using System; + using System.Collections.Concurrent; + using System.Collections.Generic; + + internal class DefaultValueConverterProvider : IValueConverterProvider + { + private readonly Dictionary> _converters; + private readonly ConcurrentDictionary<(FieldType, Type), IValueConverter> _converterLookupCache; + + public DefaultValueConverterProvider( + IEnumerable valueConverters) + { + _converters = new Dictionary>(); + foreach (var valueConverter in valueConverters) + { + var fieldType = valueConverter.GetFieldType(); + if (!_converters.TryGetValue(fieldType, out List? 
matchedConverters)) + { + matchedConverters = new List(); + _converters.Add(fieldType, matchedConverters); + } + matchedConverters.Add(valueConverter); + } + _converterLookupCache = new ConcurrentDictionary<(FieldType, Type), IValueConverter>(); + } + + public IValueConverter GetConverter(FieldType fieldType, Type propertyClrType) + { + if (_converterLookupCache.TryGetValue((fieldType, propertyClrType), out var converter)) + { + return converter; + } + if (_converters.TryGetValue(fieldType, out var matchedConverters)) + { + foreach (var matchedConverter in matchedConverters) + { + if (matchedConverter.IsConverterForClrType(propertyClrType)) + { + _converterLookupCache.TryAdd((fieldType, propertyClrType), matchedConverter); + return matchedConverter; + } + } + } + throw new NotSupportedException($"Model field type '{fieldType}' and property CLR type '{propertyClrType}' has no matching value converter!"); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/DoubleValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/DoubleValueConverter.cs new file mode 100644 index 00000000..33d6e413 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/DoubleValueConverter.cs @@ -0,0 +1,87 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class DoubleValueConverter : IValueConverter + { + public FieldType GetFieldType() + { + return FieldType.Double; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(double) || + clrType == typeof(double?); + } + + public object? 
ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + return propertyClrDefaultValue; + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullDatastoreValue.DoubleValue; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + var nullable = (double?)propertyClrValue; + if (!nullable.HasValue) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + return new Value + { + DoubleValue = nullable.Value, + ExcludeFromIndexes = !propertyIndexed, + }; + } + } + + public object? ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullJsonToken.Value(); + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return new JValue((double)propertyNonNullClrValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/EmbeddedEntityValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/EmbeddedEntityValueConverter.cs new file mode 100644 index 00000000..fdaf5f0f --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/EmbeddedEntityValueConverter.cs @@ -0,0 +1,278 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using System; + using Type = 
System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Google.Cloud.Datastore.V1; + using Google.Protobuf; + using Google.Type; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Repository.Converters.Timestamp; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class EmbeddedEntityValueConverter : IValueConverter + { + private readonly IGlobalPrefix _globalPrefix; + private readonly IInstantTimestampConverter _instantTimestampConverter; + private readonly IInstantTimestampJsonConverter _instantTimestampJsonConverter; + + public EmbeddedEntityValueConverter( + IGlobalPrefix globalPrefix, + IInstantTimestampConverter instantTimestampConverter, + IInstantTimestampJsonConverter instantTimestampJsonConverter) + { + _globalPrefix = globalPrefix; + _instantTimestampConverter = instantTimestampConverter; + _instantTimestampJsonConverter = instantTimestampJsonConverter; + } + + public FieldType GetFieldType() + { + return FieldType.EmbeddedEntity; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(Entity); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + return propertyClrDefaultValue; + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullDatastoreValue.EntityValue; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? 
propertyClrValue, + bool propertyIndexed) + { + if (propertyClrValue == null) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + return new Value + { + EntityValue = (Entity)propertyClrValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + } + + public object? ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return FromJsonCacheToEmbeddedEntity(propertyNonNullJsonToken); + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return FromEmbeddedEntityToJsonCache((Entity)propertyNonNullClrValue); + } + + private Value FromJsonCacheToEmbeddedEntityValue(JToken input) + { + if (input == null) + { + return Value.ForNull(); + } + + switch (input.Type) + { + case JTokenType.None: + case JTokenType.Null: + return Value.ForNull(); + case JTokenType.Boolean: + return new Value(input.Value()); + case JTokenType.String: + return new Value(input.Value()); + case JTokenType.Integer: + return new Value(input.Value()); + case JTokenType.Float: + return new Value(input.Value()); + case JTokenType.Object: + JObject? obj = input.Value(); + if (obj == null) + { + return Value.ForNull(); + } + string? objType = (string?)obj["type"]; + JToken? objValue = obj["value"]; + if (objType == null) + { + return Value.ForNull(); + } + switch (objType) + { + case "blob": + if (objValue == null) + { + return Value.ForNull(); + } + return new Value(ByteString.FromBase64(objValue.Value())); + case "entity": + return new Value(FromJsonCacheToEmbeddedEntity(objValue)); + case "geopoint": + JToken? objLatitude = obj["latitude"]; + JToken? 
objLongitude = obj["longitude"]; + if (objLatitude == null || objLongitude == null) + { + return Value.ForNull(); + } + return new Value(new LatLng + { + Latitude = objLatitude.Value(), + Longitude = objLongitude.Value(), + }); + case "key": + JToken? objNs = obj["ns"]; + if (objNs == null || objValue == null) + { + return Value.ForNull(); + } + string? strNs = objNs.Value(); + string? strValue = objValue.Value(); + if (strNs == null || strValue == null) + { + return Value.ForNull(); + } + return new Value(_globalPrefix.ParseInternal(strNs, strValue)); + case "timestamp": + return new Value(_instantTimestampConverter.FromNodaTimeInstantToDatastoreValue(_instantTimestampJsonConverter.FromJsonCacheToNodaTimeInstant(objValue), false)); + default: + throw new InvalidOperationException("Unsupported serialized entity type."); + } + case JTokenType.Array: + var arr = new ArrayValue(); + var jArray = input.Value(); + if (jArray != null) + { + foreach (var elem in jArray) + { + arr.Values.Add(FromJsonCacheToEmbeddedEntityValue(elem)); + } + } + return new Value(arr); + default: + throw new InvalidOperationException("Unsupported serialized entity type."); + } + } + + private Entity? FromJsonCacheToEmbeddedEntity(JToken? obj) + { + if (obj == null || obj?.Type == JTokenType.Null || obj?.Type != JTokenType.Object) + { + return null; + } + + var entity = new Entity(); + foreach (var kv in obj.Cast()) + { + entity.Properties.Add(kv.Name, FromJsonCacheToEmbeddedEntityValue(kv.Value)); + } + return entity; + } + + private JToken? 
FromEmbeddedEntityValueToJsonCache(Value value) + { + switch (value.ValueTypeCase) + { + case Value.ValueTypeOneofCase.None: + return null; + case Value.ValueTypeOneofCase.NullValue: + return JValue.CreateNull(); + case Value.ValueTypeOneofCase.BooleanValue: + return new JValue(value.BooleanValue); + case Value.ValueTypeOneofCase.StringValue: + return new JValue(value.StringValue); + case Value.ValueTypeOneofCase.IntegerValue: + return new JValue(value.IntegerValue); + case Value.ValueTypeOneofCase.DoubleValue: + return new JValue(value.DoubleValue); + case Value.ValueTypeOneofCase.ArrayValue: + var array = new JArray(); + foreach (var element in value.ArrayValue.Values) + { + array.Add(FromEmbeddedEntityValueToJsonCache(element)!); + } + return array; + case Value.ValueTypeOneofCase.BlobValue: + var blob = new JObject(); + blob["type"] = "blob"; + blob["value"] = value.BlobValue.ToBase64(); + return blob; + case Value.ValueTypeOneofCase.EntityValue: + var nestedEntity = new JObject(); + nestedEntity["type"] = "entity"; + nestedEntity["value"] = FromEmbeddedEntityToJsonCache(value.EntityValue); + return nestedEntity; + case Value.ValueTypeOneofCase.GeoPointValue: + var geo = new JObject(); + geo["type"] = "geopoint"; + geo["latitude"] = value.GeoPointValue.Latitude; + geo["longitude"] = value.GeoPointValue.Longitude; + return geo; + case Value.ValueTypeOneofCase.KeyValue: + var key = new JObject(); + key["type"] = "key"; + key["ns"] = value.KeyValue.PartitionId.NamespaceId; + key["value"] = _globalPrefix.CreateInternal(value.KeyValue); + return key; + case Value.ValueTypeOneofCase.TimestampValue: + var ts = new JObject(); + ts["type"] = "timestamp"; + ts["value"] = _instantTimestampJsonConverter.FromNodaTimeInstantToJsonCache(_instantTimestampConverter.FromDatastoreValueToNodaTimeInstant(value.TimestampValue)); + return ts; + default: + throw new InvalidOperationException("Unsupported property type on embedded entity value."); + } + } + + private JToken 
FromEmbeddedEntityToJsonCache(Entity entity) + { + if (entity == null) + { + return JValue.CreateNull(); + } + + var obj = new JObject(); + foreach (var prop in entity.Properties) + { + obj[prop.Key] = FromEmbeddedEntityValueToJsonCache(prop.Value); + } + return obj; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/GeopointValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/GeopointValueConverter.cs new file mode 100644 index 00000000..7d9dbbbc --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/GeopointValueConverter.cs @@ -0,0 +1,152 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Google.Type; + using Redpoint.CloudFramework.Repository.Geographic; + using System.Globalization; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class GeopointValueConverter : IValueConverter + { + public FieldType GetFieldType() + { + return FieldType.Geopoint; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(LatLng); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + return propertyClrDefaultValue; + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullDatastoreValue.GeoPointValue; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? 
propertyClrValue, + bool propertyIndexed) + { + var geopoint = (LatLng?)propertyClrValue; + Value result; + if (geopoint == null) + { + result = new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + + if (propertyIndexed) + { + var geomodel = context.Model as IGeoModel; + if (geomodel != null) + { + var geopointFieldLengths = geomodel.GetHashKeyLengthsForGeopointFields(); + if (geopointFieldLengths.ContainsKey(propertyName)) + { + context.Entity[propertyName + GeoConstants.GeoHashPropertySuffix] = new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = false, + }; + context.Entity[propertyName + GeoConstants.HashKeyPropertySuffix] = new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = false, + }; + } + } + } + } + else + { + result = new Value + { + GeoPointValue = geopoint, + ExcludeFromIndexes = !propertyIndexed, + }; + + if (propertyIndexed) + { + var geomodel = context.Model as IGeoModel; + if (geomodel != null) + { + var geopointFieldLengths = geomodel.GetHashKeyLengthsForGeopointFields(); + if (geopointFieldLengths.TryGetValue(propertyName, out ushort geopointKeyLength)) + { + var geohash = S2Manager.GenerateGeohash(geopoint); + var geohashkey = S2Manager.GenerateGeohashKey(geohash, geopointKeyLength); + + context.Entity[propertyName + GeoConstants.GeoHashPropertySuffix] = new Value + { + StringValue = geohash.ToString(CultureInfo.InvariantCulture), + ExcludeFromIndexes = false, + }; + context.Entity[propertyName + GeoConstants.HashKeyPropertySuffix] = new Value + { + IntegerValue = (long)geohashkey, + ExcludeFromIndexes = false, + }; + } + } + } + } + return result; + } + + public object? 
ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + if (propertyNonNullJsonToken == null || propertyNonNullJsonToken?.Type == JTokenType.Null) + { + return null; + } + + return new LatLng + { + Latitude = propertyNonNullJsonToken!["lat"]?.Value() ?? 0, + Longitude = propertyNonNullJsonToken!["long"]?.Value() ?? 0, + }; + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + var geopoint = (LatLng)propertyNonNullClrValue; + + var obj = new JObject(); + obj["lat"] = (double)geopoint.Latitude; + obj["long"] = (double)geopoint.Longitude; + return obj; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/GlobalKeyArrayValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/GlobalKeyArrayValueConverter.cs new file mode 100644 index 00000000..c9e484a4 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/GlobalKeyArrayValueConverter.cs @@ -0,0 +1,151 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + using System.Collections; + using System.Collections.Generic; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Prefix; + + internal class GlobalKeyArrayValueConverter : BaseArrayValueConverter + { + private readonly IGlobalPrefix _globalPrefix; + + public GlobalKeyArrayValueConverter(IGlobalPrefix globalPrefix) + { + _globalPrefix = globalPrefix; + } + + public override FieldType GetFieldType() + { + return FieldType.GlobalKeyArray; + } + + public override bool IsConverterForClrType(Type 
propertyClrArrayType) + { + return propertyClrArrayType == typeof(Key[]) || + propertyClrArrayType == typeof(IReadOnlyList) || + propertyClrArrayType == typeof(List); + } + + protected override Type GetElementType(Type propertyClrArrayType) + { + return typeof(Key); + } + + protected override object ConstructClrValueFromArrayList( + Type propertyClrArrayType, + ArrayList arrayList) + { + return arrayList.Cast().ToArray(); + } + + protected override object? ConvertFromClrDefaultElementValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + object? propertyNonNullClrDefaultElementValue) + { + throw new InvalidOperationException("FieldType.GlobalKeyArray does not support default values. These property must be nullable and omit [Default]."); + } + + protected override object? ConvertFromDatastoreElementValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + Value propertyNonNullDatastoreElementValue) + { + var globalKeyValue = propertyNonNullDatastoreElementValue.KeyValue; + + if (string.IsNullOrEmpty(context.ModelNamespace)) + { + throw new InvalidOperationException("global-key properties can not be used on entities inside the global namespace"); + } + if (globalKeyValue != null && !string.IsNullOrEmpty(globalKeyValue.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected in array element."); + } + + return globalKeyValue; + } + + protected override Value ConvertToDatastoreElementValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue, + bool propertyIndexed) + { + var key = (Key)propertyNonNullClrElementValue; + + if (context.Model.Key != null && string.IsNullOrEmpty(context.Model.Key.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Attempted to use 
'global-key' field type in entity that is in the global namespace."); + } + + if (!string.IsNullOrEmpty(key.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Non-global-namespace data write for key property '" + propertyName + "' in array element."); + } + + return new Value + { + KeyValue = key, + ExcludeFromIndexes = !propertyIndexed, + }; + } + + protected override object? ConvertFromJsonElementToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + JToken propertyNonNullJsonElementToken) + { + var globalIdStr = propertyNonNullJsonElementToken.Value(); + if (globalIdStr == null) + { + return null; + } + else + { + var globalKeyValue = _globalPrefix.ParseInternal(string.Empty, globalIdStr); + + if (string.IsNullOrEmpty(context.ModelNamespace)) + { + throw new InvalidOperationException("global-key properties can not be used on entities inside the global namespace."); + } + + if (globalKeyValue != null && !string.IsNullOrEmpty(globalKeyValue.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected in array element."); + } + + return globalKeyValue; + } + } + + protected override JToken ConvertFromJsonElementValue( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue) + { + var globalValue = (Key)propertyNonNullClrElementValue; + + if (string.IsNullOrEmpty(context.Model.Key.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Attempted to use 'global-key' field type in entity that is in the global namespace."); + } + + if (!string.IsNullOrEmpty(globalValue.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Non-global-namespace data write for key property '" + propertyName + "' in array element."); + } + + return _globalPrefix.CreateInternal(globalValue); + } + } +} diff --git 
a/UET/Redpoint.CloudFramework/Repository/Converters/Value/GlobalKeyValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/GlobalKeyValueConverter.cs new file mode 100644 index 00000000..880982ce --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/GlobalKeyValueConverter.cs @@ -0,0 +1,148 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using System; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class GlobalKeyValueConverter : IValueConverter + { + private readonly IGlobalPrefix _globalPrefix; + + public GlobalKeyValueConverter(IGlobalPrefix globalPrefix) + { + _globalPrefix = globalPrefix; + } + + public FieldType GetFieldType() + { + return FieldType.GlobalKey; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(Key); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + throw new InvalidOperationException("FieldType.GlobalKey does not support default values. These property must be nullable and omit [Default]."); + } + + public object? 
ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var globalKeyValue = propertyNonNullDatastoreValue.KeyValue; + + if (string.IsNullOrEmpty(context.ModelNamespace)) + { + throw new InvalidOperationException("global-key properties can not be used on entities inside the global namespace"); + } + if (globalKeyValue != null && !string.IsNullOrEmpty(globalKeyValue.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected"); + } + + return globalKeyValue; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + var keyNullable = (Key?)propertyClrValue; + if (keyNullable == null) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + if (context.Model.Key != null && string.IsNullOrEmpty(context.Model.Key.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Attempted to use 'global-key' in entity that is in the global namespace"); + } + + if (!string.IsNullOrEmpty(keyNullable.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Potential cross-namespace data write for key property '" + propertyName + "'"); + } + + return new Value + { + KeyValue = keyNullable, + ExcludeFromIndexes = !propertyIndexed, + }; + } + } + + public object? 
ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var globalIdStr = propertyNonNullJsonToken.Value(); + if (globalIdStr == null) + { + return null; + } + else + { + var globalKeyValue = _globalPrefix.ParseInternal(string.Empty, globalIdStr); + + if (string.IsNullOrEmpty(context.ModelNamespace)) + { + throw new InvalidOperationException("global-key properties can not be used on entities inside the global namespace"); + } + if (globalKeyValue != null && !string.IsNullOrEmpty(globalKeyValue.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected"); + } + + return globalKeyValue; + } + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + var globalValue = (Key)propertyNonNullClrValue; + + if (string.IsNullOrEmpty(context.Model.Key.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Attempted to use 'global-key' in entity that is in the global namespace"); + } + + if (!string.IsNullOrEmpty(globalValue.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Value for 'global-key' is not a key referencing an entity in the global namespace"); + } + + return _globalPrefix.CreateInternal(globalValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/IValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/IValueConverter.cs new file mode 100644 index 00000000..07d69971 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/IValueConverter.cs @@ -0,0 +1,127 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using 
Redpoint.StringEnum; + using Type = System.Type; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + /// + /// Represents a converter that can convert CLR values into Datastore and JSON values and vice versa. + /// + internal interface IValueConverter + { + /// + /// Returns the that this converter handles. + /// + /// + FieldType GetFieldType(); + + /// + /// Returns whether this converter should handle the specified CLR type. The property must already have set to the result of for this converter to be used. + /// + /// The CLR type of the property on the .NET model. + /// If true, this converter should handle this property. + bool IsConverterForClrType(Type clrType); + + /// + /// Converts a CLR value from a [Default] attribute into the real CLR value. Non-constant + /// expressions can not be provided to the [Default] attribute, so this allows the default + /// constant value to be converted to the desired type for types such as . + /// + /// + /// This method does not receive null CLR values, as it is not possible to set null as the default via a [Default] attribute (in this case, the [Default] attribute should be omitted entirely). + /// + /// The conversion context. + /// The name of the property in .NET and Datastore. + /// The CLR (.NET) type of the property. + /// The default value provided to the [Default] attribute. + /// The real CLR value to use as the default for model properties when Datastore or the JSON cache contains a null value. + object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue); + + /// + /// Converts a Datastore value into a CLR value. + /// + /// + /// This method does not receive Datastore values that have + /// set to true, as these are automatically converted to the null CLR value. + /// + /// The conversion context. 
+ /// The name of the property in .NET and Datastore. + /// The CLR (.NET) type of the property. + /// The non-null Datastore value to convert. + /// If this property needs to be delay loaded, this callback allows this converter to register a delay load callback for it. + /// The CLR value for the property. + object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad); + + /// + /// Convert a CLR value into a Datastore value. + /// + /// + /// This method may receive null CLR values, as some Datastore types require setting + /// additional entity properties even for null values of the given . + /// + /// The conversion context. + /// The name of the property in .NET and Datastore. + /// The CLR (.NET) type of the property. + /// The possibly null CLR (.NET) value to convert. + /// If true, this property is indexed on the entity. + /// The Datastore value to store in this property. + Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed); + + /// + /// Converts a JSON value into a CLR value. + /// + /// + /// This method does not receive JSON values that are null (), + /// as these are automatically converted to the null CLR value. + /// + /// The conversion context. + /// The name of the property in .NET and Datastore. + /// The CLR (.NET) type of the property. + /// The non-null JSON token to convert. + /// If this property needs to be delay loaded, this callback allows this converter to register a delay load callback for it. + /// The CLR value for the property. + object? 
ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad); + + /// + /// Converts a CLR value into a JSON value. + /// + /// + /// Unlike , + /// this method can not receive null CLR values as they are automatically converted to the + /// result of . Value converters do not get the opportunity to set + /// additional fields in the JSON cache for null values. + /// + /// The conversion context. + /// The name of the property in .NET and Datastore. + /// The CLR (.NET) type of the property. + /// The non-null CLR (.NET) value to convert. + /// The JSON token to store in the cache for this property. + JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/IValueConverterProvider.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/IValueConverterProvider.cs new file mode 100644 index 00000000..2aa3e4d3 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/IValueConverterProvider.cs @@ -0,0 +1,12 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using System; + + internal interface IValueConverterProvider + { + IValueConverter GetConverter( + FieldType fieldType, + Type propertyClrType); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/IntegerValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/IntegerValueConverter.cs new file mode 100644 index 00000000..8c6db9de --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/IntegerValueConverter.cs @@ -0,0 +1,87 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = 
System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class IntegerValueConverter : IValueConverter + { + public FieldType GetFieldType() + { + return FieldType.Integer; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(long) || + clrType == typeof(long?); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + return propertyClrDefaultValue; + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullDatastoreValue.IntegerValue; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + var nullable = (long?)propertyClrValue; + if (!nullable.HasValue) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + return new Value + { + IntegerValue = nullable.Value, + ExcludeFromIndexes = !propertyIndexed, + }; + } + } + + public object? 
ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullJsonToken.Value(); + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return new JValue((long)propertyNonNullClrValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/JsonValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/JsonValueConverter.cs new file mode 100644 index 00000000..94fa0433 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/JsonValueConverter.cs @@ -0,0 +1,94 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using System; + using Type = System.Type; + using Value = Google.Cloud.Datastore.V1.Value; + using Newtonsoft.Json; + using Redpoint.CloudFramework.Repository.Converters.JsonHelpers; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class JsonValueConverter : IValueConverter + { + public FieldType GetFieldType() + { + return FieldType.Json; + } + + public bool IsConverterForClrType(Type clrType) + { + return true; + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + throw new InvalidOperationException("FieldType.Json does not support default values. These property must be nullable and omit [Default]."); + } + + public object? 
ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var rawJson = propertyNonNullDatastoreValue.StringValue; + if (rawJson == null) + { + // @note: I don't think this is possible; a null value would be NullValue instead. + return null; + } + else + { + return JsonConvert.DeserializeObject(rawJson, propertyClrType, new VersionedJsonConverter(), new NodaTimeInstantJsonConverter()); + } + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + return new Value + { + StringValue = JsonConvert.SerializeObject(propertyClrValue, new VersionedJsonConverter(), new NodaTimeInstantJsonConverter()), + ExcludeFromIndexes = true /* no meaningful way to search this data in Datastore */ + }; + } + + public object? ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + string? 
rawJson = propertyNonNullJsonToken.Value(); + if (rawJson == null) + { + return null; + } + else + { + return JsonConvert.DeserializeObject(rawJson, propertyClrType, new +VersionedJsonConverter(), new NodaTimeInstantJsonConverter()); + } + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return JsonConvert.SerializeObject(propertyNonNullClrValue, new VersionedJsonConverter(), new NodaTimeInstantJsonConverter()); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/KeyArrayValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/KeyArrayValueConverter.cs new file mode 100644 index 00000000..1b73dfba --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/KeyArrayValueConverter.cs @@ -0,0 +1,132 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + using System.Collections; + using System.Collections.Generic; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Prefix; + + internal class KeyArrayValueConverter : BaseArrayValueConverter + { + private readonly IGlobalPrefix _globalPrefix; + + public KeyArrayValueConverter(IGlobalPrefix globalPrefix) + { + _globalPrefix = globalPrefix; + } + + public override FieldType GetFieldType() + { + return FieldType.KeyArray; + } + + public override bool IsConverterForClrType(Type propertyClrArrayType) + { + return propertyClrArrayType == typeof(Key[]) || + propertyClrArrayType == typeof(IReadOnlyList) || + propertyClrArrayType == typeof(List); + } + + protected override Type GetElementType(Type propertyClrArrayType) + { + return typeof(Key); + } + + protected override object ConstructClrValueFromArrayList( + 
Type propertyClrArrayType, + ArrayList arrayList) + { + return arrayList.Cast().ToArray(); + } + + protected override object? ConvertFromClrDefaultElementValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + object? propertyNonNullClrDefaultElementValue) + { + throw new InvalidOperationException("FieldType.KeyArray does not support default values. These property must be nullable and omit [Default]."); + } + + protected override object? ConvertFromDatastoreElementValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + Value propertyNonNullDatastoreElementValue) + { + var keyValue = propertyNonNullDatastoreElementValue.KeyValue; + + if (keyValue != null && keyValue.PartitionId.NamespaceId != context.ModelNamespace) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected in array element."); + } + + return keyValue; + } + + protected override Value ConvertToDatastoreElementValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue, + bool propertyIndexed) + { + var key = (Key)propertyNonNullClrElementValue; + + if (key.PartitionId.NamespaceId != context.ModelNamespace) + { + throw new InvalidOperationException("Cross-namespace data write for key property '" + propertyName + "' in array element."); + } + + return new Value + { + KeyValue = key, + ExcludeFromIndexes = !propertyIndexed, + }; + } + + protected override object? 
ConvertFromJsonElementToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + JToken propertyNonNullJsonElementToken) + { + var idStr = propertyNonNullJsonElementToken.Value(); + if (idStr == null) + { + return null; + } + else + { + var keyValue = _globalPrefix.ParseInternal(context.ModelNamespace, idStr); + + if (keyValue != null && keyValue.PartitionId.NamespaceId != context.ModelNamespace) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected in array element."); + } + + return keyValue; + } + } + + protected override JToken ConvertFromJsonElementValue( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue) + { + var keyValue = (Key)propertyNonNullClrElementValue; + + if (keyValue.PartitionId.NamespaceId != context.ModelNamespace) + { + throw new InvalidOperationException("Cross-namespace data write for key property '" + propertyName + "' in array element."); + } + + return _globalPrefix.CreateInternal(keyValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/KeyValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/KeyValueConverter.cs new file mode 100644 index 00000000..deaa9b96 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/KeyValueConverter.cs @@ -0,0 +1,130 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using System; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class KeyValueConverter : IValueConverter + { + private readonly IGlobalPrefix _globalPrefix; 
+ + public KeyValueConverter(IGlobalPrefix globalPrefix) + { + _globalPrefix = globalPrefix; + } + + public FieldType GetFieldType() + { + return FieldType.Key; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(Key); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + throw new InvalidOperationException("FieldType.Key does not support default values. These property must be nullable and omit [Default]."); + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var keyValue = propertyNonNullDatastoreValue.KeyValue; + + if (keyValue != null && keyValue.PartitionId.NamespaceId != context.ModelNamespace) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected"); + } + + return keyValue; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + var keyNullable = (Key?)propertyClrValue; + if (keyNullable == null) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + if (keyNullable.PartitionId.NamespaceId != context.ModelNamespace) + { + throw new InvalidOperationException("Potential cross-namespace data write for key property '" + propertyName + "'"); + } + + return new Value + { + KeyValue = keyNullable, + ExcludeFromIndexes = !propertyIndexed, + }; + } + } + + public object? 
ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var idStr = propertyNonNullJsonToken.Value(); + if (idStr == null) + { + return null; + } + else + { + var keyValue = _globalPrefix.ParseInternal(context.ModelNamespace, idStr); + + if (keyValue != null && keyValue.PartitionId.NamespaceId != context.ModelNamespace) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected"); + } + + return keyValue; + } + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + var keyValue = (Key)propertyNonNullClrValue; + + if (keyValue.PartitionId.NamespaceId != context.ModelNamespace) + { + throw new InvalidOperationException("Attempted to store cross-namespace key reference in 'key' property"); + } + + return _globalPrefix.CreateInternal(keyValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/LocalKeyValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/LocalKeyValueConverter.cs new file mode 100644 index 00000000..0ea884cd --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/LocalKeyValueConverter.cs @@ -0,0 +1,169 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using System; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class LocalKeyValueConverter : IValueConverter + { + private readonly IGlobalPrefix _globalPrefix; + + public 
LocalKeyValueConverter(IGlobalPrefix globalPrefix) + { + _globalPrefix = globalPrefix; + } + + public FieldType GetFieldType() + { + return FieldType.LocalKey; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(Key); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + throw new InvalidOperationException("FieldType.LocalKey does not support default values. These property must be nullable and omit [Default]."); + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var localKeyValue = propertyNonNullDatastoreValue.KeyValue; + + if (!string.IsNullOrEmpty(context.ModelNamespace)) + { + throw new InvalidOperationException("local-key properties can not be used on entities outside the global namespace"); + } + + // We can't assign yet because we need to check that the loaded namespace value is + // valid for GetDatastoreNamespaceForLocalKeys, but we can't use that method + // until everything else has been loaded. + addConvertFromDelayedLoad((@localNamespace) => + { + if (localKeyValue != null && localKeyValue.PartitionId.NamespaceId != localNamespace) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected"); + } + + return localKeyValue; + }); + + return null; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? 
propertyClrValue, + bool propertyIndexed) + { + var keyNullable = (Key?)propertyClrValue; + if (keyNullable == null) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + if (context.Model.Key != null && !string.IsNullOrEmpty(context.Model.Key.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Attempted to use 'local-key' in entity that is not in the global namespace"); + } + + if (keyNullable.PartitionId.NamespaceId != context.Model.GetDatastoreNamespaceForLocalKeys()) + { + throw new InvalidOperationException( + "Potential cross-namespace data write for key property '" + propertyName + + "' (got '" + keyNullable.PartitionId.NamespaceId + "', expected '" + context.Model.GetDatastoreNamespaceForLocalKeys() + "')" + ); + } + + return new Value + { + KeyValue = keyNullable, + ExcludeFromIndexes = !propertyIndexed, + }; + } + } + + public object? ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var localIdStr = propertyJsonToken.Value(); + if (localIdStr == null) + { + return null; + } + else + { + var localKeyValue = _globalPrefix.ParseInternal(context.ModelNamespace, localIdStr); + + if (!string.IsNullOrEmpty(context.ModelNamespace)) + { + throw new InvalidOperationException("local-key properties can not be used on entities outside the global namespace"); + } + + // We can't call GetDatastoreNamespaceForLocalKeys on the model until we've set all the + // other properties, since determining the Datastore namespace for local keys might + // rely on other properties. 
+ addConvertFromDelayedLoad((localNamespace) => + { + if (localKeyValue != null && localKeyValue.PartitionId.NamespaceId != localNamespace) + { + throw new InvalidOperationException("Unable to load property '" + propertyName + "' from entity; cross-namespace reference detected"); + } + + return localKeyValue; + }); + + return null; + } + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + var localValue = (Key)propertyNonNullClrValue; + + if (!string.IsNullOrEmpty(context.Model.Key.PartitionId.NamespaceId)) + { + throw new InvalidOperationException("Attempted to use 'local-key' in entity that is not in the global namespace"); + } + + if (localValue.PartitionId.NamespaceId != context.Model.GetDatastoreNamespaceForLocalKeys()) + { + throw new InvalidOperationException("Value for 'local-key' is not a key referencing an entity in the expected non-global namespace"); + } + + return _globalPrefix.CreateInternal(localValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringArrayValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringArrayValueConverter.cs new file mode 100644 index 00000000..861f3665 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringArrayValueConverter.cs @@ -0,0 +1,87 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + using System.Collections; + using System.Collections.Generic; + + internal class StringArrayValueConverter : BaseArrayValueConverter + { + public override FieldType GetFieldType() + { + return FieldType.StringArray; + } + + public override bool IsConverterForClrType(Type propertyClrArrayType) + { + return 
propertyClrArrayType == typeof(string[]) || + propertyClrArrayType == typeof(IReadOnlyList) || + propertyClrArrayType == typeof(List); + } + + protected override Type GetElementType(Type propertyClrArrayType) + { + return typeof(string); + } + + protected override object ConstructClrValueFromArrayList( + Type propertyClrArrayType, + ArrayList arrayList) + { + return arrayList.Cast().ToArray(); + } + + protected override object? ConvertFromClrDefaultElementValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrDefaultElementValue) + { + return propertyNonNullClrDefaultElementValue; + } + + protected override object? ConvertFromDatastoreElementValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + Value propertyNonNullDatastoreElementValue) + { + return propertyNonNullDatastoreElementValue.StringValue; + } + + protected override Value ConvertToDatastoreElementValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue, + bool propertyIndexed) + { + return new Value + { + StringValue = (string)propertyNonNullClrElementValue, + ExcludeFromIndexes = !propertyIndexed + }; + } + + protected override object? 
ConvertFromJsonElementToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + JToken propertyNonNullJsonElementToken) + { + return propertyNonNullJsonElementToken.Value(); + } + + protected override JToken ConvertFromJsonElementValue( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue) + { + return new JValue(propertyNonNullClrElementValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringEnumArrayValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringEnumArrayValueConverter.cs new file mode 100644 index 00000000..a09c48de --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringEnumArrayValueConverter.cs @@ -0,0 +1,170 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + using System.Collections; + using System.Collections.Generic; + using Redpoint.StringEnum; + + internal class StringEnumArrayValueConverter : BaseArrayValueConverter + { + public override FieldType GetFieldType() + { + return FieldType.StringArray; + } + + public override bool IsConverterForClrType(Type propertyClrArrayType) + { + if (propertyClrArrayType.IsArray) + { + return DynamicStringEnumValue.IsStringEnumValueType(propertyClrArrayType.GetElementType()!); + } + else if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(IReadOnlyList<>)) + { + return DynamicStringEnumValue.IsStringEnumValueType(propertyClrArrayType.GetGenericArguments()[0]); + } + else if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(List<>)) + { + return 
DynamicStringEnumValue.IsStringEnumValueType(propertyClrArrayType.GetGenericArguments()[0]); + } + return false; + } + + protected override Type GetElementType(Type propertyClrArrayType) + { + if (propertyClrArrayType.IsArray) + { + return propertyClrArrayType.GetElementType()!; + } + else if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(IReadOnlyList<>)) + { + return propertyClrArrayType.GetGenericArguments()[0]; + } + else if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(List<>)) + { + return propertyClrArrayType.GetGenericArguments()[0]; + } + else + { + throw new NotSupportedException($"Can't support {propertyClrArrayType.FullName} in StringEnumArrayValueConverter.GetElementType"); + } + } + + protected override object ConstructClrValueFromArrayList( + Type propertyClrArrayType, + ArrayList arrayList) + { + if (propertyClrArrayType.IsArray) + { + return DynamicStringEnumValue.ConstructArrayFromValues( + propertyClrArrayType.GetGenericArguments()[0], + arrayList); + } + else if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(IReadOnlyList<>)) + { + return DynamicStringEnumValue.ConstructListFromValues( + propertyClrArrayType.GetGenericArguments()[0], + arrayList); + } + else if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(List<>)) + { + return DynamicStringEnumValue.ConstructListFromValues( + propertyClrArrayType.GetGenericArguments()[0], + arrayList); + } + else + { + throw new NotSupportedException($"Can't support {propertyClrArrayType.FullName} in StringEnumArrayValueConverter.ConstructClrValueFromArrayList"); + } + } + + protected override object? 
ConvertFromClrDefaultElementValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrDefaultElementValue) + { + if (!DynamicStringEnumValue.TryParse(propertyClrElementType, propertyNonNullClrDefaultElementValue.ToString()!, out var parsedValue)) + { + throw new InvalidOperationException("Invalid default defined for property: " + propertyName + " (is not a permitted value for the StringEnum)"); + } + return parsedValue; + } + + protected override object? ConvertFromDatastoreElementValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + Value propertyNonNullDatastoreElementValue) + { + var rawValue = propertyNonNullDatastoreElementValue.StringValue; + if (rawValue == null) + { + return null; + } + else + { + if (!DynamicStringEnumValue.TryParse(propertyClrElementType, rawValue, out var parsedValue)) + { + // If we can't parse, ensure value is null. + parsedValue = null; + } + return parsedValue; + } + } + + protected override Value ConvertToDatastoreElementValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue, + bool propertyIndexed) + { + return new Value + { + StringValue = propertyNonNullClrElementValue.ToString() ?? string.Empty, + ExcludeFromIndexes = !propertyIndexed || (propertyNonNullClrElementValue.ToString() ?? string.Empty).Length > 700, + }; + } + + protected override object? ConvertFromJsonElementToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + JToken propertyNonNullJsonElementToken) + { + var rawValue = propertyNonNullJsonElementToken.Value(); + if (rawValue == null) + { + return null; + } + else + { + if (!DynamicStringEnumValue.TryParse(propertyClrElementType, rawValue, out var parsedValue)) + { + // If we can't parse, ensure value is null. 
+ parsedValue = null; + } + return parsedValue; + } + } + + protected override JToken ConvertFromJsonElementValue( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue) + { + return new JValue(propertyNonNullClrElementValue.ToString()); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringEnumSetValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringEnumSetValueConverter.cs new file mode 100644 index 00000000..970eee1d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringEnumSetValueConverter.cs @@ -0,0 +1,156 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + using System.Collections; + using System.Collections.Generic; + using Redpoint.StringEnum; + + internal class StringEnumSetValueConverter : BaseArrayValueConverter + { + public override FieldType GetFieldType() + { + return FieldType.StringArray; + } + + public override bool IsConverterForClrType(Type propertyClrArrayType) + { + if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(IReadOnlySet<>)) + { + return DynamicStringEnumValue.IsStringEnumValueType(propertyClrArrayType.GetGenericArguments()[0]); + } + else if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(HashSet<>)) + { + return DynamicStringEnumValue.IsStringEnumValueType(propertyClrArrayType.GetGenericArguments()[0]); + } + return false; + } + + protected override Type GetElementType(Type propertyClrArrayType) + { + if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(IReadOnlySet<>)) + { + return 
propertyClrArrayType.GetGenericArguments()[0]; + } + else if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(HashSet<>)) + { + return propertyClrArrayType.GetGenericArguments()[0]; + } + else + { + throw new NotSupportedException($"Can't support {propertyClrArrayType.FullName} in StringEnumSetValueConverter.GetElementType"); + } + } + + protected override object ConstructClrValueFromArrayList( + Type propertyClrArrayType, + ArrayList arrayList) + { + if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(IReadOnlySet<>)) + { + return DynamicStringEnumValue.ConstructSetFromValues( + propertyClrArrayType.GetGenericArguments()[0], + arrayList); + } + else if (propertyClrArrayType.IsGenericType && + propertyClrArrayType.GetGenericTypeDefinition() == typeof(HashSet<>)) + { + return DynamicStringEnumValue.ConstructSetFromValues( + propertyClrArrayType.GetGenericArguments()[0], + arrayList); + } + else + { + throw new NotSupportedException($"Can't support {propertyClrArrayType.FullName} in StringEnumSetValueConverter.ConstructClrValueFromArrayList"); + } + } + + protected override object? ConvertFromClrDefaultElementValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrDefaultElementValue) + { + if (!DynamicStringEnumValue.TryParse(propertyClrElementType, propertyNonNullClrDefaultElementValue.ToString()!, out var parsedValue)) + { + throw new InvalidOperationException("Invalid default defined for property: " + propertyName + " (is not a permitted value for the StringEnum)"); + } + return parsedValue; + } + + protected override object? 
ConvertFromDatastoreElementValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + Value propertyNonNullDatastoreElementValue) + { + var rawValue = propertyNonNullDatastoreElementValue.StringValue; + if (rawValue == null) + { + return null; + } + else + { + if (!DynamicStringEnumValue.TryParse(propertyClrElementType, rawValue, out var parsedValue)) + { + // If we can't parse, ensure value is null. + parsedValue = null; + } + return parsedValue; + } + } + + protected override Value ConvertToDatastoreElementValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue, + bool propertyIndexed) + { + return new Value + { + StringValue = propertyNonNullClrElementValue.ToString() ?? string.Empty, + ExcludeFromIndexes = !propertyIndexed || (propertyNonNullClrElementValue.ToString() ?? string.Empty).Length > 700, + }; + } + + protected override object? ConvertFromJsonElementToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + JToken propertyNonNullJsonElementToken) + { + var rawValue = propertyNonNullJsonElementToken.Value(); + if (rawValue == null) + { + return null; + } + else + { + if (!DynamicStringEnumValue.TryParse(propertyClrElementType, rawValue, out var parsedValue)) + { + // If we can't parse, ensure value is null. 
+ parsedValue = null; + } + return parsedValue; + } + } + + protected override JToken ConvertFromJsonElementValue( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue) + { + return new JValue(propertyNonNullClrElementValue.ToString()); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringEnumValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringEnumValueConverter.cs new file mode 100644 index 00000000..f79afd33 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringEnumValueConverter.cs @@ -0,0 +1,117 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Redpoint.StringEnum; + using System; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class StringEnumValueConverter : IValueConverter + { + public FieldType GetFieldType() + { + return FieldType.String; + } + + public bool IsConverterForClrType(Type clrType) + { + return DynamicStringEnumValue.IsStringEnumValueType(clrType); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + if (!DynamicStringEnumValue.TryParse(propertyClrType, propertyClrDefaultValue.ToString()!, out var parsedValue)) + { + throw new InvalidOperationException("Invalid default defined for property: " + propertyName + " (is not a permitted value for the StringEnum)"); + } + return parsedValue; + } + + public object? 
ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var rawValue = propertyNonNullDatastoreValue.StringValue; + if (rawValue == null) + { + return null; + } + else + { + if (!DynamicStringEnumValue.TryParse(propertyClrType, rawValue, out var parsedValue)) + { + // If we can't parse, ensure value is null. + parsedValue = null; + } + return parsedValue; + } + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + if (propertyClrValue == null) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + return new Value + { + StringValue = propertyClrValue.ToString() ?? string.Empty, + ExcludeFromIndexes = !propertyIndexed || (propertyClrValue.ToString() ?? string.Empty).Length > 700, + }; + } + } + + public object? ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var rawValue = propertyJsonToken.Value(); + if (rawValue == null) + { + return null; + } + else + { + if (!DynamicStringEnumValue.TryParse(propertyClrType, rawValue, out var parsedValue)) + { + // If we can't parse, ensure value is null. 
+ parsedValue = null; + } + return parsedValue; + } + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return new JValue(propertyNonNullClrValue.ToString()); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringValueConverter.cs new file mode 100644 index 00000000..fd9cf4cc --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/StringValueConverter.cs @@ -0,0 +1,85 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class StringValueConverter : IValueConverter + { + public FieldType GetFieldType() + { + return FieldType.String; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(string); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + return propertyClrDefaultValue; + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullDatastoreValue.StringValue; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? 
propertyClrValue, + bool propertyIndexed) + { + if (propertyClrValue == null) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + return new Value + { + StringValue = (string)propertyClrValue, + ExcludeFromIndexes = !propertyIndexed || ((string)propertyClrValue).Length > 700, + }; + } + } + + public object? ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyJsonToken.Value(); + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return new JValue((string)propertyNonNullClrValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/TimestampValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/TimestampValueConverter.cs new file mode 100644 index 00000000..876cdefa --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/TimestampValueConverter.cs @@ -0,0 +1,86 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using System; + using Type = System.Type; + using Value = Google.Cloud.Datastore.V1.Value; + using NodaTime; + using Redpoint.CloudFramework.Repository.Converters.Timestamp; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class TimestampValueConverter : IValueConverter + { + private readonly IInstantTimestampConverter _instantTimestampConverter; + private readonly IInstantTimestampJsonConverter _instantTimestampJsonConverter; + + public TimestampValueConverter( + IInstantTimestampConverter instantTimestampConverter, + IInstantTimestampJsonConverter instantTimestampJsonConverter) + { + _instantTimestampConverter = 
instantTimestampConverter; + _instantTimestampJsonConverter = instantTimestampJsonConverter; + } + + public FieldType GetFieldType() + { + return FieldType.Timestamp; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(Instant?); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + throw new InvalidOperationException("FieldType.Timestamp does not support default values. These property must be nullable and omit [Default]."); + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return _instantTimestampConverter.FromDatastoreValueToNodaTimeInstant(propertyNonNullDatastoreValue); + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + var instantNullable = (Instant?)propertyClrValue; + return _instantTimestampConverter.FromNodaTimeInstantToDatastoreValue( + instantNullable, + !propertyIndexed); + } + + public object? 
ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return _instantTimestampJsonConverter.FromJsonCacheToNodaTimeInstant(propertyJsonToken); + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return _instantTimestampJsonConverter.FromNodaTimeInstantToJsonCache((Instant?)propertyNonNullClrValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/UnsafeKeyValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/UnsafeKeyValueConverter.cs new file mode 100644 index 00000000..67ce50e1 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/UnsafeKeyValueConverter.cs @@ -0,0 +1,104 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using System; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class UnsafeKeyValueConverter : IValueConverter + { + private readonly IGlobalPrefix _globalPrefix; + + public UnsafeKeyValueConverter(IGlobalPrefix globalPrefix) + { + _globalPrefix = globalPrefix; + } + + public FieldType GetFieldType() + { + return FieldType.UnsafeKey; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(Key); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + throw new InvalidOperationException("FieldType.UnsafeKey does not support default values. 
These property must be nullable and omit [Default]."); + } + + public object? ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullDatastoreValue.KeyValue; + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + var keyNullable = (Key?)propertyClrValue; + if (keyNullable == null) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + return new Value + { + KeyValue = keyNullable, + ExcludeFromIndexes = !propertyIndexed, + }; + } + } + + public object? ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + var unsafeIdStr = propertyJsonToken.Value(); + if (unsafeIdStr == null) + { + return null; + } + else + { + return _globalPrefix.ParseInternal(string.Empty, unsafeIdStr); + } + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return _globalPrefix.CreateInternal((Key)propertyNonNullClrValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/UnsignedIntegerArrayValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/UnsignedIntegerArrayValueConverter.cs new file mode 100644 index 00000000..f8326c02 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/UnsignedIntegerArrayValueConverter.cs @@ -0,0 +1,87 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = 
System.Type; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + using System.Collections; + using System.Collections.Generic; + + internal class UnsignedIntegerArrayValueConverter : BaseArrayValueConverter + { + public override FieldType GetFieldType() + { + return FieldType.UnsignedIntegerArray; + } + + public override bool IsConverterForClrType(Type propertyClrArrayType) + { + return propertyClrArrayType == typeof(ulong[]) || + propertyClrArrayType == typeof(IReadOnlyList) || + propertyClrArrayType == typeof(List); + } + + protected override Type GetElementType(Type propertyClrArrayType) + { + return typeof(ulong); + } + + protected override object ConstructClrValueFromArrayList( + Type propertyClrArrayType, + ArrayList arrayList) + { + return arrayList.Cast().ToArray(); + } + + protected override object? ConvertFromClrDefaultElementValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrDefaultElementValue) + { + return propertyNonNullClrDefaultElementValue; + } + + protected override object? ConvertFromDatastoreElementValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + Value propertyNonNullDatastoreElementValue) + { + return unchecked((ulong)propertyNonNullDatastoreElementValue.IntegerValue); + } + + protected override Value ConvertToDatastoreElementValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue, + bool propertyIndexed) + { + return new Value + { + IntegerValue = unchecked((long)(ulong)propertyNonNullClrElementValue), + ExcludeFromIndexes = !propertyIndexed + }; + } + + protected override object? 
ConvertFromJsonElementToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrElementType, + JToken propertyNonNullJsonElementToken) + { + return propertyNonNullJsonElementToken.Value(); + } + + protected override JToken ConvertFromJsonElementValue( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrElementType, + object propertyNonNullClrElementValue) + { + return new JValue((ulong)propertyNonNullClrElementValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Converters/Value/UnsignedIntegerValueConverter.cs b/UET/Redpoint.CloudFramework/Repository/Converters/Value/UnsignedIntegerValueConverter.cs new file mode 100644 index 00000000..2042dc49 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Converters/Value/UnsignedIntegerValueConverter.cs @@ -0,0 +1,87 @@ +namespace Redpoint.CloudFramework.Repository.Converters.Value +{ + using Redpoint.CloudFramework.Models; + using Newtonsoft.Json.Linq; + using Type = System.Type; + using Google.Protobuf.WellKnownTypes; + using Value = Google.Cloud.Datastore.V1.Value; + using Redpoint.CloudFramework.Repository.Converters.Value.Context; + + internal class UnsignedIntegerValueConverter : IValueConverter + { + public FieldType GetFieldType() + { + return FieldType.UnsignedInteger; + } + + public bool IsConverterForClrType(Type clrType) + { + return clrType == typeof(ulong) || + clrType == typeof(ulong?); + } + + public object? ConvertFromClrDefaultValue( + ClrValueConvertFromContext context, + string propertyName, + Type propertyClrType, + object propertyClrDefaultValue) + { + return propertyClrDefaultValue; + } + + public object? 
ConvertFromDatastoreValue( + DatastoreValueConvertFromContext context, + string propertyName, + Type propertyClrType, + Value propertyNonNullDatastoreValue, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return unchecked((ulong)propertyNonNullDatastoreValue.IntegerValue); + } + + public Value ConvertToDatastoreValue( + DatastoreValueConvertToContext context, + string propertyName, + Type propertyClrType, + object? propertyClrValue, + bool propertyIndexed) + { + var nullable = (ulong?)propertyClrValue; + if (!nullable.HasValue) + { + return new Value + { + NullValue = NullValue.NullValue, + ExcludeFromIndexes = !propertyIndexed, + }; + } + else + { + return new Value + { + IntegerValue = unchecked((long)nullable.Value), + ExcludeFromIndexes = !propertyIndexed, + }; + } + } + + public object? ConvertFromJsonToken( + JsonValueConvertFromContext context, + string propertyName, + Type propertyClrType, + JToken propertyNonNullJsonToken, + AddConvertFromDelayedLoad addConvertFromDelayedLoad) + { + return propertyNonNullJsonToken.Value(); + } + + public JToken ConvertToJsonToken( + JsonValueConvertToContext context, + string propertyName, + Type propertyClrType, + object propertyNonNullClrValue) + { + return new JValue((ulong)propertyNonNullClrValue); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Datastore/DatastoreGlobalRepository.cs b/UET/Redpoint.CloudFramework/Repository/Datastore/DatastoreGlobalRepository.cs new file mode 100644 index 00000000..67c53024 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Datastore/DatastoreGlobalRepository.cs @@ -0,0 +1,225 @@ +namespace Redpoint.CloudFramework.Repository.Datastore +{ + using Google.Cloud.Datastore.V1; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading.Tasks; + using Redpoint.CloudFramework.Models; + using System.Threading; + using System.Linq.Expressions; + using Redpoint.CloudFramework.Repository.Layers; + using 
Redpoint.CloudFramework.Repository.Converters.Timestamp; + using Redpoint.CloudFramework.Repository.Transaction; + using Redpoint.CloudFramework.Repository.Pagination; + using Redpoint.CloudFramework.Repository.Metrics; + using Microsoft.Extensions.Configuration; + using Redpoint.CloudFramework.Collections.Batching; + + internal class DatastoreGlobalRepository : IGlobalRepository + { + private readonly IRedisCacheRepositoryLayer _redisCacheRepositoryLayer; + private readonly IDatastoreRepositoryLayer _datastoreRepositoryLayer; + private readonly IConfiguration _configuration; + + // NOTE: This is used by the legacy extension methods in GlobalRepositoryLegacyExtensions. + internal readonly IInstantTimestampConverter _instantTimestampConverter; + + public DatastoreGlobalRepository( + IRedisCacheRepositoryLayer redisCacheRepositoryLayer, + IDatastoreRepositoryLayer datastoreRepositoryLayer, + IInstantTimestampConverter instantTimestampConverter, + IConfiguration configuration) + { + _redisCacheRepositoryLayer = redisCacheRepositoryLayer; + _datastoreRepositoryLayer = datastoreRepositoryLayer; + _instantTimestampConverter = instantTimestampConverter; + _configuration = configuration; + } + + internal IRepositoryLayer Layer + { + get + { + return _redisCacheRepositoryLayer; + } + } + + public IBatchedAsyncEnumerable QueryAsync( + string @namespace, + Expression> where, + Expression>? order = null, + int? limit = null, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.QueryAsync(@namespace, where, order, limit, transaction, metrics, cancellationToken); + } + + public Task> QueryPaginatedAsync( + string @namespace, + PaginatedQueryCursor cursor, + int limit, + Expression> where, + Expression>? order = null, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.QueryPaginatedAsync(@namespace, cursor, limit, where, order, transaction, metrics, cancellationToken); + } + + public Task LoadAsync( + string @namespace, + Key key, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.LoadAsync(@namespace, key, transaction, metrics, cancellationToken); + } + + public IBatchedAsyncEnumerable> LoadAsync( + string @namespace, + IAsyncEnumerable keys, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.LoadAsync(@namespace, keys, transaction, metrics, cancellationToken); + } + + public IAsyncEnumerable> LoadAcrossNamespacesAsync( + IAsyncEnumerable keys, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.LoadAcrossNamespacesAsync(keys, metrics, cancellationToken); + } + + public async Task CreateAsync( + string @namespace, + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await Layer.CreateAsync(@namespace, new[] { model }.ToAsyncEnumerable(), transaction, metrics, cancellationToken).FirstAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + + public IAsyncEnumerable CreateAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.CreateAsync(@namespace, models, transaction, metrics, cancellationToken); + } + + public async Task UpsertAsync( + string @namespace, + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await Layer.UpsertAsync(@namespace, new[] { model }.ToAsyncEnumerable(), transaction, metrics, cancellationToken).FirstAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + + public IAsyncEnumerable UpsertAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.UpsertAsync(@namespace, models, transaction, metrics, cancellationToken); + } + + public async Task UpdateAsync( + string @namespace, + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await Layer.UpdateAsync(@namespace, new[] { model }.ToAsyncEnumerable(), transaction, metrics, cancellationToken).FirstAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + + public IAsyncEnumerable UpdateAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.UpdateAsync(@namespace, models, transaction, metrics, cancellationToken); + } + + public Task DeleteAsync( + string @namespace, + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.DeleteAsync(@namespace, new[] { model }.ToAsyncEnumerable(), transaction, metrics, cancellationToken); + } + + public Task DeleteAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.DeleteAsync(@namespace, models, transaction, metrics, cancellationToken); + } + + public Task AllocateKeyAsync( + string @namespace, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.AllocateKeyAsync(@namespace, transaction, metrics, cancellationToken); + } + + public Task GetKeyFactoryAsync( + string @namespace, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return Layer.GetKeyFactoryAsync(@namespace, metrics, cancellationToken); + } + + public Task BeginTransactionAsync( + string @namespace, + TransactionMode mode = TransactionMode.ReadWrite, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) + { + return Layer.BeginTransactionAsync(@namespace, mode, metrics, cancellationToken); + } + + public Task CommitAsync( + string @namespace, + IModelTransaction transaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) + { + return Layer.CommitAsync(@namespace, transaction, metrics, cancellationToken); + } + + public Task RollbackAsync( + string @namespace, + IModelTransaction transaction, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) + { + return Layer.RollbackAsync(@namespace, transaction, metrics, cancellationToken); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Datastore/DatastoreRepository.cs b/UET/Redpoint.CloudFramework/Repository/Datastore/DatastoreRepository.cs new file mode 100644 index 00000000..29757050 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Datastore/DatastoreRepository.cs @@ -0,0 +1,221 @@ +namespace Redpoint.CloudFramework.Repository.Datastore +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Collections.Batching; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Metrics; + using Redpoint.CloudFramework.Repository.Pagination; + using Redpoint.CloudFramework.Repository.Transaction; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Linq.Expressions; + using System.Runtime.CompilerServices; + using System.Threading; + using System.Threading.Tasks; + + internal class DatastoreRepository : IRepository + { + internal readonly IGlobalRepository _globalDatastore; + private readonly ICurrentTenantService _currentTenant; + + public DatastoreRepository(IGlobalRepository globalDatastore, ICurrentTenantService currentTenant) + { + _globalDatastore = globalDatastore; + _currentTenant = currentTenant; + } + + internal async Task GetDatastoreNamespace() + { + var currentTenant = await _currentTenant.GetTenant().ConfigureAwait(false); + if (currentTenant == null) + { + throw new InvalidOperationException("IRepository can not be used without a tenant."); + } + return currentTenant.DatastoreNamespace; + } + + public IBatchedAsyncEnumerable QueryAsync( + Expression> where, + Expression>? order = null, + int? limit = null, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + => BatchedQueryAsync(where, order, limit, transaction, metrics, cancellationToken).AsBatchedAsyncEnumerable(); + + private async IAsyncEnumerable> BatchedQueryAsync( + Expression> where, + Expression>? order = null, + int? limit = null, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) where T : Model, new() + { + await foreach (var batch in _globalDatastore.QueryAsync(await GetDatastoreNamespace().ConfigureAwait(false), where, order, limit, transaction, metrics, cancellationToken).AsBatches().ConfigureAwait(false)) + { + yield return batch; + } + } + + public async Task> QueryPaginatedAsync( + PaginatedQueryCursor cursor, + int limit, + Expression> where, + Expression>? order = null, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await _globalDatastore.QueryPaginatedAsync(await GetDatastoreNamespace().ConfigureAwait(false), cursor, limit, where, order, transaction, metrics, cancellationToken).ConfigureAwait(false); + } + + public async Task LoadAsync( + Key key, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await _globalDatastore.LoadAsync(await GetDatastoreNamespace().ConfigureAwait(false), key, transaction, metrics, cancellationToken).ConfigureAwait(false); + } + + public IBatchedAsyncEnumerable> LoadAsync( + IAsyncEnumerable keys, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() => + BatchedLoadAsync(keys, transaction, metrics, cancellationToken).AsBatchedAsyncEnumerable(); + + public async IAsyncEnumerable>> BatchedLoadAsync( + IAsyncEnumerable keys, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) where T : Model, new() + { + await foreach (var batch in _globalDatastore.LoadAsync(await GetDatastoreNamespace().ConfigureAwait(false), keys, transaction, metrics, cancellationToken).AsBatches().ConfigureAwait(false)) + { + yield return batch; + } + } + + public async Task CreateAsync( + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await _globalDatastore.CreateAsync(await GetDatastoreNamespace().ConfigureAwait(false), new[] { model }.ToAsyncEnumerable(), transaction, metrics, cancellationToken).FirstAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + + public async IAsyncEnumerable CreateAsync( + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) where T : Model, new() + { + await foreach (var value in _globalDatastore.CreateAsync(await GetDatastoreNamespace().ConfigureAwait(false), models, transaction, metrics, cancellationToken).ConfigureAwait(false)) + { + yield return value; + } + } + + public async Task UpsertAsync( + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await _globalDatastore.UpsertAsync(await GetDatastoreNamespace().ConfigureAwait(false), new[] { model }.ToAsyncEnumerable(), transaction, metrics, cancellationToken).FirstAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + + public async IAsyncEnumerable UpsertAsync( + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) where T : Model, new() + { + await foreach (var value in _globalDatastore.UpsertAsync(await GetDatastoreNamespace().ConfigureAwait(false), models, transaction, metrics, cancellationToken).ConfigureAwait(false)) + { + yield return value; + } + } + + public async Task UpdateAsync( + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await _globalDatastore.UpdateAsync(await GetDatastoreNamespace().ConfigureAwait(false), new[] { model }.ToAsyncEnumerable(), transaction, metrics, cancellationToken).FirstAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + + public async IAsyncEnumerable UpdateAsync( + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + [EnumeratorCancellation] CancellationToken cancellationToken = default) where T : Model, new() + { + await foreach (var value in _globalDatastore.UpdateAsync(await GetDatastoreNamespace().ConfigureAwait(false), models, transaction, metrics, cancellationToken).ConfigureAwait(false)) + { + yield return value; + } + } + + public async Task DeleteAsync( + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + await _globalDatastore.DeleteAsync(await GetDatastoreNamespace().ConfigureAwait(false), new[] { model }.ToAsyncEnumerable(), transaction, metrics, cancellationToken).ConfigureAwait(false); + } + + public async Task DeleteAsync( + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + await _globalDatastore.DeleteAsync(await GetDatastoreNamespace().ConfigureAwait(false), models, transaction, metrics, cancellationToken).ConfigureAwait(false); + } + + public async Task AllocateKeyAsync( + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await _globalDatastore.AllocateKeyAsync(await GetDatastoreNamespace().ConfigureAwait(false), transaction, metrics, cancellationToken).ConfigureAwait(false); + } + + public async Task GetKeyFactoryAsync( + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new() + { + return await _globalDatastore.GetKeyFactoryAsync(await GetDatastoreNamespace().ConfigureAwait(false), metrics, cancellationToken).ConfigureAwait(false); + } + + public async Task BeginTransactionAsync( + TransactionMode mode = TransactionMode.ReadWrite, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) + { + return await _globalDatastore.BeginTransactionAsync(await GetDatastoreNamespace().ConfigureAwait(false), mode, metrics, cancellationToken).ConfigureAwait(false); + } + + public async Task CommitAsync( + IModelTransaction transaction, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) + { + await _globalDatastore.CommitAsync(await GetDatastoreNamespace().ConfigureAwait(false), transaction, metrics, cancellationToken).ConfigureAwait(false); + } + + public async Task RollbackAsync( + IModelTransaction transaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) + { + await _globalDatastore.RollbackAsync(await GetDatastoreNamespace().ConfigureAwait(false), transaction, metrics, cancellationToken).ConfigureAwait(false); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Extensions/ExceptionExtensions.cs b/UET/Redpoint.CloudFramework/Repository/Extensions/ExceptionExtensions.cs new file mode 100644 index 00000000..dc7ae952 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Extensions/ExceptionExtensions.cs @@ -0,0 +1,34 @@ +namespace Redpoint.CloudFramework.Repository +{ + using Grpc.Core; + + public static class ExceptionExtensions + { + public static bool IsContentionException(this RpcException ex) + { + ArgumentNullException.ThrowIfNull(ex); + + if (ex.Status.StatusCode == StatusCode.Aborted && + (ex.Status.Detail.Contains("Aborted due to cross-transaction contention.", StringComparison.Ordinal) || + ex.Status.Detail.Contains("too much contention on these datastore entities", StringComparison.Ordinal))) + { + return true; + } + + return false; + } + + public static bool IsTransactionExpiryException(this RpcException ex) + { + ArgumentNullException.ThrowIfNull(ex); + + if (ex.Status.StatusCode == StatusCode.InvalidArgument && + ex.Status.Detail.Contains("transaction has expired", StringComparison.Ordinal)) + { + return true; + } + + return false; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Extensions/KeyExtensions.cs b/UET/Redpoint.CloudFramework/Repository/Extensions/KeyExtensions.cs new file mode 100644 index 00000000..849ca94c --- /dev/null +++ 
b/UET/Redpoint.CloudFramework/Repository/Extensions/KeyExtensions.cs @@ -0,0 +1,22 @@ +namespace Redpoint.CloudFramework.Repository +{ + using Google.Cloud.Datastore.V1; + using System.Linq; + + public static class KeyExtensions + { + public static long GetIdFromKey(this Key key) + { + ArgumentNullException.ThrowIfNull(key); + + return key.Path.Last().Id; + } + + public static string GetNameFromKey(this Key key) + { + ArgumentNullException.ThrowIfNull(key); + + return key.Path.Last().Name; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Extensions/RepositoryExtensions.cs b/UET/Redpoint.CloudFramework/Repository/Extensions/RepositoryExtensions.cs new file mode 100644 index 00000000..b52d42c7 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Extensions/RepositoryExtensions.cs @@ -0,0 +1,24 @@ +namespace Redpoint.CloudFramework.Repository +{ + using Google.Cloud.Datastore.V1; + + public static class RepositoryExtensions + { + public static bool HasAncestor(this Key key, Key? parent) + { + ArgumentNullException.ThrowIfNull(key); + + var keyParent = key.GetParent(); + if (keyParent != null) + { + return keyParent.Equals(parent); + } + return false; + } + + public static bool IsAnyString(this string[]? 
values, string target) + { + return values != null && values.Contains(target); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Geographic/GeoConstants.cs b/UET/Redpoint.CloudFramework/Repository/Geographic/GeoConstants.cs new file mode 100644 index 00000000..b5889821 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Geographic/GeoConstants.cs @@ -0,0 +1,10 @@ +namespace Redpoint.CloudFramework.Repository.Geographic +{ + internal class GeoConstants + { +#pragma warning disable IDE1006 // Naming Styles + internal const string GeoHashPropertySuffix = ":geohash"; + internal const string HashKeyPropertySuffix = ":hashkey"; +#pragma warning restore IDE1006 // Naming Styles + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Geographic/GeoExtensions.cs b/UET/Redpoint.CloudFramework/Repository/Geographic/GeoExtensions.cs new file mode 100644 index 00000000..37ca5ad2 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Geographic/GeoExtensions.cs @@ -0,0 +1,68 @@ +namespace Redpoint.CloudFramework.Repository +{ + using Google.Type; + using System; + + public static class GeoExtensions + { + private const double _kmToNmUnit = 1.0 / 1.852; + private const double _nmToLatLngUnit = 1.0 / 60.0; + private const double _kmToLatLngUnit = _kmToNmUnit * _nmToLatLngUnit; + + internal static double HaversineDistance(LatLng pos1, LatLng pos2) + { + double R = 6371; + var lat = (pos2.Latitude - pos1.Latitude) * (Math.PI / 180); + var lng = (pos2.Longitude - pos1.Longitude) * (Math.PI / 180); + var h1 = Math.Sin(lat / 2) * Math.Sin(lat / 2) + + Math.Cos(pos1.Latitude * (Math.PI / 180)) * Math.Cos(pos2.Latitude * (Math.PI / 180)) * + Math.Sin(lng / 2) * Math.Sin(lng / 2); + var h2 = 2 * Math.Asin(Math.Min(1, Math.Sqrt(h1))); + return R * h2; + } + + internal static LatLng GetRectangularMinPoint(LatLng centerPoint, float distanceKilometers) + { + return new LatLng + { + Latitude = centerPoint.Latitude - (distanceKilometers * _kmToLatLngUnit), + Longitude 
= centerPoint.Longitude - (distanceKilometers * _kmToLatLngUnit), + }; + } + + internal static LatLng GetRectangularMaxPoint(LatLng centerPoint, float distanceKilometers) + { + return new LatLng + { + Latitude = centerPoint.Latitude + (distanceKilometers * _kmToLatLngUnit), + Longitude = centerPoint.Longitude + (distanceKilometers * _kmToLatLngUnit), + }; + } + + public static bool WithinKilometers(this LatLng latLng, LatLng centerPoint, float distanceKilometers) + { + ArgumentNullException.ThrowIfNull(latLng); + ArgumentNullException.ThrowIfNull(centerPoint); + + return HaversineDistance(latLng, centerPoint) < distanceKilometers; + } + + /// + /// Sort the models by the nearest location. This method only has an effect when used + /// as part of a sort expression in QueryAsync<>. + /// + public static bool Nearest(this LatLng latLng) + { + return false; + } + + /// + /// Sort the models by the furthest location. This method only has an effect when used + /// as part of a sort expression in QueryAsync<>. 
+ /// + public static bool Furthest(this LatLng latLng) + { + return false; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Geographic/IGeoModel.cs b/UET/Redpoint.CloudFramework/Repository/Geographic/IGeoModel.cs new file mode 100644 index 00000000..64b104a9 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Geographic/IGeoModel.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.Repository.Geographic +{ + using System.Collections.Generic; + + public interface IGeoModel + { + Dictionary GetHashKeyLengthsForGeopointFields(); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Geographic/S2Manager.cs b/UET/Redpoint.CloudFramework/Repository/Geographic/S2Manager.cs new file mode 100644 index 00000000..a9db44fd --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Geographic/S2Manager.cs @@ -0,0 +1,83 @@ +namespace Redpoint.CloudFramework.Repository.Geographic +{ + using Google.Common.Geometry; + using Google.Type; + using System; + using System.Collections.Generic; + using System.Globalization; + + internal static class S2Manager + { + public static ulong GenerateGeohash(LatLng geopoint) + { + var latLng = S2LatLng.FromDegrees(geopoint.Latitude, geopoint.Longitude); + var cell = new S2Cell(latLng); + return cell.Id.Id; + } + + public static ulong GenerateGeohashKey(ulong geohash, int geohashKeyLength) + { + var geohashString = geohash.ToString(CultureInfo.InvariantCulture); + var denominator = (ulong)Math.Pow(10, geohashString.Length - geohashKeyLength); + return geohash / denominator; + } + + public static S2LatLngRect LatLngRectFromQueryRectangleInput(LatLng minPoint, LatLng maxPoint) + { + return S2LatLngRect.FromPointPair( + S2LatLng.FromDegrees(minPoint.Latitude, minPoint.Longitude), + S2LatLng.FromDegrees(maxPoint.Latitude, maxPoint.Longitude) + ); + } + + public struct GeohashRange + { + public ulong RangeMin; + + public ulong RangeMax; + } + + public static GeohashRange[] GetGeohashRanges(S2LatLngRect rect, int 
geohashKeyLength) + { + var ranges = new List(); + var covering = new S2RegionCoverer().GetCovering(rect); + foreach (var outerRange in covering) + { + var rangeMin = outerRange.RangeMin.Id; + var rangeMax = outerRange.RangeMax.Id; + var minHashKey = S2Manager.GenerateGeohashKey(rangeMin, geohashKeyLength); + var maxHashKey = S2Manager.GenerateGeohashKey(rangeMax, geohashKeyLength); + var denominator = (ulong)Math.Pow(10, rangeMin.ToString(CultureInfo.InvariantCulture).Length - minHashKey.ToString(CultureInfo.InvariantCulture).Length); + + if (minHashKey.Equals(maxHashKey)) + { + ranges.Add(new GeohashRange { RangeMin = rangeMin, RangeMax = rangeMax }); + } + else + { + for (var l = minHashKey; l <= maxHashKey; l++) + { + if (l > 0) + { + ranges.Add(new GeohashRange { + RangeMin= l == minHashKey ? rangeMin : (l * denominator), +RangeMax= l == maxHashKey ? rangeMax : (((l + 1) * denominator) - 1) + }); + } + else + { + ranges.Add(new GeohashRange { + RangeMin = l == minHashKey ? rangeMin : (((l - 1) * denominator) + 1), + RangeMax = l == maxHashKey ? rangeMax : (l * denominator) + }); + } + } + } + } + return ranges.ToArray(); + } + + // TODO: filterByRectangle + // https://github.com/damack/datastore-geo/blob/master/src/S2Manager.js + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Hooks/IGlobalRepositoryHook.cs b/UET/Redpoint.CloudFramework/Repository/Hooks/IGlobalRepositoryHook.cs new file mode 100644 index 00000000..fe7eb6d7 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Hooks/IGlobalRepositoryHook.cs @@ -0,0 +1,17 @@ +namespace Redpoint.CloudFramework.Repository.Hooks +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Transaction; + using System.Threading.Tasks; + + public interface IGlobalRepositoryHook + { + Task PostCreate(string @namespace, T model, IModelTransaction? transaction) where T : Model, new(); + Task PostUpsert(string @namespace, T model, IModelTransaction? 
transaction) where T : Model, new(); + Task PostUpdate(string @namespace, T model, IModelTransaction? transaction) where T : Model, new(); + Task PostDelete(string @namespace, T model, IModelTransaction? transaction) where T : Model, new(); + + Task MutateEntityBeforeWrite(string @namespace, Entity entity); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/IDistributedCacheExtended.cs b/UET/Redpoint.CloudFramework/Repository/IDistributedCacheExtended.cs new file mode 100644 index 00000000..71ab8ed0 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/IDistributedCacheExtended.cs @@ -0,0 +1,12 @@ +namespace Redpoint.CloudFramework.Repository +{ + using System.Collections.Generic; + using System.Threading.Tasks; + + public interface IDistributedCacheExtended + { + Task ClearAsync(); + Task> GetKeysAsync(); + Task RemoveAsync(string[] keys); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/IGlobalRepository.cs b/UET/Redpoint.CloudFramework/Repository/IGlobalRepository.cs new file mode 100644 index 00000000..5864dfdd --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/IGlobalRepository.cs @@ -0,0 +1,140 @@ +namespace Redpoint.CloudFramework.Repository +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Collections.Batching; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Metrics; + using Redpoint.CloudFramework.Repository.Pagination; + using Redpoint.CloudFramework.Repository.Transaction; + using System; + using System.Collections.Generic; + using System.Linq.Expressions; + using System.Threading; + using System.Threading.Tasks; + + public interface IGlobalRepository + { + IBatchedAsyncEnumerable QueryAsync( + string @namespace, + Expression> where, + Expression>? order = null, + int? limit = null, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task> QueryPaginatedAsync( + string @namespace, + PaginatedQueryCursor cursor, + int limit, + Expression> where, + Expression>? order = null, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task LoadAsync( + string @namespace, + Key key, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + IBatchedAsyncEnumerable> LoadAsync( + string @namespace, + IAsyncEnumerable keys, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + IAsyncEnumerable> LoadAcrossNamespacesAsync( + IAsyncEnumerable keys, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task CreateAsync( + string @namespace, + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + IAsyncEnumerable CreateAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task UpsertAsync( + string @namespace, + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + IAsyncEnumerable UpsertAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task UpdateAsync( + string @namespace, + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + IAsyncEnumerable UpdateAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task DeleteAsync( + string @namespace, + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task DeleteAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task AllocateKeyAsync( + string @namespace, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task GetKeyFactoryAsync( + string @namespace, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task BeginTransactionAsync( + string @namespace, + TransactionMode mode = TransactionMode.ReadWrite, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default); + + Task CommitAsync( + string @namespace, + IModelTransaction transaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default); + + Task RollbackAsync( + string @namespace, + IModelTransaction transaction, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/IRepository.cs b/UET/Redpoint.CloudFramework/Repository/IRepository.cs new file mode 100644 index 00000000..640a8eeb --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/IRepository.cs @@ -0,0 +1,118 @@ +namespace Redpoint.CloudFramework.Repository +{ + using System; + using System.Collections.Generic; + using System.Linq.Expressions; + using System.Threading; + using System.Threading.Tasks; + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Collections.Batching; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Metrics; + using Redpoint.CloudFramework.Repository.Pagination; + using Redpoint.CloudFramework.Repository.Transaction; + + public interface IRepository + { + IBatchedAsyncEnumerable QueryAsync( + Expression> where, + Expression>? order = null, + int? limit = null, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task> QueryPaginatedAsync( + PaginatedQueryCursor cursor, + int limit, + Expression> where, + Expression>? order = null, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task LoadAsync( + Key key, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + IBatchedAsyncEnumerable> LoadAsync( + IAsyncEnumerable keys, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task CreateAsync( + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + IAsyncEnumerable CreateAsync( + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task UpsertAsync( + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + IAsyncEnumerable UpsertAsync( + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task UpdateAsync( + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + IAsyncEnumerable UpdateAsync( + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task DeleteAsync( + T model, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task DeleteAsync( + IAsyncEnumerable models, + IModelTransaction? transaction = null, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task AllocateKeyAsync( + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task GetKeyFactoryAsync( + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) where T : Model, new(); + + Task BeginTransactionAsync( + TransactionMode mode = TransactionMode.ReadWrite, + RepositoryOperationMetrics? 
metrics = null, + CancellationToken cancellationToken = default); + + Task CommitAsync( + IModelTransaction transaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default); + + Task RollbackAsync( + IModelTransaction transaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Layers/DatastoreRepositoryLayer.cs b/UET/Redpoint.CloudFramework/Repository/Layers/DatastoreRepositoryLayer.cs new file mode 100644 index 00000000..774ed994 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Layers/DatastoreRepositoryLayer.cs @@ -0,0 +1,1518 @@ +namespace Redpoint.CloudFramework.Repository.Layers +{ + using Google.Cloud.Datastore.V1; + using Grpc.Core; + using Microsoft.Extensions.Caching.Memory; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using Redpoint.CloudFramework.Collections.Batching; + using Redpoint.CloudFramework.GoogleInfrastructure; + using Redpoint.CloudFramework.Metric; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Converters.Expression; + using Redpoint.CloudFramework.Repository.Converters.Model; + using Redpoint.CloudFramework.Repository.Geographic; + using Redpoint.CloudFramework.Repository.Hooks; + using Redpoint.CloudFramework.Repository.Metrics; + using Redpoint.CloudFramework.Repository.Pagination; + using Redpoint.CloudFramework.Repository.Transaction; + using Redpoint.CloudFramework.Tracing; + using Redpoint.Collections; + using Redpoint.Concurrency; + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.Linq; + using System.Linq.Expressions; + using System.Runtime.CompilerServices; + using System.Threading; + using System.Threading.Tasks; + + internal class DatastoreRepositoryLayer : IDatastoreRepositoryLayer + { + private readonly IModelConverter 
_entityConverter; + private readonly IHostEnvironment _hostEnvironment; + private readonly IGoogleServices _googleServices; + private readonly IManagedTracer _managedTracer; + private readonly IGlobalRepositoryHook[] _hooks; + private readonly IMemoryCache _memoryCache; + private readonly IExpressionConverter _expressionConverter; + private readonly ILogger _logger; + private readonly IMetricService _metricService; + private readonly DatastoreClient _client; + + private readonly MemoryCacheEntryOptions _memoryCacheOptions = + new MemoryCacheEntryOptions() + .SetSlidingExpiration(TimeSpan.FromMinutes(30)) + .SetAbsoluteExpiration(TimeSpan.FromHours(12)); + + private const string _datastoreEntityOperationCount = "rcf/datastore/operations"; + private const string _datastoreEntityEntityReadCount = "rcf/datastore/entity_reads"; + + public DatastoreRepositoryLayer( + IModelConverter entityConverter, + IHostEnvironment hostEnvironment, + IGoogleServices googleServices, + IManagedTracer managedTracer, + IGlobalRepositoryHook[] hooks, + IMemoryCache memoryCache, + IExpressionConverter expressionConverter, + ILogger logger, + IMetricService metricService) + { + _entityConverter = entityConverter; + _hostEnvironment = hostEnvironment; + _googleServices = googleServices; + _managedTracer = managedTracer; + _hooks = hooks; + _memoryCache = memoryCache; + _expressionConverter = expressionConverter; + _logger = logger; + _metricService = metricService; + + _client = _googleServices.Build( + DatastoreClient.DefaultEndpoint, + DatastoreClient.DefaultScopes); + } + + public AsyncEvent OnNonTransactionalEntitiesModified { get; } = new AsyncEvent(); + + private DatastoreDb GetDbForNamespace(string @namespace) + { + using (_managedTracer.StartSpan($"db.datastore.get_datastore_for_current_site", @namespace)) + { + var db = _memoryCache.Get("db:" + @namespace); + if (db != null) + { + return db; + } + + db = DatastoreDb.Create(_googleServices.ProjectId, @namespace, _client); + 
_memoryCache.Set("db:" + @namespace, db, _memoryCacheOptions); + return db; + } + } + + private async IAsyncEnumerable> BatchedQueryAsync( + string @namespace, + Expression> where, + Expression>? order, + int? limit, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (var span = _managedTracer.StartSpan("db.datastore.query", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + ArgumentNullException.ThrowIfNull(where); + + long totalEntitiesRead = 0; + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + GeoQueryParameters? geoQuery = null; + + var hasAncestorQuery = false; + var referenceModel = new T(); + var filter = _expressionConverter.SimplifyFilter(_expressionConverter.ConvertExpressionToFilter(where.Body, where.Parameters[0], referenceModel, ref geoQuery, ref hasAncestorQuery)); + var sort = order == null ? null : _expressionConverter.ConvertExpressionToOrder(order.Body, order.Parameters[0], referenceModel, ref geoQuery); + + if (geoQuery == null) + { + var query = new Query(referenceModel.GetKind()); + query.Filter = filter; + query.Limit = limit; + if (sort != null) + { + query.Order.AddRange(sort); + } + + cancellationToken.ThrowIfCancellationRequested(); + + if (transaction != null && !hasAncestorQuery && (_hostEnvironment.IsDevelopment() || _hostEnvironment.IsStaging())) + { + _logger.LogWarning("Detected a transactional query without an ancestor filter in development. The datastore emulator does not support ancestor-less transactional queries, even though this is supported in production. 
The query will be non-transactional in development, but transactionality and correctness will be enforced in production."); + transaction = null; + } + + var db = GetDbForNamespace(@namespace); + AsyncLazyDatastoreQuery lazyQuery; + if (transaction == null) + { + lazyQuery = db.RunQueryLazilyAsync(query); + } + else + { + lazyQuery = transaction.Transaction.RunQueryLazilyAsync(query); + } + + int entitiesRead = 0; + await foreach (var response in lazyQuery.AsResponses().WithCancellation(cancellationToken)) + { + entitiesRead += response.Batch.EntityResults.Count; + if (metrics != null) + { + metrics.DatastoreEntitiesRead += response.Batch.EntityResults.Count; + } + cancellationToken.ThrowIfCancellationRequested(); + + yield return + response.Batch.EntityResults + .Select(x => _entityConverter.From(@namespace, x.Entity)) + .WhereNotNull() + .ToList(); + } + + await _metricService.AddPoint(_datastoreEntityOperationCount, 1, null, new Dictionary + { + { "operation", "query" }, + { "kind", referenceModel.GetKind() }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + await _metricService.AddPoint(_datastoreEntityEntityReadCount, entitiesRead, null, new Dictionary + { + { "kind", referenceModel.GetKind() }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + totalEntitiesRead += entitiesRead; + } + else + { + var keyLength = ((IGeoModel)referenceModel).GetHashKeyLengthsForGeopointFields()[geoQuery.GeoFieldName]; + var latLngRect = S2Manager.LatLngRectFromQueryRectangleInput(geoQuery.MinPoint, geoQuery.MaxPoint); + var ranges = S2Manager.GetGeohashRanges(latLngRect, keyLength); + + var entityBatches = ranges.ToAsyncEnumerable() + .SelectMany(range => QueryGeohashRange( + @namespace, + referenceModel, + filter!, // @note: If filter is null, then we won't be doing a geographic query anyway. 
+ range, + keyLength, + geoQuery, + transaction, + metrics, + cancellationToken)); + + if (geoQuery.SortDirection.HasValue) + { + // @note: We return these all as one giant batch; we can't propagate the Datastore + // batching to the receiver due to the sorting. + var entities = entityBatches.SelectMany(x => x.ToAsyncEnumerable()); + if (geoQuery.SortDirection.Value == PropertyOrder.Types.Direction.Ascending) + { + entities = entities.OrderBy(x => GeoExtensions.HaversineDistance(geoQuery.CenterPoint, geoQuery.ServerSideAccessor(x))); + } + else + { + entities = entities.OrderByDescending(x => GeoExtensions.HaversineDistance(geoQuery.CenterPoint, geoQuery.ServerSideAccessor(x))); + } + if (limit != null) + { + entities = entities.Take(limit.Value); + } + yield return await entities.ToListAsync(cancellationToken).ConfigureAwait(false); + } + else + { + // Only pull as many batches as we need to satisfy the query. + var emittedEntityCount = 0; + await foreach (var batch in entityBatches.ConfigureAwait(false)) + { + cancellationToken.ThrowIfCancellationRequested(); + var nextEntityCount = emittedEntityCount + batch.Count; + if (limit != null && nextEntityCount > limit.Value) + { + yield return batch.Take(nextEntityCount - emittedEntityCount).ToList(); + yield break; + } + else + { + yield return batch; + } + emittedEntityCount += batch.Count; + } + } + } + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + + if (totalEntitiesRead > 2500) + { + _logger.LogWarning($"QueryAsync operation returned more than 2500 '{new T().GetKind()}' entities, which is likely to cause high Datastore costs. Please optimize your application."); + } + + span.SetTag("TotalEntitiesRead", totalEntitiesRead.ToString(CultureInfo.InvariantCulture)); + span.SetExtra("TotalEntitiesRead", totalEntitiesRead); + } + } + + public IBatchedAsyncEnumerable QueryAsync( + string @namespace, + Expression> where, + Expression>? 
order, + int? limit, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() => + BatchedQueryAsync( + @namespace, + where, + order, + limit, + transaction, + metrics, + cancellationToken).AsBatchedAsyncEnumerable(); + + private async IAsyncEnumerable> QueryGeohashRange( + string @namespace, + T referenceModel, + Filter filter, + S2Manager.GeohashRange range, + ushort keyLength, + GeoQueryParameters geoQuery, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.query_geohash_range", $"{@namespace},{typeof(T).Name}")) + { + var hashKey = S2Manager.GenerateGeohashKey(range.RangeMin, keyLength); + var hashKeyString = hashKey.ToString(CultureInfo.InvariantCulture); + + var filtersGeographic = Filter.And( + Filter.GreaterThan(geoQuery.GeoFieldName + GeoConstants.GeoHashPropertySuffix, new Value { StringValue = range.RangeMin.ToString(CultureInfo.InvariantCulture) }), + Filter.LessThan(geoQuery.GeoFieldName + GeoConstants.GeoHashPropertySuffix, new Value { StringValue = range.RangeMax.ToString(CultureInfo.InvariantCulture) }), + Filter.Equal(geoQuery.GeoFieldName + GeoConstants.HashKeyPropertySuffix, new Value { IntegerValue = (long)hashKey }) + ); + + var query = new Query(referenceModel.GetKind()); + if (filter == null) + { + query.Filter = filtersGeographic; + } + else + { + query.Filter = _expressionConverter.SimplifyFilter(Filter.And(filter, filtersGeographic)); + } + + cancellationToken.ThrowIfCancellationRequested(); + + var db = GetDbForNamespace(@namespace); + AsyncLazyDatastoreQuery lazyQuery; + if (transaction == null) + { + lazyQuery = db.RunQueryLazilyAsync(query); + } + else + { + lazyQuery = transaction.Transaction.RunQueryLazilyAsync(query); + } + + int entitiesRead = 0; + await foreach (var response in 
lazyQuery.AsResponses().WithCancellation(cancellationToken)) + { + entitiesRead += response.Batch.EntityResults.Count; + if (metrics != null) + { + metrics.DatastoreEntitiesRead += response.Batch.EntityResults.Count; + } + cancellationToken.ThrowIfCancellationRequested(); + + yield return + response.Batch.EntityResults + .Select(x => _entityConverter.From(@namespace, x.Entity)) + .WhereNotNull() + .Where(geoQuery.ServerSideFilter) + .ToList(); + } + + await _metricService.AddPoint(_datastoreEntityOperationCount, 1, null, new Dictionary + { + { "operation", "querygeo" }, + { "hashkey", hashKeyString }, + { "kind", referenceModel.GetKind() }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + await _metricService.AddPoint(_datastoreEntityEntityReadCount, entitiesRead, null, new Dictionary + { + { "kind", referenceModel.GetKind() }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + } + } + + public async Task> QueryPaginatedAsync( + string @namespace, + PaginatedQueryCursor cursor, + int limit, + Expression> where, + Expression>? order, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.query_paginated", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + ArgumentNullException.ThrowIfNull(where); + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + GeoQueryParameters? 
geoQuery = null; + + var hasAncestorQuery = false; + var referenceModel = new T(); + var filter = _expressionConverter.SimplifyFilter(_expressionConverter.ConvertExpressionToFilter(where.Body, where.Parameters[0], referenceModel, ref geoQuery, ref hasAncestorQuery)); + if (geoQuery != null) + { + throw new InvalidOperationException("Geographic queries can not be used with QueryPaginatedAsync, because there is no way to paginate geographic queries. Use QueryAsync<> instead."); + } + var sort = order == null ? null : _expressionConverter.ConvertExpressionToOrder(order.Body, order.Parameters[0], referenceModel, ref geoQuery); + + var query = new Query(referenceModel.GetKind()); + query.Filter = filter; + query.Limit = limit; + query.StartCursor = cursor; + if (sort != null) + { + query.Order.AddRange(sort); + } + + cancellationToken.ThrowIfCancellationRequested(); + + var db = GetDbForNamespace(@namespace); + DatastoreQueryResults results; + if (transaction == null) + { + results = await db.RunQueryAsync(query).ConfigureAwait(false); + } + else + { + results = await transaction.Transaction.RunQueryAsync(query).ConfigureAwait(false); + } + + await _metricService.AddPoint(_datastoreEntityOperationCount, 1, null, new Dictionary + { + { "operation", "querypage" }, + { "kind", referenceModel.GetKind() }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + await _metricService.AddPoint(_datastoreEntityEntityReadCount, results.Entities.Count, null, new Dictionary + { + { "kind", referenceModel.GetKind() }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + + // The Datastore emulator has a bug where it will always return MoreResultsAfterLimit + // even when the paginated query is complete. Handle this scenario with the emulator. 
+ if ((_hostEnvironment.IsDevelopment() || _hostEnvironment.IsStaging()) && + results.Entities.Count < limit && + results.MoreResults == QueryResultBatch.Types.MoreResultsType.MoreResultsAfterLimit) + { + return new PaginatedQueryResult + { + Results = results.Entities.Select(x => _entityConverter.From(@namespace, x)).WhereNotNull().ToList(), + NextCursor = null, + }; + } + + return new PaginatedQueryResult + { + Results = results.Entities.Select(x => _entityConverter.From(@namespace, x)).WhereNotNull().ToList(), + NextCursor = results.MoreResults == QueryResultBatch.Types.MoreResultsType.MoreResultsAfterLimit ? new PaginatedQueryCursor(results.EndCursor) : null, + }; + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + } + } + + public async Task LoadAsync( + string @namespace, + Key key, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.load", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + ArgumentNullException.ThrowIfNull(key); + + Stopwatch? 
stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + var db = GetDbForNamespace(@namespace); + + Entity entity; + if (transaction == null) + { + entity = await db.LookupAsync(key).ConfigureAwait(false); + } + else + { + entity = await transaction.Transaction.LookupAsync(key).ConfigureAwait(false); + } + + if (metrics != null) + { + metrics.DatastoreEntitiesRead++; + } + + await _metricService.AddPoint(_datastoreEntityOperationCount, 1, null, new Dictionary + { + { "operation", "load" }, + { "kind", key.Path.Last().Kind }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + if (entity != null) + { + await _metricService.AddPoint(_datastoreEntityEntityReadCount, 1, null, new Dictionary + { + { "kind", key.Path.Last().Kind }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + } + + cancellationToken.ThrowIfCancellationRequested(); + + return entity == null ? null : _entityConverter.From(@namespace, entity); + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + } + } + + public IBatchedAsyncEnumerable> LoadAsync( + string @namespace, + IAsyncEnumerable keys, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + => BatchedLoadAsync( + @namespace, + keys, + transaction, + metrics, + cancellationToken).AsBatchedAsyncEnumerable(); + + private async IAsyncEnumerable>> BatchedLoadAsync( + string @namespace, + IAsyncEnumerable keys, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.load", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + ArgumentNullException.ThrowIfNull(keys); + + long totalEntitiesRead = 0; + + Stopwatch? 
stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + string? kind = null; + try + { + var db = GetDbForNamespace(@namespace); + + var batch = new HashSet(); + var hasWrittenOpMetric = false; + await foreach (var key in keys.Distinct().WithCancellation(cancellationToken)) + { + if (key == null) + { + throw new ArgumentNullException(nameof(keys), "One or more keys passed to LoadAsync was null."); + } + + batch.Add(key); + + if (kind == null) + { + kind = key.Path.Last().Kind; + if (!hasWrittenOpMetric && kind != null) + { + await _metricService.AddPoint(_datastoreEntityOperationCount, 1, null, new Dictionary + { + { "operation", "load" }, + { "kind", kind }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + } + } + + if (batch.Count == 1000) + { + IReadOnlyList entities; + if (transaction == null) + { + entities = await db.LookupAsync(batch).ConfigureAwait(false); + } + else + { + entities = await transaction.Transaction.LookupAsync(batch).ConfigureAwait(false); + } + + if (metrics != null) + { + metrics.DatastoreEntitiesRead += batch.Count; + } + + totalEntitiesRead += entities.Count; + await _metricService.AddPoint(_datastoreEntityEntityReadCount, entities.Count, null, new Dictionary + { + { "kind", kind }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + + var expectedKeys = new List(batch); + batch.Clear(); + + cancellationToken.ThrowIfCancellationRequested(); + + var batchResults = new List>(); + for (int i = 0; i < expectedKeys.Count; i++) + { + if (entities.Count <= i || + entities[i] == null) + { + batchResults.Add(new KeyValuePair(expectedKeys[i], null)); + } + else + { + batchResults.Add(new KeyValuePair(entities[i].Key, _entityConverter.From(@namespace, entities[i]))); + } + } + yield return batchResults; + } + } + + if (!hasWrittenOpMetric && kind != null) + { + await _metricService.AddPoint(_datastoreEntityOperationCount, 1, null, new Dictionary + { + { "operation", "load" }, + { "kind", kind }, + 
{ "namespace", @namespace }, + }).ConfigureAwait(false); + } + + if (batch.Count > 0) + { + IReadOnlyList entities; + if (transaction == null) + { + entities = await db.LookupAsync(batch).ConfigureAwait(false); + } + else + { + entities = await transaction.Transaction.LookupAsync(batch).ConfigureAwait(false); + } + + if (metrics != null) + { + metrics.DatastoreEntitiesRead += batch.Count; + } + + totalEntitiesRead += entities.Count; + await _metricService.AddPoint(_datastoreEntityEntityReadCount, entities.Count, null, new Dictionary + { + { "kind", kind }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + + var expectedKeys = new List(batch); + batch.Clear(); + + cancellationToken.ThrowIfCancellationRequested(); + + var batchResults = new List>(); + for (int i = 0; i < expectedKeys.Count; i++) + { + if (entities.Count <= i || + entities[i] == null) + { + batchResults.Add(new KeyValuePair(expectedKeys[i], null)); + } + else + { + batchResults.Add(new KeyValuePair(entities[i].Key, _entityConverter.From(@namespace, entities[i]))); + } + } + yield return batchResults; + } + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + + if (totalEntitiesRead > 2500) + { + _logger.LogWarning($"LoadAsync operation returned more than 2500 '{kind}' entities, which is likely to cause high Datastore costs. Please optimize your application."); + } + } + } + + public async IAsyncEnumerable> LoadAcrossNamespacesAsync( + IAsyncEnumerable keys, + RepositoryOperationMetrics? metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.load_across_namespaces", $"{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(keys); + + Stopwatch? 
stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + // We don't currently process keys asynchronously; we eagerly fetch them all so we can do + // our Any/GroupBy operations. + var keysList = await keys.ToListAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + var kind = keysList.FirstOrDefault()?.Path?.Last()?.Kind; + + if (keysList.Any(x => x == null)) + { + throw new ArgumentNullException(nameof(keys), "One or more keys passed to LoadAcrossNamespacesAsync was null."); + } + + foreach (var keyGroup in keysList.GroupBy(x => x.PartitionId.NamespaceId).ToDictionary(k => k.Key, v => v.ToArray())) + { + var @namespace = keyGroup.Key; + // Datastore APIs enforce that the namespace can't be null, so we don't need to check and throw ArgumentNullException. + + var batches = new List(); + if (keyGroup.Value.Length <= 1000) + { + batches.Add(keyGroup.Value); + } + else + { + for (int i = 0; i < keyGroup.Value.Length; i += 1000) + { + var batchSize = Math.Min(1000, keyGroup.Value.Length - i); + var batch = new Key[batchSize]; + Array.Copy(keyGroup.Value, i, batch, 0, batchSize); + batches.Add(batch); + } + } + + var db = GetDbForNamespace(@namespace); + + await _metricService.AddPoint(_datastoreEntityOperationCount, 1, null, new Dictionary + { + { "operation", "loadacrossns" }, + { "kind", kind }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + + foreach (var batch in batches) + { + cancellationToken.ThrowIfCancellationRequested(); + + var entities = await db.LookupAsync(batch).ConfigureAwait(false); + + if (metrics != null) + { + metrics.DatastoreEntitiesRead += batch.LongLength; + } + + await _metricService.AddPoint(_datastoreEntityEntityReadCount, entities.Count, null, new Dictionary + { + { "kind", kind }, + { "namespace", @namespace }, + }).ConfigureAwait(false); + + cancellationToken.ThrowIfCancellationRequested(); + + for (int i = 0; i < batch.Length; i++) + { + if (entities.Count <= i || + 
entities[i] == null) + { + yield return new KeyValuePair(batch[i], null); + } + else + { + yield return new KeyValuePair(entities[i].Key, _entityConverter.From(@namespace, entities[i])); + } + } + } + } + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + } + } + + public async IAsyncEnumerable CreateAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.create", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + var db = GetDbForNamespace(@namespace); + + List entities = new List(); + List modelBuffer = new List(); + KeyFactory? keyFactory = null; + await foreach (var model in models.ConfigureAwait(false)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (model == null) + { + throw new ArgumentNullException(nameof(models), "Input models contained a null value; filter nulls out of the input enumerable before calling CreateAsync()."); + } + + if (keyFactory == null) + { + keyFactory = db.CreateKeyFactory(model.GetKind()); + } + + var entity = _entityConverter.To(@namespace, model, true, _ => keyFactory.CreateIncompleteKey()); + if (entity.Key.PartitionId.NamespaceId != @namespace) + { + throw new InvalidOperationException($"Cross-namespace data write attempted (CreateAsync called with namespace '{@namespace}', but entity had namespace '{entity.Key.PartitionId.NamespaceId}')."); + } + entities.Add(entity); + modelBuffer.Add(model); + } + + cancellationToken.ThrowIfCancellationRequested(); + + if (transaction == null) + { + foreach (var hook in _hooks) + { + foreach (var entity in entities) + { + await 
hook.MutateEntityBeforeWrite(@namespace, entity).ConfigureAwait(false); + } + } + + var keys = new List(); + foreach (var batch in entities.BatchInto(500)) + { + keys.AddRange(await db.InsertAsync(batch).ConfigureAwait(false)); + } + if (metrics != null) + { + metrics.DatastoreEntitiesWritten += entities.Count; + } + + for (int i = 0; i < keys.Count; i++) + { + if (keys[i] != null) + { + modelBuffer[i].Key = keys[i]; + } + } + + foreach (var hook in _hooks) + { + foreach (var model in modelBuffer) + { + await hook.PostCreate(@namespace, model, transaction).ConfigureAwait(false); + } + } + + await OnNonTransactionalEntitiesModified.BroadcastAsync(new EntitiesModifiedEventArgs + { + Keys = modelBuffer.Select(x => x.Key).ToArray(), + Metrics = metrics, + }, cancellationToken).ConfigureAwait(false); + } + else + { + foreach (var entity in entities) + { + if (entity.Key.Path.Last().IdTypeCase == Key.Types.PathElement.IdTypeOneofCase.None) + { + entity.Key = await db.AllocateIdAsync(entity.Key).ConfigureAwait(false); + } + } + + foreach (var hook in _hooks) + { + foreach (var entity in entities) + { + await hook.MutateEntityBeforeWrite(@namespace, entity).ConfigureAwait(false); + } + } + + foreach (var batch in entities.BatchInto(500)) + { + transaction.Transaction.Insert(batch); + } + transaction.ModifiedModelsList.AddRange(modelBuffer); + transaction.QueuedPreCommitOperationsList.Add(async () => + { + foreach (var hook in _hooks) + { + foreach (var model in modelBuffer) + { + await hook.PostCreate(@namespace, model, transaction).ConfigureAwait(false); + } + } + }); + + for (int i = 0; i < entities.Count; i++) + { + modelBuffer[i].Key = entities[i].Key; + } + } + + foreach (var model in modelBuffer) + { + yield return model; + } + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + } + } + + public async IAsyncEnumerable UpsertAsync( + string @namespace, + IAsyncEnumerable models, + 
IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.upsert", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + var db = GetDbForNamespace(@namespace); + + List entities = new List(); + List modelBuffer = new List(); + HashSet seenKeys = new HashSet(); + KeyFactory? keyFactory = null; + await foreach (var model in models.ConfigureAwait(false)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (model == null) + { + throw new ArgumentNullException(nameof(models), "Input models contained a null value; filter nulls out of the input enumerable before calling UpsertAsync()."); + } + + if (keyFactory == null) + { + keyFactory = db.CreateKeyFactory(model.GetKind()); + } + + var entity = _entityConverter.To(@namespace, model, false, _ => keyFactory.CreateIncompleteKey()); + if (entity.Key.PartitionId.NamespaceId != @namespace) + { + throw new InvalidOperationException($"Cross-namespace data write attempted (UpsertAsync called with namespace '{@namespace}', but entity had namespace '{entity.Key.PartitionId.NamespaceId}')."); + } + + if (model.Key != null && seenKeys.Contains(entity.Key)) + { + continue; + } + + entities.Add(entity); + modelBuffer.Add(model); + seenKeys.Add(entity.Key); + } + + cancellationToken.ThrowIfCancellationRequested(); + + if (transaction == null) + { + foreach (var hook in _hooks) + { + foreach (var entity in entities) + { + await hook.MutateEntityBeforeWrite(@namespace, entity).ConfigureAwait(false); + } + } + + var keys = new List(); + foreach (var batch in entities.BatchInto(500)) + { + keys.AddRange(await db.UpsertAsync(batch).ConfigureAwait(false)); + } + if (metrics != null) + { + 
metrics.DatastoreEntitiesWritten += entities.Count; + } + + for (int i = 0; i < keys.Count; i++) + { + if (keys[i] != null) + { + modelBuffer[i].Key = keys[i]; + } + } + + foreach (var hook in _hooks) + { + foreach (var model in modelBuffer) + { + await hook.PostUpsert(@namespace, model, transaction).ConfigureAwait(false); + } + } + + await OnNonTransactionalEntitiesModified.BroadcastAsync(new EntitiesModifiedEventArgs + { + Keys = modelBuffer.Select(x => x.Key).ToArray(), + Metrics = metrics, + }, cancellationToken).ConfigureAwait(false); + } + else + { + foreach (var entity in entities) + { + if (entity.Key.Path.Last().IdTypeCase == Key.Types.PathElement.IdTypeOneofCase.None) + { + entity.Key = await db.AllocateIdAsync(entity.Key).ConfigureAwait(false); + } + } + + foreach (var hook in _hooks) + { + foreach (var entity in entities) + { + await hook.MutateEntityBeforeWrite(@namespace, entity).ConfigureAwait(false); + } + } + + foreach (var batch in entities.BatchInto(500)) + { + transaction.Transaction.Upsert(batch); + } + transaction.ModifiedModelsList.AddRange(modelBuffer); + transaction.QueuedPreCommitOperationsList.Add(async () => + { + foreach (var hook in _hooks) + { + foreach (var model in modelBuffer) + { + await hook.PostUpsert(@namespace, model, transaction).ConfigureAwait(false); + } + } + }); + + for (int i = 0; i < entities.Count; i++) + { + modelBuffer[i].Key = entities[i].Key; + } + } + + foreach (var model in modelBuffer) + { + yield return model; + } + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + } + } + + public async IAsyncEnumerable UpdateAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction, + RepositoryOperationMetrics? 
metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.update", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + var db = GetDbForNamespace(@namespace); + + List entities = new List(); + List modelBuffer = new List(); + HashSet seenKeys = new HashSet(); + KeyFactory? keyFactory = null; + await foreach (var model in models.ConfigureAwait(false)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (model == null || model.Key == null) + { + throw new ArgumentNullException(nameof(models), "Input models contained a null value or had a null Key on the model; filter nulls out of the input enumerable before calling UpdateAsync()."); + } + + if (keyFactory == null) + { + keyFactory = db.CreateKeyFactory(model.GetKind()); + } + + var entity = _entityConverter.To(@namespace, model, false, _ => keyFactory.CreateIncompleteKey()); + if (entity.Key.PartitionId.NamespaceId != @namespace) + { + throw new InvalidOperationException($"Cross-namespace data write attempted (UpdateAsync called with namespace '{@namespace}', but entity had namespace '{entity.Key.PartitionId.NamespaceId}')."); + } + + if (model.Key != null && seenKeys.Contains(entity.Key)) + { + continue; + } + + entities.Add(entity); + modelBuffer.Add(model); + seenKeys.Add(entity.Key); + } + + cancellationToken.ThrowIfCancellationRequested(); + + if (transaction == null) + { + foreach (var hook in _hooks) + { + foreach (var entity in entities) + { + await hook.MutateEntityBeforeWrite(@namespace, entity).ConfigureAwait(false); + } + } + + foreach (var batch in entities.BatchInto(500)) + { + await db.UpdateAsync(batch).ConfigureAwait(false); + } + + if (metrics != null) + { + metrics.DatastoreEntitiesWritten += entities.Count; + } + + foreach (var hook 
in _hooks) + { + foreach (var model in modelBuffer) + { + await hook.PostUpdate(@namespace, model, transaction).ConfigureAwait(false); + } + } + + await OnNonTransactionalEntitiesModified.BroadcastAsync(new EntitiesModifiedEventArgs + { + Keys = modelBuffer.Select(x => x.Key).ToArray(), + Metrics = metrics, + }, cancellationToken).ConfigureAwait(false); + } + else + { + foreach (var hook in _hooks) + { + foreach (var entity in entities) + { + await hook.MutateEntityBeforeWrite(@namespace, entity).ConfigureAwait(false); + } + } + + foreach (var batch in entities.BatchInto(500)) + { + transaction.Transaction.Update(batch); + } + transaction.ModifiedModelsList.AddRange(modelBuffer); + transaction.QueuedPreCommitOperationsList.Add(async () => + { + foreach (var hook in _hooks) + { + foreach (var model in modelBuffer) + { + await hook.PostUpdate(@namespace, model, transaction).ConfigureAwait(false); + } + } + }); + } + + foreach (var model in modelBuffer) + { + yield return model; + } + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + } + } + + public async Task DeleteAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.delete", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + var db = GetDbForNamespace(@namespace); + + List entities = new List(); + List modelBuffer = new List(); + HashSet seenKeys = new HashSet(); + KeyFactory? 
keyFactory = null; + await foreach (var model in models.ConfigureAwait(false)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (model == null || model.Key == null) + { + throw new ArgumentNullException(nameof(models), "Input models contained a null value or had a null Key on the model; filter nulls out of the input enumerable before calling DeleteAsync()."); + } + + if (keyFactory == null) + { + keyFactory = db.CreateKeyFactory(model.GetKind()); + } + + var entity = _entityConverter.To(@namespace, model, false, _ => keyFactory.CreateIncompleteKey()); + if (entity.Key.PartitionId.NamespaceId != @namespace) + { + throw new InvalidOperationException($"Cross-namespace data write attempted (DeleteAsync called with namespace '{@namespace}', but entity had namespace '{entity.Key.PartitionId.NamespaceId}')."); + } + + if (model.Key != null && seenKeys.Contains(entity.Key)) + { + continue; + } + + entities.Add(entity); + modelBuffer.Add(model); + seenKeys.Add(entity.Key); + } + + cancellationToken.ThrowIfCancellationRequested(); + + if (transaction == null) + { + foreach (var hook in _hooks) + { + foreach (var entity in entities) + { + await hook.MutateEntityBeforeWrite(@namespace, entity).ConfigureAwait(false); + } + } + + foreach (var batch in entities.BatchInto(500)) + { + await db.DeleteAsync(batch).ConfigureAwait(false); + } + if (metrics != null) + { + metrics.DatastoreEntitiesDeleted += entities.Count; + } + + foreach (var hook in _hooks) + { + foreach (var model in modelBuffer) + { + await hook.PostDelete(@namespace, model, transaction).ConfigureAwait(false); + } + } + + await OnNonTransactionalEntitiesModified.BroadcastAsync(new EntitiesModifiedEventArgs + { + Keys = modelBuffer.Select(x => x.Key).ToArray(), + Metrics = metrics, + }, cancellationToken).ConfigureAwait(false); + } + else + { + foreach (var hook in _hooks) + { + foreach (var entity in entities) + { + await hook.MutateEntityBeforeWrite(@namespace, entity).ConfigureAwait(false); + } + } + 
+ foreach (var batch in entities.BatchInto(500)) + { + transaction.Transaction.Delete(batch); + } + transaction.ModifiedModelsList.AddRange(modelBuffer); + transaction.QueuedPreCommitOperationsList.Add(async () => + { + foreach (var hook in _hooks) + { + foreach (var model in modelBuffer) + { + await hook.PostDelete(@namespace, model, transaction).ConfigureAwait(false); + } + } + }); + } + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + } + } + + public Task AllocateKeyAsync( + string @namespace, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.allocate_key", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + var referenceModel = new T(); + var db = GetDbForNamespace(@namespace); + var factory = db.CreateKeyFactory(referenceModel.GetKind()); + var key = factory.CreateIncompleteKey(); + return db.AllocateIdAsync(key); + } + } + + public Task GetKeyFactoryAsync( + string @namespace, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.datastore.get_key_factory", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + var referenceModel = new T(); + var db = GetDbForNamespace(@namespace); + return Task.FromResult(db.CreateKeyFactory(referenceModel.GetKind())); + } + } + + public async Task BeginTransactionAsync( + string @namespace, + TransactionMode mode, + RepositoryOperationMetrics? 
metrics, + CancellationToken cancellationToken) + { + using (_managedTracer.StartSpan($"db.datastore.begin_transaction", @namespace)) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + return new TopLevelModelTransaction( + @namespace, + await GetDbForNamespace(@namespace) + .BeginTransactionAsync( + mode == TransactionMode.ReadOnly + ? TransactionOptions.CreateReadOnly() + : TransactionOptions.CreateReadWrite()) + .ConfigureAwait(false), + this); + } + } + + public async Task RollbackAsync( + string @namespace, + IModelTransaction transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + ArgumentNullException.ThrowIfNull(transaction); + + if (transaction.IsNestedTransaction) + { + throw new ArgumentException("You can not called RollbackAsync on a nested transaction; check IsNestedTransaction before calling RollbackAsync!", nameof(transaction)); + } + + using (_managedTracer.StartSpan($"db.datastore.rollback", @namespace)) + { + if (transaction.HasCommitted) + { + throw new ArgumentException("This transaction has already been committed!", nameof(transaction)); + } + if (transaction.HasRolledBack) + { + throw new ArgumentException("This transaction has already been rolled back!", nameof(transaction)); + } + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + try + { + await transaction.Transaction.RollbackAsync().ConfigureAwait(false); + } + catch (RpcException ex) when (ex.IsTransactionExpiryException()) + { + // Rollback isn't needed, continue. + } + + transaction.HasRolledBack = true; + } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + } + } + + public async Task CommitAsync( + string @namespace, + IModelTransaction transaction, + RepositoryOperationMetrics? 
metrics, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + ArgumentNullException.ThrowIfNull(transaction); + + if (transaction.IsNestedTransaction) + { + throw new ArgumentException("You can not called CommitAsync on a nested transaction; check IsNestedTransaction before calling CommitAsync!", nameof(transaction)); + } + + using (_managedTracer.StartSpan($"db.datastore.commit", @namespace)) + { + if (transaction.HasCommitted) + { + throw new ArgumentException("This transaction has already been committed!", nameof(transaction)); + } + if (transaction.HasRolledBack) + { + throw new ArgumentException("This transaction has already been rolled back!", nameof(transaction)); + } + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + foreach (var action in transaction.QueuedPreCommitOperations) + { + await action().ConfigureAwait(false); + } + + await transaction.Transaction.CommitAsync().ConfigureAwait(false); + + transaction.HasCommitted = true; + + // TODO: Figure out the number of entities written/deleted and write them into the metrics. 
+ } + finally + { + if (metrics != null) + { + metrics.DatastoreElapsedMilliseconds = stopwatch!.ElapsedMilliseconds; + } + } + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Layers/IDatastoreRepositoryLayer.cs b/UET/Redpoint.CloudFramework/Repository/Layers/IDatastoreRepositoryLayer.cs new file mode 100644 index 00000000..003e4cda --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Layers/IDatastoreRepositoryLayer.cs @@ -0,0 +1,6 @@ +namespace Redpoint.CloudFramework.Repository.Layers +{ + internal interface IDatastoreRepositoryLayer : IRepositoryLayer + { + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Layers/IRedisCacheRepositoryLayer.cs b/UET/Redpoint.CloudFramework/Repository/Layers/IRedisCacheRepositoryLayer.cs new file mode 100644 index 00000000..f34c661d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Layers/IRedisCacheRepositoryLayer.cs @@ -0,0 +1,6 @@ +namespace Redpoint.CloudFramework.Repository.Layers +{ + internal interface IRedisCacheRepositoryLayer : IRepositoryLayer + { + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Layers/IRepositoryLayer.cs b/UET/Redpoint.CloudFramework/Repository/Layers/IRepositoryLayer.cs new file mode 100644 index 00000000..aebb242f --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Layers/IRepositoryLayer.cs @@ -0,0 +1,114 @@ +namespace Redpoint.CloudFramework.Repository.Layers +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Collections.Batching; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Metrics; + using Redpoint.CloudFramework.Repository.Pagination; + using Redpoint.CloudFramework.Repository.Transaction; + using Redpoint.Concurrency; + using System; + using System.Collections.Generic; + using System.Linq.Expressions; + using System.Threading; + using System.Threading.Tasks; + + internal class EntitiesModifiedEventArgs : EventArgs + { + /// + /// The keys that were modified or deleted. 
+ /// + public required Key[] Keys { get; init; } + + /// + /// If there were metrics passed into the original operation that caused this event to be + /// raised, this is the metrics object that was passed in. + /// + public required RepositoryOperationMetrics? Metrics { get; init; } + } + + internal interface IRepositoryLayer + { + /// + /// Fired when non-transactional entities are updated by this repository layer, and the parent layers should + /// flush any appropriate caches. + /// + /// Only non-transactional entities have this event fired, since the parent layer will be able to determine + /// when entities in transactions are affected since it will also be handling the CommitAsync() function. + /// + AsyncEvent OnNonTransactionalEntitiesModified { get; } + + /// + /// Executes a query asynchronously. + /// + /// The type of model to query for. + /// The namespace the model is located in; use string.Empty for the default namespace. + /// + /// The filter that must be met for the returned models. You can build composite queries using &&, and filter + /// on properties using the ==, less/greater than and less/greater or equal than operators. + /// + /// You can't use != or || operators in the where clause. + /// + /// If you want to add a "has ancestor" clause, use an expression like 'x.Key.HasAncestor(ancestor)', using + /// the HasAncestor extension method in Redpoint.CloudFramework.Repository.RepositoryExtensions. The + /// HasAncestor extension method can also be used outside queries if you want to evaluate ancestors + /// on the client. + /// + /// If you want to retrieve every model, use an expression like 'x => true'. + /// + /// + /// The sort order for the returned models. You can specify multiple sort orders using the bitwise | operator. + /// For example, the expression x.first > x.first | x.second < x.second means + /// "sort by 'first' descending, then sort by 'second' ascending". + /// + /// + /// The limit on the number of models to return. 
If null, an unlimited number of models can be fetched. + /// + /// The transaction this query is part of. + /// The metrics object to report to, or null if metrics data should not be tracked. + /// The cancellation token for the asynchronous operation. + /// An asynchronous enumerable that you can iterate over to receive results. + IBatchedAsyncEnumerable QueryAsync( + string @namespace, + Expression> where, + Expression>? order, + int? limit, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new(); + + Task> QueryPaginatedAsync( + string @namespace, + PaginatedQueryCursor cursor, + int limit, + Expression> where, + Expression>? order, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new(); + + Task LoadAsync(string @namespace, Key key, IModelTransaction? transaction, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken) where T : Model, new(); + + IBatchedAsyncEnumerable> LoadAsync(string @namespace, IAsyncEnumerable keys, IModelTransaction? transaction, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken) where T : Model, new(); + + IAsyncEnumerable> LoadAcrossNamespacesAsync(IAsyncEnumerable keys, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken) where T : Model, new(); + + IAsyncEnumerable CreateAsync(string @namespace, IAsyncEnumerable models, IModelTransaction? transaction, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken) where T : Model, new(); + + IAsyncEnumerable UpsertAsync(string @namespace, IAsyncEnumerable models, IModelTransaction? transaction, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken) where T : Model, new(); + + IAsyncEnumerable UpdateAsync(string @namespace, IAsyncEnumerable models, IModelTransaction? transaction, RepositoryOperationMetrics? 
metrics, CancellationToken cancellationToken) where T : Model, new(); + + Task DeleteAsync(string @namespace, IAsyncEnumerable models, IModelTransaction? transaction, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken) where T : Model, new(); + + Task AllocateKeyAsync(string @namespace, IModelTransaction? transaction, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken) where T : Model, new(); + + Task GetKeyFactoryAsync(string @namespace, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken) where T : Model, new(); + + Task BeginTransactionAsync(string @namespace, TransactionMode mode, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken); + + Task CommitAsync(string @namespace, IModelTransaction transaction, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken); + + Task RollbackAsync(string @namespace, IModelTransaction transaction, RepositoryOperationMetrics? metrics, CancellationToken cancellationToken); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Layers/RedisCacheRepositoryLayer.cs b/UET/Redpoint.CloudFramework/Repository/Layers/RedisCacheRepositoryLayer.cs new file mode 100644 index 00000000..15832956 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Layers/RedisCacheRepositoryLayer.cs @@ -0,0 +1,1799 @@ +namespace Redpoint.CloudFramework.Repository.Layers +{ + using Google.Cloud.Datastore.V1; + using Google.Type; + using Microsoft.Extensions.Caching.Distributed; + using Newtonsoft.Json; + using NodaTime; + using Redpoint.Collections; + using Redpoint.CloudFramework.Metric; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Converters.Expression; + using Redpoint.CloudFramework.Repository.Converters.Model; + using Redpoint.CloudFramework.Repository.Converters.Timestamp; + using Redpoint.CloudFramework.Repository.Metrics; + using Redpoint.CloudFramework.Repository.Pagination; + using 
Redpoint.CloudFramework.Repository.Transaction; + using Redpoint.CloudFramework.Tracing; + using StackExchange.Redis; + using System; + using System.Collections.Concurrent; + using System.Collections.Generic; + using System.Diagnostics; + using System.Linq; + using System.Linq.Expressions; + using System.Reflection; + using System.Runtime.CompilerServices; + using System.Security.Cryptography; + using System.Text; + using System.Threading; + using System.Threading.Tasks; + using static Google.Cloud.Datastore.V1.Key.Types; + using Redpoint.Concurrency; + using Redpoint.CloudFramework.Collections.Batching; + + internal class RedisCacheRepositoryLayer : IRedisCacheRepositoryLayer + { + private readonly IDatastoreRepositoryLayer _datastoreRepositoryLayer; + private readonly IConnectionMultiplexer _redis; + private readonly IInstantTimestampConverter _instantTimestampConverter; + private readonly IExpressionConverter _expressionConverter; + private readonly IModelConverter _jsonConverter; + private readonly IMetricService _metricService; + private readonly IManagedTracer _managedTracer; + + private const string _cacheQueries = "rcf/cache/queries"; + private const string _cacheLookups = "rcf/cache/lookups"; + private const string _cacheInvalidations = "rcf/cache/invalidations"; + + public RedisCacheRepositoryLayer( + IDatastoreRepositoryLayer datastoreRepositoryLayer, + IDistributedCache distributedCache, + IConnectionMultiplexer redis, + IInstantTimestampConverter instantTimestampConverter, + IExpressionConverter expressionConverter, + IModelConverter jsonConverter, + IMetricService metricService, + IManagedTracer managedTracer) + { + _datastoreRepositoryLayer = datastoreRepositoryLayer; + _redis = redis; + _instantTimestampConverter = instantTimestampConverter; + _expressionConverter = expressionConverter; + _jsonConverter = jsonConverter; + _metricService = metricService; + _managedTracer = managedTracer; + 
_datastoreRepositoryLayer.OnNonTransactionalEntitiesModified.Add(this.ClearEntitiesFromCache); + } + + private const string _purgeQueries = @" +local hashes = redis.call('SMEMBERS', KEYS[1]) +local cacheKeys = {} +local queriesCleared = 0 +for i, hash in ipairs(hashes) do + local key_queryCache = 'QUERYCACHE:' .. hash + local key_queryRefCount = 'QUERYRC:' .. hash + local key_queryWriterCount = 'QUERYWC:' .. hash + local key_queryData = 'QUERYDATA:' .. hash + + if redis.call('EXISTS', key_queryWriterCount) > 0 then + -- Someone is writing into this cache value right now, but we've already invalidated + -- the data they've partially pulled. Tell them their results are invalid. + -- They will clean up their partially written entries. + redis.call('SET', key_queryWriterCount, 'INVALIDATED') + else + local readers = tonumber(redis.call('GET', key_queryRefCount)) + if readers == nil then + readers = 0 + end + if readers > 0 then + -- There are current readers. Just delete the QUERYCACHE, and leave the + -- data to be cleaned up the by readers. + table.insert(cacheKeys, key_queryCache) + else + -- There are no current readers, we can just purge it all. + table.insert(cacheKeys, key_queryCache) + table.insert(cacheKeys, key_queryRefCount) + table.insert(cacheKeys, key_queryWriterCount) + table.insert(cacheKeys, key_queryData) + end + end + + queriesCleared = queriesCleared + 1 +end +if table.getn(cacheKeys) > 0 then + redis.call('UNLINK', unpack(cacheKeys)) +end +return queriesCleared +"; + + private const string _purgeColumns = @" +local queriesCleared = 0 +for x, key in ipairs(KEYS) do + local hashes = redis.call('SMEMBERS', key) + local cacheKeys = {} + for i, hash in ipairs(hashes) do + local key_queryCache = 'QUERYCACHE:' .. hash + local key_queryRefCount = 'QUERYRC:' .. hash + local key_queryWriterCount = 'QUERYWC:' .. hash + local key_queryData = 'QUERYDATA:' .. 
hash + + if redis.call('EXISTS', key_queryWriterCount) > 0 then + -- Someone is writing into this cache value right now, but we've already invalidated + -- the data they've partially pulled. Tell them their results are invalid. + -- They will clean up their partially written entries. + redis.call('SET', key_queryWriterCount, 'INVALIDATED') + else + local readers = tonumber(redis.call('GET', key_queryRefCount)) + if readers == nil then + readers = 0 + end + if readers > 0 then + -- There are current readers. Just delete the QUERYCACHE, and leave the + -- data to be cleaned up the by readers. + table.insert(cacheKeys, key_queryCache) + else + -- There are no current readers, we can just purge it all. + table.insert(cacheKeys, key_queryCache) + table.insert(cacheKeys, key_queryRefCount) + table.insert(cacheKeys, key_queryWriterCount) + table.insert(cacheKeys, key_queryData) + end + end + + queriesCleared = queriesCleared + 1 + end + if table.getn(cacheKeys) > 0 then + for i = 1, table.getn(cacheKeys), 1000 do + local e = math.min(table.getn(cacheKeys), i + 1000 - 1) + local unlinkBatch = {} + for a = i, e do + table.insert(unlinkBatch, cacheKeys[a]) + end + redis.call('UNLINK', unpack(unlinkBatch)) + end + end +end +return queriesCleared +"; + + private async Task ClearEntitiesFromCache(EntitiesModifiedEventArgs ev, CancellationToken cancellationToken) + { + using (_managedTracer.StartSpan($"db.rediscache.clear_entities_from_cache")) + { + var db = _redis.GetDatabase(); + + // Clear simple cache keys. + var keys = ev.Keys.Select(GetSimpleCacheKey).ToArray(); + for (int i = 0; i < keys.Length; i += 50) + { + var buffer = new RedisKey[(int)Math.Min(i + 50, keys.Length - i)]; + for (int x = 0; x < buffer.Length; x++) + { + buffer[x] = new RedisKey(keys[i + x]); + } + var removedCount = await db.KeyDeleteAsync(buffer).ConfigureAwait(false); + if (ev.Metrics != null) + { + ev.Metrics.CacheQueriesFlushed += removedCount; + } + } + + // Clear complex caches. 
+ foreach (var key in ev.Keys.Select(GetSimpleCachedInKey)) + { + using (_managedTracer.StartSpan("db.rediscache.cache.purge_queries", $"{key}")) + { + var queriesFlushed = await db.ScriptEvaluateAsync(_purgeQueries, new[] { new RedisKey(key) }).ConfigureAwait(false); + if (ev.Metrics != null) + { + ev.Metrics.CacheQueriesFlushed += ((long)queriesFlushed); + } + } + } + } + } + + private string SerializePathElement(PathElement pe) + { + var kind = pe.Kind.Contains('-', StringComparison.Ordinal) ? Convert.ToBase64String(Encoding.UTF8.GetBytes(pe.Kind)) : pe.Kind; + if (pe.IdTypeCase == PathElement.IdTypeOneofCase.None) + { + return $"{kind}-none"; + } + else if (pe.IdTypeCase == PathElement.IdTypeOneofCase.Id) + { + return $"{kind}-id-{pe.Id}"; + } + else if (pe.IdTypeCase == PathElement.IdTypeOneofCase.Name) + { + return $"{kind}-name-{Convert.ToBase64String(Encoding.UTF8.GetBytes(pe.Name))}"; + } + throw new NotImplementedException(); + } + + private string GetSimpleCacheKey(Key key) + { + ArgumentNullException.ThrowIfNull(key); + if (key.PartitionId == null) throw new ArgumentNullException("key.PartitionId"); + if (key.PartitionId.ProjectId == null) throw new ArgumentNullException("key.PartitionId.ProjectId"); + if (key.PartitionId.NamespaceId == null) throw new ArgumentNullException("key.PartitionId.NamespaceId"); + if (key.Path == null) throw new ArgumentNullException("key.Path"); + return $"KEY:{key.PartitionId.ProjectId}/{key.PartitionId.NamespaceId}:{string.Join(":", key.Path.Select(SerializePathElement))}"; + } + + private string GetSimpleCachedInKey(Key key) + { + ArgumentNullException.ThrowIfNull(key); + if (key.PartitionId == null) throw new ArgumentNullException("key.PartitionId"); + if (key.PartitionId.ProjectId == null) throw new ArgumentNullException("key.PartitionId.ProjectId"); + if (key.PartitionId.NamespaceId == null) throw new ArgumentNullException("key.PartitionId.NamespaceId"); + if (key.Path == null) throw new 
ArgumentNullException("key.Path"); + return $"KEYCACHEDIN:{key.PartitionId.ProjectId}/{key.PartitionId.NamespaceId}:{string.Join(":", key.Path.Select(SerializePathElement))}"; + } + + private class ComplexCacheKeyFilterJson + { + [JsonProperty("field", Order = 1)] + public required string Field { get; set; } + + [JsonProperty("op", Order = 2)] + public required string Op { get; set; } + + [JsonProperty("value", Order = 3)] + public required string Value { get; set; } + } + + private class ComplexCacheKeySortJson + { + [JsonProperty("field", Order = 1)] + public required string Field { get; set; } + + [JsonProperty("direction", Order = 2)] + public required string Direction { get; set; } + } + + private class ComplexCacheKeyGeoJson + { + [JsonProperty("field", Order = 1)] + public required string Field { get; set; } + + [JsonProperty("op", Order = 2)] + public required string Op { get; set; } + + [JsonProperty("centerPointLat", Order = 3)] + public required double CenterPointLat { get; set; } + + [JsonProperty("centerPointLng", Order = 4)] + public required double CenterPointLng { get; set; } + + [JsonProperty("distanceKm", Order = 5)] + public required double DistanceKm { get; set; } + } + + private class ComplexCacheKeyJson + { + [JsonProperty("namespace", Order = 1)] + public string? Namespace { get; set; } + + [JsonProperty("kind", Order = 2)] + public string? Kind { get; set; } + + [JsonProperty("filter", Order = 3)] + public ComplexCacheKeyFilterJson[]? Filter { get; set; } + + [JsonProperty("sort", Order = 4)] + public ComplexCacheKeySortJson[]? Sort { get; set; } + + [JsonProperty("geo", Order = 5)] + public ComplexCacheKeyGeoJson? Geo { get; set; } + + [JsonProperty("limit", Order = 5)] + public int? Limit { get; set; } + } + + private Task<(string cacheHash, string[] columns)> GetComplexCacheHashAndColumns( + string @namespace, + Expression> where, + Expression>? order, + int? 
limit) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.get_complex_cache_hash_and_columns", $"{@namespace},{typeof(T).Name}")) + { + GeoQueryParameters? geoQuery = null; + var referenceModel = new T(); + var hasAncestorQuery = false; + var filter = _expressionConverter.SimplifyFilter(_expressionConverter.ConvertExpressionToFilter(where.Body, where.Parameters[0], referenceModel, ref geoQuery, ref hasAncestorQuery)); + var sort = order == null ? null : _expressionConverter.ConvertExpressionToOrder(order.Body, order.Parameters[0], referenceModel, ref geoQuery)?.ToList(); + + Filter[] filters; + if (filter == null) + { + filters = Array.Empty(); + } + else + { + switch (filter.FilterTypeCase) + { + case Filter.FilterTypeOneofCase.CompositeFilter: + filters = filter.CompositeFilter.Filters.ToArray(); + break; + case Filter.FilterTypeOneofCase.PropertyFilter: + filters = new[] { filter }; + break; + case Filter.FilterTypeOneofCase.None: + default: + filters = Array.Empty(); + break; + } + } + + var columns = new HashSet(); + if (filters.Length == 0) + { + columns.Add($"KEYALL:{@namespace}:{referenceModel.GetKind()}"); + } + foreach (var f in filters) + { + columns.Add($"KEYCOLUMN:{@namespace}:{referenceModel.GetKind()}:{f.PropertyFilter.Property.Name}"); + } + if (sort != null) + { + foreach (var s in sort) + { + columns.Add($"KEYCOLUMN:{@namespace}:{referenceModel.GetKind()}:{s.Property.Name}"); + } + } + + var cacheKeyJson = new ComplexCacheKeyJson + { + Namespace = @namespace, + Kind = referenceModel.GetKind(), + Filter = filters.OrderBy(x => x.PropertyFilter.Property.Name).Select(x => new ComplexCacheKeyFilterJson + { + Field = x.PropertyFilter.Property.Name, + Op = RedisCacheRepositoryLayer.SerializeOp(x.PropertyFilter.Op), + Value = SerializeValue(x.PropertyFilter.Value), + }).ToArray(), + Sort = sort == null ? 
null : sort.Select(x => new ComplexCacheKeySortJson + { + Field = x.Property.Name, + Direction = x.Direction == PropertyOrder.Types.Direction.Ascending ? "asc" : "desc", + }).ToArray(), + Geo = geoQuery == null ? null : new ComplexCacheKeyGeoJson + { + Field = geoQuery.GeoFieldName, + CenterPointLat = geoQuery.CenterPoint.Latitude, + CenterPointLng = geoQuery.CenterPoint.Longitude, + DistanceKm = geoQuery.DistanceKm, + Op = "within-km", + }, + Limit = limit, + }; + var cacheHash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(cacheKeyJson)))).ToLowerInvariant(); + return Task.FromResult((cacheHash, columns.ToArray())); + } + } + + private static string SerializeOp(PropertyFilter.Types.Operator op) + { + switch (op) + { + case PropertyFilter.Types.Operator.Unspecified: + return "unspecified"; + case PropertyFilter.Types.Operator.LessThan: + return "lt"; + case PropertyFilter.Types.Operator.LessThanOrEqual: + return "le"; + case PropertyFilter.Types.Operator.GreaterThan: + return "gt"; + case PropertyFilter.Types.Operator.GreaterThanOrEqual: + return "ge"; + case PropertyFilter.Types.Operator.HasAncestor: + return "has-ancestor"; + case PropertyFilter.Types.Operator.Equal: + return "eq"; + default: + throw new NotImplementedException(); + } + } + + private string SerializeValue(Value value) + { + switch (value.ValueTypeCase) + { + case Value.ValueTypeOneofCase.NullValue: + return "null"; + case Value.ValueTypeOneofCase.None: + return "none"; + case Value.ValueTypeOneofCase.BooleanValue: + return "bool:" + JsonConvert.SerializeObject(value.BooleanValue); + case Value.ValueTypeOneofCase.IntegerValue: + return "int:" + JsonConvert.SerializeObject(value.IntegerValue); + case Value.ValueTypeOneofCase.DoubleValue: + return "double:" + JsonConvert.SerializeObject(value.DoubleValue); + case Value.ValueTypeOneofCase.KeyValue: + return 
$"key:{value.KeyValue.PartitionId.ProjectId}/{value.KeyValue.PartitionId.NamespaceId}/{string.Join("/", value.KeyValue.Path.Select(SerializePathElement))}"; + case Value.ValueTypeOneofCase.EntityValue: + throw new NotSupportedException(); + case Value.ValueTypeOneofCase.GeoPointValue: + return $"geo:{value.GeoPointValue.Latitude}:{value.GeoPointValue.Longitude}"; + case Value.ValueTypeOneofCase.ArrayValue: + throw new NotSupportedException(); + case Value.ValueTypeOneofCase.TimestampValue: + return $"ts:{_instantTimestampConverter.FromDatastoreValueToNodaTimeInstant(value.TimestampValue)!.Value.ToUnixTimeTicks()}"; + case Value.ValueTypeOneofCase.StringValue: + return $"string:" + JsonConvert.SerializeObject(value.StringValue); + case Value.ValueTypeOneofCase.BlobValue: + throw new NotSupportedException(); + default: + throw new NotImplementedException(); + } + } + + public AsyncEvent OnNonTransactionalEntitiesModified => _datastoreRepositoryLayer.OnNonTransactionalEntitiesModified; + + private const string _tryObtainComplexCache = @" +if redis.call('EXISTS', KEYS[4]) > 0 then + -- There is another writer writing into this cache value. Do not conflict + -- with it. + return 'nocache-nostore' +end + +if redis.call('EXISTS', KEYS[1]) == 0 then + -- The key that controls expiry doesn't exist. See if we should clean it up + -- in case the associated data is stale. + if redis.call('EXISTS', KEYS[3]) == 0 then + redis.call('UNLINK', KEYS[2]) + end + + -- No cached data available, or it's stale. + local readers = tonumber(redis.call('GET', KEYS[2])) + if readers == nil then + readers = 0 + end + if readers > 0 then + -- The cached data that is present is stale, and we can't store our + -- fresh results because there's another reader currently using the + -- cache. + return 'nocache-nostore' + else + -- There's no cached data, or it's stale with no readers. Read from + -- Datastore and write into the cache. 
Tell other queries that we + -- now have an exclusive writer lock on this cache entry. + redis.call('SET', KEYS[4], 'WRITING') + return 'nocache-store' + end +end + +-- Increment the reference counter to prevent the expiry key from causing the +-- data to be released. Also, mark the keys for persistence while we're reading +-- cache data so they don't get deleted. +redis.call('INCR', KEYS[2]) +redis.call('PERSIST', KEYS[1]) +redis.call('PERSIST', KEYS[2]) +redis.call('PERSIST', KEYS[3]) + +-- Cached data is available and we obtained a handle to iterate on it. +return 'cache' +"; + private const string _releaseComplexCache = @" +-- Decrement the cache reference counter. +redis.call('DECR', KEYS[2]) + +-- If the cache reference counter is at 0, then there are no operations currently +-- reading from this cache data, so turn back on expiry. +local readers = tonumber(redis.call('GET', KEYS[2])) +if readers == 0 then + redis.call('EXPIRE', KEYS[1], 120) + redis.call('UNLINK', KEYS[2]) + redis.call('EXPIRE', KEYS[3], 240) +end +return readers +"; + private const string _writeCachedEntityIntoCache = @" +if redis.call('GET', KEYS[2]) ~= ARGV[2] then + -- Read data is now stale, do not write to cache. + return +end +for i = 3, table.getn(ARGV) do + -- Add the entity JSON to the cache data. + redis.call('LPUSH', KEYS[1], ARGV[i]) + + -- Add the query hash to the entity's cache key, which allows the query + -- data to be purged when the entity is modified. + redis.call('SADD', KEYS[i], ARGV[1]) +end +"; + private const string _finalizeCacheWriting = @" +-- Check to see if the read data was invalidated through a concurrent write. +if redis.call('GET', KEYS[5]) ~= ARGV[2] then + redis.call('UNLINK', KEYS[1], KEYS[2], KEYS[3], KEYS[4]) + return 'invalidated' +end + +-- Check to see if our write was explicitly invalidated. 
+if redis.call('GET', KEYS[4]) == 'INVALIDATED' then + redis.call('UNLINK', KEYS[1], KEYS[2], KEYS[3], KEYS[4]) + return 'invalidated' +end + +-- Add our query to the specified column keys. +for i = 6, table.getn(KEYS) do + redis.call('SADD', KEYS[i], ARGV[1]) +end + +-- Our write was finalized properly. Set up the expiries. +redis.call('SETEX', KEYS[1], 120, '0') +redis.call('EXPIRE', KEYS[2], 240) +redis.call('EXPIRE', KEYS[3], 240) + +-- Release our writer lock. +redis.call('UNLINK', KEYS[4]) +return 'written' +"; + private const string _writeSingleCachedEntityIntoCache = @" +if redis.call('GET', KEYS[2]) ~= ARGV[2] then + -- Read data is now stale, do not write to cache. + return 'invalidated' +end + +-- Add the entity to the cache. +redis.call('SET', KEYS[1], ARGV[1]) +return 'written' +"; + + private async Task<(RedisKey key, string lastWrite)> GetLastWriteAsync(IDatabase cache, string @namespace, Model model) + { + string queryLastWriteValue = "0"; + var queryLastWriteKey = new RedisKey($"LASTWRITE:{model.GetKind()}"); + using (_managedTracer.StartSpan("db.rediscache.load.get_last_write", $"{@namespace},{model.GetType().Name}")) + { + var lastWriteValue = await cache.StringGetAsync(queryLastWriteKey).ConfigureAwait(false); + if (lastWriteValue.HasValue) + { + queryLastWriteValue = (string)lastWriteValue!; + } + else + { + queryLastWriteValue = "0"; + } + } + return (queryLastWriteKey, queryLastWriteValue); + } + + private static async Task IncrementLastWriteAsync(IDatabase cache, Model model) + { + await cache.StringIncrementAsync($"LASTWRITE:{model.GetKind()}").ConfigureAwait(false); + } + + private static async Task IncrementLastWriteAsync(IDatabase cache, string kind) + { + await cache.StringIncrementAsync($"LASTWRITE:{kind}").ConfigureAwait(false); + } + + public IBatchedAsyncEnumerable QueryAsync( + string @namespace, + Expression> where, + Expression>? order, + int? limit, + IModelTransaction? transaction, + RepositoryOperationMetrics? 
metrics, + CancellationToken cancellationToken) where T : Model, new() + => BatchedQueryAsync( + @namespace, + where, + order, + limit, + transaction, + metrics, + cancellationToken).AsBatchedAsyncEnumerable(); + + private async IAsyncEnumerable> BatchedQueryAsync( + string @namespace, + Expression> where, + Expression>? order, + int? limit, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.query", $"{@namespace},{typeof(T).Name}")) + { + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + if (transaction != null) + { + // Transactional queries must hit Datastore so that Datastore can enforce transactionality. If + // an entity that was read is written to before the current transaction finishes, Datastore will + // force our application to retry the transaction. If we were to hit the cache in these scenarios + // Datastore would not be able to detect concurrency issues and would not throw the appropriate + // exception. 
+ if (metrics != null) + { + metrics.CacheDidRead = false; + metrics.CacheDidWrite = false; + metrics.CacheCompatible = false; + } + await foreach (var batch in _datastoreRepositoryLayer.QueryAsync( + @namespace, + where, + order, + limit, + transaction, + metrics, + cancellationToken).AsBatches().ConfigureAwait(false)) + { + yield return batch; + } + } + else + { + var (cacheHash, columns) = await GetComplexCacheHashAndColumns(@namespace, where, order, limit).ConfigureAwait(false); + + var queryCache = new RedisKey($"QUERYCACHE:{cacheHash}"); + var queryRefCount = new RedisKey($"QUERYRC:{cacheHash}"); + var queryWriterCount = new RedisKey($"QUERYWC:{cacheHash}"); + var queryData = new RedisKey($"QUERYDATA:{cacheHash}"); + + if (metrics != null) + { + metrics.CacheCompatible = true; + metrics.CacheHash = cacheHash; + } + + var cache = _redis.GetDatabase(); + var (queryLastWriteKey, queryLastWriteValue) = await GetLastWriteAsync(cache, @namespace, new T()).ConfigureAwait(false); + RedisResult obtainCacheResult; + using (_managedTracer.StartSpan("db.rediscache.cache.try_obtain_complex_cache", $"{@namespace},{typeof(T).Name}")) + { + obtainCacheResult = await cache.ScriptEvaluateAsync(_tryObtainComplexCache, keys: new[] + { + queryCache, + queryRefCount, + queryData, + queryWriterCount + }).ConfigureAwait(false); + } + await _metricService.AddPoint(_cacheQueries, 1, null, new Dictionary + { + { "kind", (new T()).GetKind() }, + { "namespace", @namespace }, + { "result", (string)obtainCacheResult! }, + }).ConfigureAwait(false); + switch ((string)obtainCacheResult!) + { + case "cache": + { + if (metrics != null) + { + metrics.CacheDidRead = true; + } + try + { + // Read the entity data from Redis instead of Datastore. 
+ var length = await cache.ListLengthAsync(queryData).ConfigureAwait(false); + for (var start = 0; start < length; start += 200) + { + var stop = Math.Min(start + 200, length); + var results = await cache.ListRangeAsync(queryData, start, stop - 1).ConfigureAwait(false); + var batchResults = new List(); + foreach (var result in results) + { + var model = _jsonConverter.From(@namespace, result!); + if (model != null) + { + batchResults.Add(model); + } + } + yield return batchResults; + } + } + finally + { + using (_managedTracer.StartSpan("db.rediscache.cache.release_complex_cache", $"{@namespace},{typeof(T).Name}")) + { + await cache.ScriptEvaluateAsync(_releaseComplexCache, keys: new[] + { + queryCache, + queryRefCount, + queryData, + }).ConfigureAwait(false); + } + } + } + break; + case "nocache-store": + { + await cache.KeyDeleteAsync(new[] { + queryCache, + queryRefCount, + queryData, + }).ConfigureAwait(false); + + // TODO: We should have a better strategy for partial queries; that is queries that have things like + // FirstOrDefaultAsync() applied to them. We currently pull all of the data from Datastore that would + // match the query, even if something like FirstOrDefault stops elements being pulled for the caller. + // + // This is because our caching logic can't yet handle a partial dataset in the cache. A future strategy + // for handling partial queries (e.g. FirstOrDefaultAsync) without pulling all of the data during + // "nocache-store": + // + // - If we fall into the finally here mark the result set as partial. + // - If we're pulling a partial result set in the "cache" block above, and we go beyond the entities + // available in Redis, we then run a Datastore query. + // - We have to try to obtain a CACHEWC lock on the data set we're reading cached results for. That is, + // there can't be any other readers that are also concurrently reading the cached data. 
+ // - If there are no other readers, then "cache" becomes "nocache-store", but excluding entities we + // were already able to enumerate from Redis. We store further entities as we come across them. + // - If there are other readers, then "cache" becomes "nocache-nostore", but excluding entities we + // were already able to enumerate from Redis. + + var pullSemaphore = new SemaphoreSlim(0); + var pullBatches = new ConcurrentQueue>(); + Exception? pullException = null; + var puller = Task.Run(async () => + { + var didFinish = false; + try + { + // There is no cache data for this query, and we're going to store it as we + // read our data from Datastore. + await foreach (var batch in _datastoreRepositoryLayer.QueryAsync( + @namespace, + where, + order, + limit, + transaction, + metrics, + CancellationToken.None).AsBatches().ConfigureAwait(false)) + { + using (_managedTracer.StartSpan("db.rediscache.cache.batch_process", $"{@namespace},{typeof(T).Name}")) + { + var keys = new List + { + queryData, + queryLastWriteKey, + }; + var values = new List + { + new RedisValue(cacheHash), + (RedisValue)queryLastWriteValue, + }; + foreach (var entity in batch) + { + var cachedEntity = _jsonConverter.To(@namespace, entity, false, null); + + keys.Add(new RedisKey(GetSimpleCachedInKey(entity.Key))); + values.Add(new RedisValue(cachedEntity)); + } + + using (_managedTracer.StartSpan("db.rediscache.cache.write_cached_entity_to_cache", $"{@namespace},{typeof(T).Name}")) + { + await cache.ScriptEvaluateAsync( + _writeCachedEntityIntoCache, + keys.ToArray(), + values.ToArray()).ConfigureAwait(false); + } + } + + using (_managedTracer.StartSpan("db.rediscache.cache.batch_emit", $"{@namespace},{typeof(T).Name}")) + { + if (pullBatches != null) + { + pullBatches.Enqueue(batch); + pullSemaphore.Release(); + } + } + } + + RedisResult finalizeResult; + using (_managedTracer.StartSpan("db.rediscache.cache.finalize_cache_writing", $"{@namespace},{typeof(T).Name}")) + { + finalizeResult = await 
cache.ScriptEvaluateAsync( + _finalizeCacheWriting, + new[] + { + queryCache, + queryRefCount, + queryData, + queryWriterCount, + queryLastWriteKey, + }.Concat(columns.Select(x => new RedisKey(x))).ToArray(), + new[] + { + new RedisValue(cacheHash), + (RedisValue)queryLastWriteValue, + }).ConfigureAwait(false); + } + if (((string)finalizeResult!) != "invalidated") + { + if (metrics != null) + { + metrics.CacheDidWrite = true; + } + } + + didFinish = true; + } + catch (Exception ex) + { + pullException = ex; + } + finally + { + // Not sure what state the cache is in because we might have partially written results. + if (!didFinish) + { + await cache.KeyDeleteAsync(new[] { + queryCache, + queryRefCount, + queryData, + queryWriterCount + }).ConfigureAwait(false); + } + } + }, cancellationToken); + + try + { + while (!cancellationToken.IsCancellationRequested) + { + if (await Task.WhenAny(pullSemaphore.WaitAsync(cancellationToken), puller).ConfigureAwait(false) == puller) + { + // The puller has finished. Yield the remaining elements. + while (pullBatches.TryDequeue(out IReadOnlyList? next)) + { + cancellationToken.ThrowIfCancellationRequested(); + yield return next; + } + yield break; + } + else + { + // We got another entity in the queue. + while (!puller.IsCompleted) + { + cancellationToken.ThrowIfCancellationRequested(); + if (pullBatches.TryDequeue(out IReadOnlyList? next)) + { + yield return next; + } + else + { + // Queue contention; try again. + await Task.Yield(); + continue; + } + } + } + } + } + finally + { + // Wait for the puller to just push everything into the cache, and ask it to not store + // into the concurrent queue for us. + pullBatches = null; + await puller.ConfigureAwait(false); + } + } + break; + case "nocache-nostore": + { + // There is another reader on the existing cache value at the moment, but + // the contents of the cache is stale (because a write happened after the + // other reader started reading, but before we started reading). 
We just go + // straight to the database, and don't store our result in Redis (because + // we need to wait for all readers to finish before storing a new cache value). + await foreach (var batch in _datastoreRepositoryLayer.QueryAsync( + @namespace, + where, + order, + limit, + transaction, + metrics, + cancellationToken).AsBatches().ConfigureAwait(false)) + { + yield return batch; + } + } + break; + } + } + } + finally + { + if (metrics != null) + { + metrics.CacheElapsedMilliseconds = stopwatch!.ElapsedMilliseconds - metrics.DatastoreElapsedMilliseconds; + } + } + } + } + + public async Task> QueryPaginatedAsync( + string @namespace, + PaginatedQueryCursor cursor, + int limit, + Expression> where, + Expression>? order, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.query_paginated", $"{@namespace},{typeof(T).Name}")) + { + if (metrics != null) + { + metrics.CacheCompatible = false; + } + return await _datastoreRepositoryLayer.QueryPaginatedAsync( + @namespace, + cursor, + limit, + where, + order, + transaction, + metrics, + cancellationToken).ConfigureAwait(false); + } + } + + public async Task LoadAsync( + string @namespace, + Key key, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.load", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + ArgumentNullException.ThrowIfNull(key); + + if (metrics != null) + { + metrics.CacheCompatible = true; + } + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + if (transaction != null) + { + // Transactional loads must hit Datastore so that Datastore can enforce transactionality. 
If + // an entity that was read is written to before the current transaction finishes, Datastore will + // force our application to retry the transaction. If we were to hit the cache in these scenarios + // Datastore would not be able to detect concurrency issues and would not throw the appropriate + // exception. + if (metrics != null) + { + metrics.CacheDidRead = false; + metrics.CacheDidWrite = false; + metrics.CacheCompatible = false; + } + return await _datastoreRepositoryLayer.LoadAsync( + @namespace, + key, + transaction, + metrics, + cancellationToken).ConfigureAwait(false); + } + else + { + // This value ensures that we don't write stale data to the + // cache if there's been a write since we started running. + var cache = _redis.GetDatabase(); + var (queryLastWriteKey, queryLastWriteValue) = await GetLastWriteAsync(cache, @namespace, new T()).ConfigureAwait(false); + + var cacheKey = GetSimpleCacheKey(key); + var cacheEntity = await cache.StringGetAsync(cacheKey).ConfigureAwait(false); + if (cacheEntity.HasValue) + { + await _metricService.AddPoint(_cacheLookups, 1, null, new Dictionary + { + { "kind", (new T()).GetKind() }, + { "namespace", @namespace }, + { "result", "hit" }, + }).ConfigureAwait(false); + if (metrics != null) + { + metrics.CacheDidRead = true; + } + var model = _jsonConverter.From( + @namespace, + (string)cacheEntity!); + if (model != null) + { + return model; + } + } + + var keyFactory = await _datastoreRepositoryLayer.GetKeyFactoryAsync(@namespace, metrics, cancellationToken).ConfigureAwait(false); + var entity = await _datastoreRepositoryLayer.LoadAsync( + @namespace, + key, + transaction, + metrics, + cancellationToken).ConfigureAwait(false); + cacheEntity = _jsonConverter.To( + @namespace, + entity, + false, + _ => keyFactory.CreateIncompleteKey()); + RedisResult cacheResult; + using (_managedTracer.StartSpan("db.rediscache.load.write_cached_entity_to_cache", $"{@namespace},{typeof(T).Name}")) + { + cacheResult = await 
cache.ScriptEvaluateAsync( + _writeSingleCachedEntityIntoCache, + new RedisKey[] + { + cacheKey, + queryLastWriteKey, + }, + new RedisValue[] + { + new RedisValue(cacheEntity!), + (RedisValue)queryLastWriteValue, + }).ConfigureAwait(false); + } + await _metricService.AddPoint(_cacheLookups, 1, null, new Dictionary + { + { "kind", (new T()).GetKind() }, + { "namespace", @namespace }, + { "result", "miss" }, + }).ConfigureAwait(false); + if (((string)cacheResult!) != "invalidated") + { + if (metrics != null) + { + metrics.CacheDidWrite = true; + } + } + return entity; + } + } + finally + { + if (metrics != null) + { + metrics.CacheElapsedMilliseconds = stopwatch!.ElapsedMilliseconds - metrics.DatastoreElapsedMilliseconds; + } + } + } + } + + public IBatchedAsyncEnumerable> LoadAsync( + string @namespace, + IAsyncEnumerable keys, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + => BatchedLoadAsync( + @namespace, + keys, + transaction, + metrics, + cancellationToken).AsBatchedAsyncEnumerable(); + + private async IAsyncEnumerable>> BatchedLoadAsync( + string @namespace, + IAsyncEnumerable keys, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.load", $"{@namespace},{typeof(T).Name}")) + { + ArgumentNullException.ThrowIfNull(@namespace, nameof(@namespace)); + + if (metrics != null) + { + metrics.CacheCompatible = true; + } + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + if (transaction != null) + { + // Transactional loads must hit Datastore so that Datastore can enforce transactionality. If + // an entity that was read is written to before the current transaction finishes, Datastore will + // force our application to retry the transaction. 
If we were to hit the cache in these scenarios + // Datastore would not be able to detect concurrency issues and would not throw the appropriate + // exception. + if (metrics != null) + { + metrics.CacheDidRead = false; + metrics.CacheDidWrite = false; + metrics.CacheCompatible = false; + } + + await foreach (var batch in _datastoreRepositoryLayer.LoadAsync( + @namespace, + keys, + transaction, + metrics, + cancellationToken).AsBatches().ConfigureAwait(false)) + { + yield return batch; + } + } + else + { + var keyFactory = await _datastoreRepositoryLayer.GetKeyFactoryAsync(@namespace, metrics, cancellationToken).ConfigureAwait(false); + + // This value ensures that we don't write stale data to the + // cache if there's been a write since we started running. + var cache = _redis.GetDatabase(); + var (queryLastWriteKey, queryLastWriteValue) = await GetLastWriteAsync(cache, @namespace, new T()).ConfigureAwait(false); + + var cacheEvaluation = keys.SelectFastAwait(async key => + { + if (key == null) + { + throw new ArgumentNullException(nameof(keys), "One or more keys passed to LoadAsync was null."); + } + + var cacheKey = GetSimpleCacheKey(key); + var cacheEntity = await cache.StringGetAsync(cacheKey).ConfigureAwait(false); + return (key: key, cacheEntity: cacheEntity.HasValue ? (string)cacheEntity! : null); + }); + + var hits = 0; + var misses = 0; + var entities = cacheEvaluation + .Classify<(Key key, string? cacheEntity), KeyValuePair>(x => x.cacheEntity != null ? 
"hit" : "miss") + .AndForClassification("hit", x => + { + hits++; + if (metrics != null) + { + metrics.CacheDidRead = true; + } + return new KeyValuePair( + x.key, + _jsonConverter.From(@namespace, x.cacheEntity!)); + }) + .AndForClassificationStream("miss", inputs => + { + misses++; + return + _datastoreRepositoryLayer.LoadAsync( + @namespace, + inputs.Select(x => x.key), + transaction, + metrics, + cancellationToken) + .SelectFastAwait(async v => + { + // Store in the cache as we get the results from Datastore. + var cacheKey = GetSimpleCacheKey(v.Key); + var cacheEntity = _jsonConverter.To( + @namespace, + v.Value, + false, + _ => keyFactory.CreateIncompleteKey()); + RedisResult cacheResult = await cache.ScriptEvaluateAsync( + _writeSingleCachedEntityIntoCache, + new RedisKey[] + { + cacheKey, + queryLastWriteKey, + }, + new RedisValue[] + { + new RedisValue(cacheEntity), + (RedisValue)queryLastWriteValue, + }).ConfigureAwait(false); + if (((string)cacheResult!) != "invalidated") + { + if (metrics != null) + { + metrics.CacheDidWrite = true; + } + } + return v; + }); + }); + if (hits > 0) + { + await _metricService.AddPoint(_cacheLookups, hits, null, new Dictionary + { + { "kind", (new T()).GetKind() }, + { "namespace", @namespace }, + { "result", "hit" }, + }).ConfigureAwait(false); + } + if (misses > 0) + { + await _metricService.AddPoint(_cacheLookups, misses, null, new Dictionary + { + { "kind", (new T()).GetKind() }, + { "namespace", @namespace }, + { "result", "hit" }, + }).ConfigureAwait(false); + } + + // @note: The classify API doesn't give us a good way of propagating the batches from + // Datastore yet; just emulate some batches here. 
+ await foreach (var entity in entities.BatchInto(200).WithCancellation(cancellationToken)) + { + yield return entity; + } + } + } + finally + { + if (metrics != null) + { + metrics.CacheElapsedMilliseconds = stopwatch!.ElapsedMilliseconds - metrics.DatastoreElapsedMilliseconds; + } + } + } + } + + public async IAsyncEnumerable> LoadAcrossNamespacesAsync( + IAsyncEnumerable keys, + RepositoryOperationMetrics? metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.load_across_namespaces", $"{typeof(T).Name}")) + { + if (metrics != null) + { + metrics.CacheCompatible = true; + } + + Stopwatch? stopwatch = null; + if (metrics != null) + { + stopwatch = Stopwatch.StartNew(); + } + try + { + // This value ensures that we don't write stale data to the + // cache if there's been a write since we started running. + var cache = _redis.GetDatabase(); + var (queryLastWriteKey, queryLastWriteValue) = await GetLastWriteAsync(cache, "(cross-namespace)", new T()).ConfigureAwait(false); + + var cacheEvaluation = keys.SelectFastAwait(async key => + { + if (key == null) + { + throw new ArgumentNullException(nameof(keys), "One or more keys passed to LoadAcrossNamespacesAsync was null."); + } + + var cacheKey = GetSimpleCacheKey(key); + var cacheEntity = await cache.StringGetAsync(cacheKey).ConfigureAwait(false); + return (key: key, cacheEntity: cacheEntity.HasValue ? (string)cacheEntity! : null); + }); + + var hits = 0; + var misses = 0; + var entities = cacheEvaluation + .Classify<(Key key, string? cacheEntity), KeyValuePair>(x => x.cacheEntity != null ? 
"hit" : "miss") + .AndForClassification("hit", x => + { + hits++; + if (metrics != null) + { + metrics.CacheDidRead = true; + } + return new KeyValuePair( + x.key, + _jsonConverter.From(x.key.PartitionId.NamespaceId, x.cacheEntity!)); + }) + .AndForClassificationStream("miss", inputs => + { + misses++; + return + _datastoreRepositoryLayer.LoadAcrossNamespacesAsync( + inputs.Select(x => x.key), + metrics, + cancellationToken) + .SelectFastAwait(async v => + { + // Store in the cache as we get the results from Datastore. + var keyFactory = await _datastoreRepositoryLayer.GetKeyFactoryAsync(v.Key.PartitionId.NamespaceId, metrics, cancellationToken).ConfigureAwait(false); + var cacheKey = GetSimpleCacheKey(v.Key); + var cacheEntity = _jsonConverter.To( + v.Key.PartitionId.NamespaceId, + v.Value, + false, + _ => keyFactory.CreateIncompleteKey()); + RedisResult cacheResult = await cache.ScriptEvaluateAsync( + _writeSingleCachedEntityIntoCache, + new RedisKey[] + { + cacheKey, + queryLastWriteKey, + }, + new RedisValue[] + { + new RedisValue(cacheEntity), + (RedisValue)queryLastWriteValue, + }).ConfigureAwait(false); + if (((string)cacheResult!) 
!= "invalidated") + { + if (metrics != null) + { + metrics.CacheDidWrite = true; + } + } + return v; + }); + }); + if (hits > 0) + { + await _metricService.AddPoint(_cacheLookups, hits, null, new Dictionary + { + { "kind", (new T()).GetKind() }, + { "namespace", "(cross-namespace)" }, + { "result", "hit" }, + }).ConfigureAwait(false); + } + if (misses > 0) + { + await _metricService.AddPoint(_cacheLookups, misses, null, new Dictionary + { + { "kind", (new T()).GetKind() }, + { "namespace", "(cross-namespace)" }, + { "result", "hit" }, + }).ConfigureAwait(false); + } + + await foreach (var entity in entities.WithCancellation(cancellationToken)) + { + yield return entity; + } + } + finally + { + if (metrics != null) + { + metrics.CacheElapsedMilliseconds = stopwatch!.ElapsedMilliseconds - metrics.DatastoreElapsedMilliseconds; + } + } + } + } + + public async IAsyncEnumerable CreateAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction, + RepositoryOperationMetrics? 
metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.create", $"{@namespace},{typeof(T).Name}")) + { + var columns = new HashSet(); + try + { + await foreach (var entity in _datastoreRepositoryLayer.CreateAsync( + @namespace, + models, + transaction, + metrics, + cancellationToken).ConfigureAwait(false)) + { + if (transaction == null) + { + columns.Add($"KEYALL:{@namespace}:{entity.GetKind()}"); + foreach (var kv in entity.GetTypes()) + { + columns.Add($"KEYCOLUMN:{@namespace}:{entity.GetKind()}:{kv.Key}"); + } + } + + yield return entity; + } + } + finally + { + if (columns.Count > 0) + { + using (_managedTracer.StartSpan("db.rediscache.cache.purge_columns", $"{@namespace},{typeof(T).Name}")) + { + var db = _redis.GetDatabase(); + await RedisCacheRepositoryLayer.IncrementLastWriteAsync(db, new T()).ConfigureAwait(false); + var queriesFlushed = await db.ScriptEvaluateAsync(_purgeColumns, columns.Select(x => new RedisKey(x)).ToArray()).ConfigureAwait(false); + if (metrics != null) + { + metrics.CacheQueriesFlushed += ((long)queriesFlushed); + await _metricService.AddPoint(_cacheInvalidations, ((long)queriesFlushed), null, new Dictionary + { + { "kind", (new T()).GetKind() }, + }).ConfigureAwait(false); + } + } + } + } + } + } + + public async IAsyncEnumerable UpsertAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction, + RepositoryOperationMetrics? 
metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.upsert", $"{@namespace},{typeof(T).Name}")) + { + var columns = new HashSet(); + try + { + await foreach (var entity in _datastoreRepositoryLayer.UpsertAsync( + @namespace, + models, + transaction, + metrics, + cancellationToken).ConfigureAwait(false)) + { + if (transaction == null) + { + columns.Add($"KEYALL:{@namespace}:{entity.GetKind()}"); + foreach (var kv in entity.GetTypes()) + { + // We must assume upserts are creates, therefore we don't compare values. + columns.Add($"KEYCOLUMN:{@namespace}:{entity.GetKind()}:{kv.Key}"); + } + } + + yield return entity; + } + } + finally + { + if (columns.Count > 0) + { + using (_managedTracer.StartSpan("db.rediscache.cache.purge_columns", $"{@namespace},{typeof(T).Name}")) + { + var db = _redis.GetDatabase(); + await RedisCacheRepositoryLayer.IncrementLastWriteAsync(db, new T()).ConfigureAwait(false); + var queriesFlushed = await db.ScriptEvaluateAsync(_purgeColumns, columns.Select(x => new RedisKey(x)).ToArray()).ConfigureAwait(false); + if (metrics != null) + { + metrics.CacheQueriesFlushed += ((long)queriesFlushed); + await _metricService.AddPoint(_cacheInvalidations, ((long)queriesFlushed), null, new Dictionary + { + { "kind", (new T()).GetKind() }, + }).ConfigureAwait(false); + } + } + } + } + } + } + + public async IAsyncEnumerable UpdateAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction, + RepositoryOperationMetrics? 
metrics, + [EnumeratorCancellation] CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.update", $"{@namespace},{typeof(T).Name}")) + { + var columns = new HashSet(); + try + { + await foreach (var entity in _datastoreRepositoryLayer.UpdateAsync( + @namespace, + models, + transaction, + metrics, + cancellationToken).ConfigureAwait(false)) + { + if (transaction == null) + { + columns.Add($"KEYALL:{@namespace}:{entity.GetKind()}"); + + foreach (var kv in entity.GetTypes()) + { + var wasColumnModified = false; + if (entity._originalData == null || !entity._originalData.TryGetValue(kv.Key, out object? oldValue)) + { + wasColumnModified = true; + } + else + { + var newValue = entity.GetType().GetProperty(kv.Key, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)!.GetValue(entity); + if (newValue == null) + { + wasColumnModified = oldValue != null; + } + else if (oldValue == null) + { + wasColumnModified = true; + } + else if (newValue.GetType() != oldValue.GetType()) + { + wasColumnModified = true; + } + else + { + switch (oldValue) + { + case bool v: + wasColumnModified = v != (bool)newValue; + break; + case string v: + wasColumnModified = v != (string)newValue; + break; + case Instant v: + wasColumnModified = v != (Instant)newValue; + break; + case long v: + wasColumnModified = v != (long)newValue; + break; + case double v: + wasColumnModified = v != (double)newValue; + break; + case LatLng v: + wasColumnModified = v.Latitude != ((LatLng)newValue).Latitude || v.Longitude != ((LatLng)newValue).Longitude; + break; + case Key v: + wasColumnModified = !v.Equals((Key)newValue); + break; + default: + // Don't know how to compare this type yet, assume modified. 
+ wasColumnModified = true; + break; + } + } + } + + if (wasColumnModified) + { + columns.Add($"KEYCOLUMN:{@namespace}:{entity.GetKind()}:{kv.Key}"); + } + } + } + + yield return entity; + } + } + finally + { + if (columns.Count > 0) + { + using (_managedTracer.StartSpan("db.rediscache.cache.purge_columns", $"{@namespace},{typeof(T).Name}")) + { + var db = _redis.GetDatabase(); + await RedisCacheRepositoryLayer.IncrementLastWriteAsync(db, new T()).ConfigureAwait(false); + var queriesFlushed = await db.ScriptEvaluateAsync(_purgeColumns, columns.Select(x => new RedisKey(x)).ToArray()).ConfigureAwait(false); + if (metrics != null) + { + metrics.CacheQueriesFlushed += ((long)queriesFlushed); + await _metricService.AddPoint(_cacheInvalidations, ((long)queriesFlushed), null, new Dictionary + { + { "kind", (new T()).GetKind() }, + }).ConfigureAwait(false); + } + } + } + } + } + } + + public async Task DeleteAsync( + string @namespace, + IAsyncEnumerable models, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.delete", $"{@namespace},{typeof(T).Name}")) + { + await _datastoreRepositoryLayer.DeleteAsync( + @namespace, + models, + transaction, + metrics, + cancellationToken).ConfigureAwait(false); + } + } + + public Task AllocateKeyAsync( + string @namespace, + IModelTransaction? transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.allocate_key", $"{@namespace},{typeof(T).Name}")) + { + return _datastoreRepositoryLayer.AllocateKeyAsync(@namespace, transaction, metrics, cancellationToken); + } + } + + public Task GetKeyFactoryAsync( + string @namespace, + RepositoryOperationMetrics? 
metrics, + CancellationToken cancellationToken) where T : Model, new() + { + using (_managedTracer.StartSpan($"db.rediscache.get_key_factory", $"{@namespace},{typeof(T).Name}")) + { + return _datastoreRepositoryLayer.GetKeyFactoryAsync(@namespace, metrics, cancellationToken); + } + } + + public Task BeginTransactionAsync( + string @namespace, + TransactionMode mode, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) + { + using (_managedTracer.StartSpan($"db.rediscache.begin_transaction", @namespace)) + { + return _datastoreRepositoryLayer.BeginTransactionAsync(@namespace, mode, metrics, cancellationToken); + } + } + + public Task RollbackAsync( + string @namespace, + IModelTransaction transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) + { + using (_managedTracer.StartSpan($"db.rediscache.rollback", @namespace)) + { + return _datastoreRepositoryLayer.RollbackAsync(@namespace, transaction, metrics, cancellationToken); + } + } + + public async Task CommitAsync( + string @namespace, + IModelTransaction transaction, + RepositoryOperationMetrics? metrics, + CancellationToken cancellationToken) + { + using (_managedTracer.StartSpan($"db.rediscache.commit", @namespace)) + { + await _datastoreRepositoryLayer.CommitAsync(@namespace, transaction, metrics, cancellationToken).ConfigureAwait(false); + + var db = _redis.GetDatabase(); + + // For all the model types involved, prevent stale cache stores. + foreach (var kind in transaction.ModifiedModels.Select(x => x.GetKind()).Distinct()) + { + await RedisCacheRepositoryLayer.IncrementLastWriteAsync(db, kind).ConfigureAwait(false); ; + } + + // Clear simple cache keys. 
+ var keys = transaction.ModifiedModels.Select(x => GetSimpleCacheKey(x.Key)).ToArray(); + for (int i = 0; i < keys.Length; i += 50) + { + var buffer = new RedisKey[(int)Math.Min(i + 50, keys.Length - i)]; + for (int x = 0; x < buffer.Length; x++) + { + buffer[x] = new RedisKey(keys[i + x]); + } + var removedCount = await db.KeyDeleteAsync(buffer).ConfigureAwait(false); + if (metrics != null) + { + metrics.CacheQueriesFlushed += removedCount; + await _metricService.AddPoint(_cacheInvalidations, removedCount, null, new Dictionary + { + { "kind", "(transaction commit)" }, + }).ConfigureAwait(false); + } + } + + // Clear complex caches. + foreach (var key in transaction.ModifiedModels.Select(x => GetSimpleCachedInKey(x.Key))) + { + using (_managedTracer.StartSpan("db.rediscache.cache.purge_queries", $"{key}")) + { + var queriesFlushed = await db.ScriptEvaluateAsync(_purgeQueries, new[] { new RedisKey(key) }).ConfigureAwait(false); + if (metrics != null) + { + metrics.CacheQueriesFlushed += ((long)queriesFlushed); + await _metricService.AddPoint(_cacheInvalidations, ((long)queriesFlushed), null, new Dictionary + { + { "kind", "(transaction commit)" }, + }).ConfigureAwait(false); + } + } + } + + // Clear column keys. + var columns = new HashSet(); + foreach (var entity in transaction.ModifiedModels) + { + columns.Add($"KEYALL:{@namespace}:{entity.GetKind()}"); + + foreach (var kv in entity.GetTypes()) + { + var wasColumnModified = false; + if (entity._originalData == null || !entity._originalData.TryGetValue(kv.Key, out object? 
oldValue)) + { + wasColumnModified = true; + } + else + { + var newValue = entity.GetType().GetProperty(kv.Key, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)!.GetValue(entity); + if (newValue == null) + { + wasColumnModified = oldValue != null; + } + else if (oldValue == null) + { + wasColumnModified = true; + } + else if (newValue.GetType() != oldValue.GetType()) + { + wasColumnModified = true; + } + else + { + switch (oldValue) + { + case bool v: + wasColumnModified = v != (bool)newValue; + break; + case string v: + wasColumnModified = v != (string)newValue; + break; + case Instant v: + wasColumnModified = v != (Instant)newValue; + break; + case long v: + wasColumnModified = v != (long)newValue; + break; + case double v: + wasColumnModified = v != (double)newValue; + break; + case LatLng v: + wasColumnModified = v.Latitude != ((LatLng)newValue).Latitude || v.Longitude != ((LatLng)newValue).Longitude; + break; + case Key v: + wasColumnModified = !v.Equals((Key)newValue); + break; + default: + // Don't know how to compare this type yet, assume modified. 
+ wasColumnModified = true; + break; + } + } + } + + if (wasColumnModified) + { + columns.Add($"KEYCOLUMN:{@namespace}:{entity.GetKind()}:{kv.Key}"); + } + } + } + if (columns.Count > 0) + { + using (_managedTracer.StartSpan("db.rediscache.cache.purge_columns")) + { + var queriesFlushed = await db.ScriptEvaluateAsync(_purgeColumns, columns.Select(x => new RedisKey(x)).ToArray()).ConfigureAwait(false); + if (metrics != null) + { + metrics.CacheQueriesFlushed += ((long)queriesFlushed); + await _metricService.AddPoint(_cacheInvalidations, ((long)queriesFlushed), null, new Dictionary + { + { "kind", "(transaction commit)" }, + }).ConfigureAwait(false); + } + } + } + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Legacy/GlobalRepositoryLegacyExtensions.cs b/UET/Redpoint.CloudFramework/Repository/Legacy/GlobalRepositoryLegacyExtensions.cs new file mode 100644 index 00000000..b57cc580 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Legacy/GlobalRepositoryLegacyExtensions.cs @@ -0,0 +1,356 @@ +namespace Redpoint.CloudFramework.Datastore +{ + using Google.Cloud.Datastore.V1; + using Google.Protobuf; + using Google.Protobuf.Collections; + using NodaTime; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository; + using Redpoint.CloudFramework.Repository.Datastore; + using Redpoint.CloudFramework.Repository.Layers; + using Redpoint.CloudFramework.Repository.Transaction; + using Redpoint.Collections; + using System; + using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; + using System.Linq; + using System.Linq.Expressions; + using System.Reflection; + using System.Threading; + using System.Threading.Tasks; + + [Obsolete("These API methods are obsolete, and you should upgrade to the latest IRepository APIs.")] + public static class GlobalRepositoryLegacyExtensions + { + private static IRepositoryLayer R(IGlobalRepository globalRepository) + { + return 
((DatastoreGlobalRepository)globalRepository).Layer; + } + + private static Expression ConvertFilterToExpression<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(IGlobalRepository globalRepository, Filter filter, ParameterExpression parameterExpr) + { + if (filter == null) + { + return Expression.Constant(true); + } + else if (filter.FilterTypeCase == Filter.FilterTypeOneofCase.PropertyFilter) + { + var targetProperty = typeof(T).GetProperty(filter.PropertyFilter.Property.Name == "__key__" ? "Key" : filter.PropertyFilter.Property.Name, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (targetProperty == null) + { + throw new InvalidOperationException($"Unable to get target property named '{filter.PropertyFilter.Property.Name}' on type '{typeof(T).FullName}'"); + } + var memberAccessExpr = Expression.MakeMemberAccess(parameterExpr, targetProperty); + Expression constantExpr; + if (filter.PropertyFilter.Value.ValueTypeCase == Value.ValueTypeOneofCase.TimestampValue) + { + constantExpr = Expression.Constant(((DatastoreGlobalRepository)globalRepository)._instantTimestampConverter.FromDatastoreValueToNodaTimeInstant(filter.PropertyFilter.Value), typeof(Instant?)); + } + else + { + constantExpr = Expression.Convert(Expression.Constant(filter.PropertyFilter.Value), targetProperty.PropertyType); + } + + switch (filter.PropertyFilter.Op) + { + case PropertyFilter.Types.Operator.LessThan: + return Expression.MakeBinary(ExpressionType.LessThan, memberAccessExpr, constantExpr); + case PropertyFilter.Types.Operator.LessThanOrEqual: + return Expression.MakeBinary(ExpressionType.LessThanOrEqual, memberAccessExpr, constantExpr); + case PropertyFilter.Types.Operator.GreaterThan: + return Expression.MakeBinary(ExpressionType.GreaterThan, memberAccessExpr, constantExpr); + case PropertyFilter.Types.Operator.GreaterThanOrEqual: + return 
Expression.MakeBinary(ExpressionType.GreaterThanOrEqual, memberAccessExpr, constantExpr); + case PropertyFilter.Types.Operator.Equal: + return Expression.MakeBinary(ExpressionType.Equal, memberAccessExpr, constantExpr); + case PropertyFilter.Types.Operator.HasAncestor: + return Expression.Call( + null, + typeof(RepositoryExtensions).GetMethod(nameof(RepositoryExtensions.HasAncestor))!, + memberAccessExpr, + constantExpr); + default: + throw new InvalidOperationException(); + } + } + else + { + Expression chainedExpr = ConvertFilterToExpression(globalRepository, filter.CompositeFilter.Filters[0], parameterExpr); + for (int i = 1; i < filter.CompositeFilter.Filters.Count; i++) + { + chainedExpr = Expression.AndAlso(chainedExpr, ConvertFilterToExpression(globalRepository, filter.CompositeFilter.Filters[1], parameterExpr)); + } + return chainedExpr; + } + } + + private static Expression>? ConvertOrderToExpression<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(IGlobalRepository globalRepository, RepeatedField order, ParameterExpression parameterExpr) + { + if (order.Count == 0) + { + return null; + } + else + { + Expression? chainedExpr = null; + for (int i = 0; i < order.Count; i++) + { + var targetProperty = typeof(T).GetProperty(order[i].Property.Name, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (targetProperty == null) + { + throw new InvalidOperationException($"Unable to get target property named '{order[i].Property.Name}' on type '{typeof(T).FullName}'"); + } + var memberAccessExpr = Expression.MakeMemberAccess(parameterExpr, targetProperty); + var orderExpr = order[i].Direction == PropertyOrder.Types.Direction.Ascending ? 
+ Expression.MakeBinary(ExpressionType.LessThan, memberAccessExpr, memberAccessExpr) : + Expression.MakeBinary(ExpressionType.GreaterThan, memberAccessExpr, memberAccessExpr); + if (chainedExpr == null) + { + chainedExpr = orderExpr; + } + else + { + chainedExpr = Expression.MakeBinary(ExpressionType.Or, chainedExpr, orderExpr); + } + } + return Expression.Lambda>(chainedExpr!, parameterExpr); + } + } + + [Obsolete("Use QueryAsync instead.")] + public static Task> CreateQuery(this IGlobalRepository globalRepository, string @namespace) where T : Model, new() + { + return Task.FromResult(new ModelQuery(@namespace, new Query(new T().GetKind()))); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task> RunUncachedQuery<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(this IGlobalRepository globalRepository, string @namespace, ModelQuery query, + ReadOptions.Types.ReadConsistency readConsistency, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + ArgumentNullException.ThrowIfNull(query); + + var parameterExpr = Expression.Parameter(typeof(T)); + var conditionExpr = ConvertFilterToExpression(globalRepository, query.Query.Filter, parameterExpr); + var expr = Expression.Lambda>(conditionExpr, parameterExpr); + + var order = ConvertOrderToExpression(globalRepository, query.Query.Order, parameterExpr); + + var results = await R(globalRepository).QueryAsync(@namespace, expr, order, query.Query.Limit, transaction, null, CancellationToken.None).ToListAsync().ConfigureAwait(false); + + return new MappedDatastoreQueryResults + { + EndCursor = ByteString.Empty, + EndCursorForClients = null, + Entities = results, + MoreResults = QueryResultBatch.Types.MoreResultsType.NoMoreResults, + }; + } + + [Obsolete("Use GetKeyFactoryAsync instead.")] + public static async Task GetKeyFactory(this IGlobalRepository globalRepository, string @namespace) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + return await R(globalRepository).GetKeyFactoryAsync(@namespace, null, CancellationToken.None).ConfigureAwait(false); + } + + [Obsolete("Use LoadAsync instead.")] + public static async Task> LoadMany(this IGlobalRepository globalRepository, string @namespace, Key[] keys, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + return await R(globalRepository).LoadAsync(@namespace, keys.ToAsyncEnumerable(), transaction, null, CancellationToken.None).ToSafeDictionaryAsync(k => k.Key, v => v.Value).ConfigureAwait(false); + } + + [Obsolete("Use LoadAcrossNamespacesAsync instead.")] + public static async Task> LoadManyAcrossNamespaces(this IGlobalRepository globalRepository, Key[] keys) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + return await R(globalRepository).LoadAcrossNamespacesAsync(keys.ToAsyncEnumerable(), null, CancellationToken.None).ToSafeDictionaryAsync(k => k.Key, v => v.Value).ConfigureAwait(false); + } + + [Obsolete("Use LoadAsync instead.")] + public static async Task LoadOneBy(this IGlobalRepository globalRepository, string @namespace, Key key, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + return await R(globalRepository).LoadAsync(@namespace, key, transaction, null, CancellationToken.None).ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task LoadOneBy<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T, TValue>(this IGlobalRepository globalRepository, string @namespace, string field, TValue value, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + var parameterExpr = Expression.Parameter(typeof(T)); + var targetProperty = typeof(T).GetProperty(field, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (targetProperty == null) + { + throw new InvalidOperationException($"Unable to get target property named '{field}' on type '{typeof(T).FullName}'"); + } + var accessExpr = Expression.MakeMemberAccess(parameterExpr, targetProperty); + BinaryExpression equalExpr; + if (value != null && accessExpr.Type.Name.StartsWith("Nullable`1", StringComparison.Ordinal) && !value.GetType().Name.StartsWith("Nullable`1", StringComparison.Ordinal)) + { + equalExpr = Expression.Equal(accessExpr, Expression.Convert(Expression.Constant(value), accessExpr.Type)); + } + else + { + equalExpr = Expression.Equal(accessExpr, Expression.Constant(value)); + } + var expr = Expression.Lambda>(equalExpr, parameterExpr); + + return await R(globalRepository).QueryAsync(@namespace, expr, null, 1, transaction, null, CancellationToken.None).FirstOrDefaultAsync().ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task> LoadAllBy<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T, TValue>(this IGlobalRepository globalRepository, string @namespace, string field, TValue? value, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + var parameterExpr = Expression.Parameter(typeof(T)); + var targetProperty = typeof(T).GetProperty(field, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (targetProperty == null) + { + throw new InvalidOperationException($"Unable to get target property named '{field}' on type '{typeof(T).FullName}'"); + } + var accessExpr = Expression.MakeMemberAccess(parameterExpr, targetProperty); + BinaryExpression equalExpr; + if (value != null && accessExpr.Type.Name.StartsWith("Nullable`1", StringComparison.Ordinal) && !value.GetType().Name.StartsWith("Nullable`1", StringComparison.Ordinal)) + { + equalExpr = Expression.Equal(accessExpr, Expression.Convert(Expression.Constant(value), accessExpr.Type)); + } + else + { + equalExpr = Expression.Equal(accessExpr, Expression.Constant(value)); + } + var expr = Expression.Lambda>(equalExpr, parameterExpr); + + return await R(globalRepository).QueryAsync(@namespace, expr, null, null, transaction, null, CancellationToken.None).ToListAsync().ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task> LoadAll(this IGlobalRepository globalRepository, string @namespace, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + return await R(globalRepository).QueryAsync(@namespace, x => true, null, null, transaction, null, CancellationToken.None).ToListAsync().ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task> LoadAllUncached(this IGlobalRepository globalRepository, string @namespace, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + // NOTE: This should forcibly use the datastore repository layer instead of the redis cache repository layer, but also + // we should just delete this method because it's no longer necessary. + return await R(globalRepository).QueryAsync(@namespace, x => true, null, null, transaction, null, CancellationToken.None).ToListAsync().ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async IAsyncEnumerable LoadAllByFiltersUncached<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(this IGlobalRepository globalRepository, string @namespace, Filter filter) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + var parameterExpr = Expression.Parameter(typeof(T)); + var conditionExpr = ConvertFilterToExpression(globalRepository, filter, parameterExpr); + var expr = Expression.Lambda>(conditionExpr, parameterExpr); + + await foreach (var model in R(globalRepository).QueryAsync(@namespace, expr, null, null, null, null, CancellationToken.None).ConfigureAwait(false)) + { + yield return model; + } + } + + [Obsolete("Use CreateAsync instead.")] + public static async Task Create(this IGlobalRepository globalRepository, string @namespace, T model, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + await R(globalRepository).CreateAsync(@namespace, new[] { model }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstAsync().ConfigureAwait(false); + } + + [Obsolete("Use CreateAsync instead.")] + public static async Task CreateMany(this IGlobalRepository globalRepository, string @namespace, IList models) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + return await R(globalRepository).CreateAsync(@namespace, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToArrayAsync().ConfigureAwait(false); + } + + [Obsolete("Use UpsertAsync instead.")] + public static async Task Upsert(this IGlobalRepository globalRepository, string @namespace, T model, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + await R(globalRepository).UpsertAsync(@namespace, new[] { model }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstAsync().ConfigureAwait(false); + } + + [Obsolete("Use UpdateAsync instead.")] + public static async Task Update(this IGlobalRepository globalRepository, string @namespace, T model, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + await R(globalRepository).UpdateAsync(@namespace, new[] { model }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).FirstAsync().ConfigureAwait(false); + } + + [Obsolete("Use UpdateAsync instead.")] + public static async Task UpdateMany(this IGlobalRepository globalRepository, string @namespace, IList models) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + await R(globalRepository).UpdateAsync(@namespace, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ToListAsync().ConfigureAwait(false); + } + + [Obsolete("Use DeleteAsync instead.")] + public static async Task Delete(this IGlobalRepository globalRepository, string @namespace, T model, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + await R(globalRepository).DeleteAsync(@namespace, new[] { model }.ToAsyncEnumerable(), transaction, null, CancellationToken.None).ConfigureAwait(false); + } + + [Obsolete("Use DeleteAsync instead.")] + public static async Task DeleteMany(this IGlobalRepository globalRepository, string @namespace, IList models) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + await R(globalRepository).DeleteAsync(@namespace, models.ToAsyncEnumerable(), null, null, CancellationToken.None).ConfigureAwait(false); + } + + [Obsolete("Use GetKeyFactoryAsync and create the named key manually.")] + public static async Task CreateNamedKey(this IGlobalRepository globalRepository, string @namespace, string name) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(globalRepository); + + var factory = await R(globalRepository).GetKeyFactoryAsync(@namespace, null, CancellationToken.None).ConfigureAwait(false); + return factory.CreateKey(name); + } + + [Obsolete("Use BeginTransactionAsync instead.")] + public static async 
Task BeginTransaction(this IGlobalRepository globalRepository, string @namespace) + { + ArgumentNullException.ThrowIfNull(globalRepository); + + return await R(globalRepository).BeginTransactionAsync(@namespace, TransactionMode.ReadWrite, null, CancellationToken.None).ConfigureAwait(false); + } + + [Obsolete("Use CommitAsync instead.")] + public static async Task Commit(this IGlobalRepository globalRepository, string @namespace, IModelTransaction transaction) + { + ArgumentNullException.ThrowIfNull(globalRepository); + + await R(globalRepository).CommitAsync(@namespace, transaction, null, CancellationToken.None).ConfigureAwait(false); + } + + [Obsolete("Use RollbackAsync instead.")] + public static async Task Rollback(this IGlobalRepository globalRepository, string @namespace, IModelTransaction transaction) + { + ArgumentNullException.ThrowIfNull(globalRepository); + + await R(globalRepository).RollbackAsync(@namespace, transaction, null, CancellationToken.None).ConfigureAwait(false); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Legacy/MappedDatastoreQueryResults.cs b/UET/Redpoint.CloudFramework/Repository/Legacy/MappedDatastoreQueryResults.cs new file mode 100644 index 00000000..9c6cfa03 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Legacy/MappedDatastoreQueryResults.cs @@ -0,0 +1,15 @@ +namespace Redpoint.CloudFramework.Datastore +{ + using System.Collections.Generic; + using Google.Cloud.Datastore.V1; + using Google.Protobuf; + using Redpoint.CloudFramework.Models; + + public sealed class MappedDatastoreQueryResults where T : Model, new() + { + public required ByteString EndCursor { get; set; } + public required string? 
EndCursorForClients { get; set; } + public required QueryResultBatch.Types.MoreResultsType MoreResults { get; set; } + public required ICollection Entities { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Legacy/ModelQuery.cs b/UET/Redpoint.CloudFramework/Repository/Legacy/ModelQuery.cs new file mode 100644 index 00000000..627d684a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Legacy/ModelQuery.cs @@ -0,0 +1,17 @@ +namespace Redpoint.CloudFramework.Datastore +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + + public class ModelQuery where T : Model, new() + { + public ModelQuery(string @namespace, Query query) + { + Namespace = @namespace; + Query = query; + } + + public string Namespace { get; } + public Query Query { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Legacy/RepositoryLegacyExtensions.cs b/UET/Redpoint.CloudFramework/Repository/Legacy/RepositoryLegacyExtensions.cs new file mode 100644 index 00000000..6f51cf7d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Legacy/RepositoryLegacyExtensions.cs @@ -0,0 +1,171 @@ +namespace Redpoint.CloudFramework.Datastore +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository; + using Redpoint.CloudFramework.Repository.Datastore; + using Redpoint.CloudFramework.Repository.Transaction; + using System; + using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; + using System.Threading.Tasks; + +#pragma warning disable CS0618 // Type or member is obsolete + [Obsolete("These API methods are obsolete, and you should upgrade to the latest IRepository APIs.")] + public static class RepositoryLegacyExtensions + { + private static DatastoreGlobalRepository G(IRepository repository) + { + return (DatastoreGlobalRepository)((DatastoreRepository)repository)._globalDatastore; + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task> 
CreateQuery(this IRepository repository) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).CreateQuery(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false)).ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task> RunUncachedQuery<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(this IRepository repository, ModelQuery query, + ReadOptions.Types.ReadConsistency readConsistency, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).RunUncachedQuery(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), query, readConsistency, transaction).ConfigureAwait(false); + } + + [Obsolete("Use GetKeyFactoryAsync instead.")] + public static async Task GetKeyFactory(this IRepository repository) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).GetKeyFactory(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false)).ConfigureAwait(false); + } + + [Obsolete("Use LoadAsync instead.")] + public static async Task> LoadMany(this IRepository repository, Key[] keys, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).LoadMany(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), keys, transaction).ConfigureAwait(false); + } + + [Obsolete("Use LoadAsync instead.")] + public static async Task LoadOneBy(this IRepository repository, Key key, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).LoadOneBy(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), key, transaction).ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task LoadOneBy<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T, TValue>(this IRepository repository, string field, TValue value, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).LoadOneBy(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), field, value, transaction).ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task> LoadAllBy<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T, TValue>(this IRepository repository, string field, TValue value, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).LoadAllBy(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), field, value, transaction).ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task> LoadAll(this IRepository repository, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).LoadAll(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), transaction).ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async Task> LoadAllUncached(this IRepository repository, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).LoadAllUncached(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), transaction).ConfigureAwait(false); + } + + [Obsolete("Use QueryAsync instead.")] + public static async IAsyncEnumerable LoadAllByFiltersUncached<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] T>(this IRepository repository, Filter filter) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + await foreach (var result in G(repository).LoadAllByFiltersUncached(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), filter).ConfigureAwait(false)) + { + yield return result; + } + } + + [Obsolete("Use CreateAsync instead.")] + public static async Task Create(this IRepository repository, T model, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + await G(repository).Create(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), model, transaction).ConfigureAwait(false); + } + + [Obsolete("Use CreateAsync instead.")] + public static async Task CreateMany(this IRepository repository, IList models) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).CreateMany(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), models).ConfigureAwait(false); + } + + [Obsolete("Use UpsertAsync instead.")] + public static async Task Upsert(this IRepository repository, T model, IModelTransaction? 
transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + await G(repository).Upsert(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), model, transaction).ConfigureAwait(false); + } + + [Obsolete("Use UpdateAsync instead.")] + public static async Task Update(this IRepository repository, T model, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + await G(repository).Update(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), model, transaction).ConfigureAwait(false); + } + + [Obsolete("Use DeleteAsync instead.")] + public static async Task Delete(this IRepository repository, T model, IModelTransaction? transaction = null) where T : Model, new() + { + ArgumentNullException.ThrowIfNull(repository); + + await G(repository).Delete(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), model, transaction).ConfigureAwait(false); + } + + [Obsolete("Use BeginTransactionAsync instead.")] + public static async Task BeginTransaction(this IRepository repository) + { + ArgumentNullException.ThrowIfNull(repository); + + return await G(repository).BeginTransaction(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false)).ConfigureAwait(false); + } + + [Obsolete("Use CommitAsync instead.")] + public static async Task Commit(this IRepository repository, IModelTransaction transaction) + { + ArgumentNullException.ThrowIfNull(repository); + + await G(repository).Commit(await ((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), transaction).ConfigureAwait(false); + } + + [Obsolete("Use RollbackAsync instead.")] + public static async Task Rollback(this IRepository repository, IModelTransaction transaction) + { + ArgumentNullException.ThrowIfNull(repository); + + await G(repository).Rollback(await 
((DatastoreRepository)repository).GetDatastoreNamespace().ConfigureAwait(false), transaction).ConfigureAwait(false); + } + } +#pragma warning restore CS0618 // Type or member is obsolete +} diff --git a/UET/Redpoint.CloudFramework/Repository/Metrics/RepositoryOperationMetrics.cs b/UET/Redpoint.CloudFramework/Repository/Metrics/RepositoryOperationMetrics.cs new file mode 100644 index 00000000..41ad1b0c --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Metrics/RepositoryOperationMetrics.cs @@ -0,0 +1,68 @@ +namespace Redpoint.CloudFramework.Repository.Metrics +{ + /// + /// If you construct this class and pass an instance to a repository or global repository call, it will return + /// metrics about the call, such as whether or not the cache was hit. + /// + public class RepositoryOperationMetrics + { + /// + /// Did we read from the Redis cache for this query? + /// + /// + /// An operation can both read and write to the cache during LoadAsync (if only some keys are cached). + /// + public bool CacheDidRead { get; internal set; } + + /// + /// Did we write from the Redis cache as part of this query? + /// + /// + /// An operation can both read and write to the cache during LoadAsync (if only some keys are cached). + /// + public bool CacheDidWrite { get; internal set; } + + /// + /// Is this query ever compatible with the cache? If this is false, then this query + /// will always hit Datastore for reads. + /// + public bool CacheCompatible { get; internal set; } + + /// + /// The number of cached queries (both singular and complex) that were + /// flushed from the Redis cache due to this write operation. + /// + public long CacheQueriesFlushed { get; internal set; } + + /// + /// For QueryAsync operations, this returns the query hash used to key result sets inside Redis. + /// + public string? CacheHash { get; internal set; } + + /// + /// The number of milliseconds elapsed at the Redis caching layer. This excludes + /// time spent in the Datastore layer. 
+ /// + public float CacheElapsedMilliseconds { get; internal set; } + + /// + /// The number of entities read directly from Datastore. + /// + public long DatastoreEntitiesRead { get; internal set; } + + /// + /// The number of entities written directly to Datastore. + /// + public long DatastoreEntitiesWritten { get; internal set; } + + /// + /// The number of entities deleted directly from Datastore. + /// + public long DatastoreEntitiesDeleted { get; internal set; } + + /// + /// The number of milliseconds elapsed at the Datastore layer. + /// + public float DatastoreElapsedMilliseconds { get; internal set; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Migration/DatastoreStartupMigrator.cs b/UET/Redpoint.CloudFramework/Repository/Migration/DatastoreStartupMigrator.cs new file mode 100644 index 00000000..90ef6293 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Migration/DatastoreStartupMigrator.cs @@ -0,0 +1,169 @@ +namespace Redpoint.CloudFramework.Repository.Migration +{ + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using Redpoint.CloudFramework.Locking; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Repository.Layers; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading; + using System.Threading.Tasks; + + internal class DatastoreStartupMigrator : IHostedService + { + private readonly IGlobalRepository _globalRepository; + private readonly IServiceProvider _serviceProvider; + private readonly RegisteredModelMigratorBase[] _migrators; + private readonly ILogger _logger; + private readonly IGlobalLockService _globalLock; + private readonly IGlobalPrefix _globalPrefix; + + public DatastoreStartupMigrator( + IGlobalRepository globalRepository, + IServiceProvider serviceProvider, + RegisteredModelMigratorBase[] migrators, + ILogger logger, + 
IGlobalLockService globalLock, + IGlobalPrefix globalPrefix) + { + _globalRepository = globalRepository; + _serviceProvider = serviceProvider; + _migrators = migrators; + _logger = logger; + _globalLock = globalLock; + _globalPrefix = globalPrefix; + } + + /// + /// This isn't a real model; we just use it to construct keys for locking. + /// + private class RCFMigrationLockModel : Model + { + public override HashSet GetIndexes() + { + throw new NotImplementedException(); + } + + public override string GetKind() + { + return "rcf_migrationLock"; + } + + public override long GetSchemaVersion() + { + throw new NotImplementedException(); + } + + public override Dictionary GetTypes() + { + throw new NotImplementedException(); + } + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + if (_migrators.Length == 0) + { + _logger.LogInformation("There are no database migrators registered."); + return; + } + else + { + _logger.LogInformation($"There are {_migrators.Length} database migrators registered."); + } + + var drl = _serviceProvider.GetRequiredService(); + + var migratorsByModel = _migrators.GroupBy(x => x.ModelType); + foreach (var modelGroup in migratorsByModel) + { + _logger.LogInformation($"Running database migrators for: {modelGroup.Key.FullName}"); + +#pragma warning disable IL2072 // DynamicallyAccessedMembers is set on ModelType. + var referenceModel = (Model)Activator.CreateInstance(modelGroup.Key)!; +#pragma warning restore IL2072 + var currentSchemaVersion = referenceModel.GetSchemaVersion(); + + var migratorsByVersion = modelGroup.ToDictionary(k => k.ToSchemaVersion, v => v.MigratorType); + for (long i = 2; i <= currentSchemaVersion; i++) + { + if (!migratorsByVersion.ContainsKey(i)) + { + throw new InvalidOperationException($"Missing migrator to migrate {modelGroup.Key.Name} from version {i - 1} to {i}. 
Make sure the migrator is registered in the service provider with .AddMigrator()."); + } + } + var firstMigrator = modelGroup.First(); + + // Do an early check before locking. + if (!await firstMigrator.QueryForOutdatedModelsAsync(drl, currentSchemaVersion).AnyAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) + { + continue; + } + + // We must lock at the type level, because QueryForOutdatedModels will be out of date the moment + // any individual model is processed. + _logger.LogInformation($"Acquiring lock to perform migrations for '{referenceModel.GetKind()}'..."); + var keyFactory = await _globalRepository.GetKeyFactoryAsync(string.Empty, cancellationToken: cancellationToken).ConfigureAwait(false); + var key = keyFactory.CreateKey(referenceModel.GetKind()); + var handler = await _globalLock.Acquire(string.Empty, key).ConfigureAwait(false); + try + { + _logger.LogInformation($"Performing migrations for '{referenceModel.GetKind()}'..."); + + await foreach (var loadedModel in firstMigrator.QueryForOutdatedModelsAsync(drl, currentSchemaVersion).ConfigureAwait(false)) + { + var loadedModelVersion = loadedModel.schemaVersion ?? 1; + var needsSaveFromUs = false; + + _logger.LogInformation($"Migrating '{_globalPrefix.CreateInternal(loadedModel.Key)}'..."); + + for (long i = loadedModelVersion + 1; i <= currentSchemaVersion; i++) + { + var migrator = _serviceProvider.GetService(migratorsByVersion[i]); +#pragma warning disable IL2075 // DynamicallyAccessedMembers is set on MigratorType. 
+ var migrationDidSave = await ((Task)migratorsByVersion[i].GetMethod("MigrateAsync")!.Invoke(migrator, new object[] { loadedModel })!).ConfigureAwait(false); +#pragma warning restore IL2075 + if (migrationDidSave) + { + needsSaveFromUs = false; + if (loadedModel.schemaVersion != i) + { + throw new InvalidOperationException("Expected that MigrateAsync would set schemaVersion and call UpdateAsync as needed!"); + } + } + else + { + needsSaveFromUs = true; + loadedModel.schemaVersion = i; + } + } + + if (needsSaveFromUs) + { + await firstMigrator.UpdateAsync(_globalRepository, loadedModel).ConfigureAwait(false); + } + } + } + catch (Exception ex) + { + _logger.LogError(ex, $"Failed to apply migrations for '{referenceModel.GetKind()}': {ex.Message}"); + } + finally + { + _logger.LogInformation($"Releasing lock that was used to perform migrations for '{referenceModel.GetKind()}'..."); + await handler.DisposeAsync().ConfigureAwait(false); + _logger.LogInformation($"Released lock that was used to perform migrations for '{referenceModel.GetKind()}'."); + } + } + } + + public Task StopAsync(CancellationToken cancellationToken) + { + return Task.CompletedTask; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Migration/IModelMigrator.cs b/UET/Redpoint.CloudFramework/Repository/Migration/IModelMigrator.cs new file mode 100644 index 00000000..47dfe368 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Migration/IModelMigrator.cs @@ -0,0 +1,16 @@ +namespace Redpoint.CloudFramework.Repository.Migration +{ + using Redpoint.CloudFramework.Models; + using System.Threading.Tasks; + + public interface IModelMigrator where T : Model + { + /// + /// Migrate the specified model to the version this migrator was registered for. Returns true if the model had + /// UpdateAsync called on it as part of the migration. + /// + /// The model to migration. + /// Returns true if the model had UpdateAsync called on it as part of the migration. 
+ Task MigrateAsync(T model); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Migration/MigrationServiceExtensions.cs b/UET/Redpoint.CloudFramework/Repository/Migration/MigrationServiceExtensions.cs new file mode 100644 index 00000000..0b295414 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Migration/MigrationServiceExtensions.cs @@ -0,0 +1,23 @@ +namespace Redpoint.CloudFramework.Repository.Migration +{ + using Microsoft.Extensions.DependencyInjection; + using Redpoint.CloudFramework.Models; + using System.Diagnostics.CodeAnalysis; + + public static class MigrationServiceExtensions + { + public static IServiceCollection AddMigration<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)] TModel, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicMethods | DynamicallyAccessedMemberTypes.PublicConstructors)] TMigration>(this IServiceCollection services, int toSchemaVersion) where TModel : Model, new() where TMigration : IModelMigrator + { + services.AddTransient(sp => + { + return new RegisteredModelMigrator + { + MigratorType = typeof(TMigration), + ToSchemaVersion = toSchemaVersion, + }; + }); + services.AddTransient(typeof(TMigration), typeof(TMigration)); + return services; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Migration/RegisteredModelMigrator.cs b/UET/Redpoint.CloudFramework/Repository/Migration/RegisteredModelMigrator.cs new file mode 100644 index 00000000..b0c99109 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Migration/RegisteredModelMigrator.cs @@ -0,0 +1,30 @@ +namespace Redpoint.CloudFramework.Repository.Migration +{ + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Layers; + using System; + using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; + + internal sealed class RegisteredModelMigrator<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)] T> : 
RegisteredModelMigratorBase where T : Model, new () + { + public override Type ModelType { get; } = typeof(T); + + public override async Task UpdateAsync(IGlobalRepository globalRepository, object model) + { + await globalRepository.UpdateAsync(string.Empty, (T)model, null, null, CancellationToken.None).ConfigureAwait(false); + } + + public override IAsyncEnumerable QueryForOutdatedModelsAsync(IDatastoreRepositoryLayer drl, long currentSchemaVersion) + { + return drl.QueryAsync( + string.Empty, + x => x.schemaVersion < currentSchemaVersion, + null, + null, + null, + null, + CancellationToken.None).Cast(); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Migration/RegisteredModelMigratorBase.cs b/UET/Redpoint.CloudFramework/Repository/Migration/RegisteredModelMigratorBase.cs new file mode 100644 index 00000000..203c033b --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Migration/RegisteredModelMigratorBase.cs @@ -0,0 +1,21 @@ +namespace Redpoint.CloudFramework.Repository.Migration +{ + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Layers; + using System; + using System.Diagnostics.CodeAnalysis; + + internal abstract class RegisteredModelMigratorBase + { + public abstract Type ModelType { get; } + + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicMethods)] + public required Type MigratorType { get; set; } + + public long ToSchemaVersion { get; set; } + + public abstract Task UpdateAsync(IGlobalRepository globalRepository, object model); + + public abstract IAsyncEnumerable QueryForOutdatedModelsAsync(IDatastoreRepositoryLayer drl, long currentSchemaVersion); + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursor.cs b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursor.cs new file mode 100644 index 00000000..de50d6b8 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursor.cs @@ -0,0 +1,59 @@ +namespace 
Redpoint.CloudFramework.Repository.Pagination +{ + using Google.Protobuf; + using Microsoft.AspNetCore.Mvc; + + /// + /// Represents a cursor for paginated queries. + /// + [ModelBinder(typeof(PaginatedQueryCursorModelBinder))] + [Newtonsoft.Json.JsonConverter(typeof(PaginatedQueryCursorNewtonConverter))] + [System.Text.Json.Serialization.JsonConverter(typeof(PaginatedQueryCursorSystemConverter))] + public class PaginatedQueryCursor + { + private readonly string? _cursor; + + public PaginatedQueryCursor(ByteString bs) + { + if (bs == ByteString.Empty) + { + _cursor = null; + } + else + { + _cursor = bs?.ToBase64(); + } + } + + public PaginatedQueryCursor(string? s) + { + if (string.IsNullOrWhiteSpace(s)) + { + _cursor = null; + } + else + { + _cursor = s; + } + } + + public override string ToString() + { + return _cursor ?? string.Empty; + } + + public override bool Equals(object? obj) + { + return obj is PaginatedQueryCursor && ((PaginatedQueryCursor)obj)._cursor == _cursor; + } + + public override int GetHashCode() + { + return _cursor?.GetHashCode(StringComparison.Ordinal) ?? 0; + } + + public static implicit operator string?(PaginatedQueryCursor qc) => qc == null ? null : qc._cursor; + public static implicit operator ByteString(PaginatedQueryCursor qc) => qc?._cursor == null ? 
ByteString.Empty : ByteString.FromBase64(qc._cursor); + public ByteString ToByteString() => (ByteString)this; + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursorModelBinder.cs b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursorModelBinder.cs new file mode 100644 index 00000000..e4e42323 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursorModelBinder.cs @@ -0,0 +1,27 @@ +namespace Redpoint.CloudFramework.Repository.Pagination +{ + using Microsoft.AspNetCore.Mvc.ModelBinding; + using System; + using System.Threading.Tasks; + + public class PaginatedQueryCursorModelBinder : IModelBinder + { + public Task BindModelAsync(ModelBindingContext bindingContext) + { + ArgumentNullException.ThrowIfNull(bindingContext); + + var modelName = bindingContext.ModelName; + var valueProviderResult = bindingContext.ValueProvider.GetValue(modelName); + + if (valueProviderResult == ValueProviderResult.None || + valueProviderResult.FirstValue == null) + { + return Task.CompletedTask; + } + + bindingContext.ModelState.SetModelValue(modelName, valueProviderResult); + bindingContext.Result = ModelBindingResult.Success(new PaginatedQueryCursor(valueProviderResult.FirstValue)); + return Task.CompletedTask; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursorNewtonConverter.cs b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursorNewtonConverter.cs new file mode 100644 index 00000000..dac9b6f7 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursorNewtonConverter.cs @@ -0,0 +1,36 @@ +namespace Redpoint.CloudFramework.Repository.Pagination +{ + using System; + + public class PaginatedQueryCursorNewtonConverter : Newtonsoft.Json.JsonConverter + { + public override PaginatedQueryCursor ReadJson(Newtonsoft.Json.JsonReader reader, Type objectType, PaginatedQueryCursor? 
existingValue, bool hasExistingValue, Newtonsoft.Json.JsonSerializer serializer) + { + ArgumentNullException.ThrowIfNull(reader); + + if (reader.TokenType == Newtonsoft.Json.JsonToken.String) + { + return new PaginatedQueryCursor(reader.ReadAsString()); + } + else + { + reader.Read(); + return new PaginatedQueryCursor(string.Empty); + } + } + + public override void WriteJson(Newtonsoft.Json.JsonWriter writer, PaginatedQueryCursor? value, Newtonsoft.Json.JsonSerializer serializer) + { + ArgumentNullException.ThrowIfNull(writer); + + if (value == null) + { + writer.WriteNull(); + } + else + { + writer.WriteValue((string)value!); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursorSystemConverter.cs b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursorSystemConverter.cs new file mode 100644 index 00000000..d22f4ff5 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryCursorSystemConverter.cs @@ -0,0 +1,34 @@ +namespace Redpoint.CloudFramework.Repository.Pagination +{ + using System; + + public class PaginatedQueryCursorSystemConverter : System.Text.Json.Serialization.JsonConverter + { + public override PaginatedQueryCursor Read(ref System.Text.Json.Utf8JsonReader reader, Type typeToConvert, System.Text.Json.JsonSerializerOptions options) + { + if (reader.TokenType == System.Text.Json.JsonTokenType.String) + { + return new PaginatedQueryCursor(reader.GetString()); + } + else + { + reader.Read(); + return new PaginatedQueryCursor(string.Empty); + } + } + + public override void Write(System.Text.Json.Utf8JsonWriter writer, PaginatedQueryCursor value, System.Text.Json.JsonSerializerOptions options) + { + ArgumentNullException.ThrowIfNull(writer); + + if (value == null) + { + writer.WriteNullValue(); + } + else + { + writer.WriteStringValue(value); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryResult.cs 
b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryResult.cs new file mode 100644 index 00000000..64e9ad4a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Pagination/PaginatedQueryResult.cs @@ -0,0 +1,12 @@ +namespace Redpoint.CloudFramework.Repository.Pagination +{ + using Redpoint.CloudFramework.Models; + using System.Collections.Generic; + + public record struct PaginatedQueryResult where T : Model, new() + { + public PaginatedQueryCursor? NextCursor { get; set; } + + public required IReadOnlyList Results { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Redis/DistributedCacheExtended.cs b/UET/Redpoint.CloudFramework/Repository/Redis/DistributedCacheExtended.cs new file mode 100644 index 00000000..bfe9b3b9 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Redis/DistributedCacheExtended.cs @@ -0,0 +1,100 @@ +namespace Redpoint.CloudFramework.Repository.Redis +{ + using Microsoft.Extensions.Caching.StackExchangeRedis; + using Microsoft.Extensions.Options; + using StackExchange.Redis; + using System; + using System.Collections.Generic; + using System.Linq; + using System.Threading.Tasks; + + internal class DistributedCacheExtended : IDistributedCacheExtended, IDisposable + { + private const string _clearCacheLuaScript = + "for _,k in ipairs(redis.call('KEYS', ARGV[1])) do\n" + + " redis.call('DEL', k)\n" + + "end"; + private const string _getKeysLuaScript = "return redis.call('keys', ARGV[1])"; + private readonly RedisCacheOptions _options; + private ConnectionMultiplexer? _connection; + private IDatabase? 
_cache; + private bool _isDisposed; + + public DistributedCacheExtended(IOptions redisCacheOptions) + { + _options = redisCacheOptions.Value; + } + + ~DistributedCacheExtended() + { + Dispose(false); + } + + public async Task ClearAsync() + { + ThrowIfDisposed(); + await EnsureInitialized().ConfigureAwait(false); + await _cache!.ScriptEvaluateAsync( + _clearCacheLuaScript, + values: new RedisValue[] + { + _options.InstanceName + "*" + }).ConfigureAwait(false); + } + + public async Task> GetKeysAsync() + { + ThrowIfDisposed(); + await EnsureInitialized().ConfigureAwait(false); + var result = await _cache!.ScriptEvaluateAsync( + _getKeysLuaScript, + values: new RedisValue[] + { + _options.InstanceName + "*" + }).ConfigureAwait(false); + return ((RedisResult[])result!).Select(x => x.ToString()!.Substring(_options.InstanceName!.Length)).ToArray(); + } + + public async Task RemoveAsync(string[] keys) + { + ThrowIfDisposed(); + ArgumentNullException.ThrowIfNull(keys); + await EnsureInitialized().ConfigureAwait(false); + var keysArray = keys.Select(x => (RedisKey)(_options.InstanceName + x)).ToArray(); + await _cache!.KeyDeleteAsync(keysArray).ConfigureAwait(false); + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + protected async Task EnsureInitialized() + { + if (_connection == null) + { + _connection = await ConnectionMultiplexer.ConnectAsync(_options.Configuration!).ConfigureAwait(false); + _cache = _connection.GetDatabase(); + } + } + + private void Dispose(bool disposing) + { + if (!_isDisposed) + { + if (disposing && _connection != null) + { + _connection.Close(); + } + + _isDisposed = true; + } + } + + private void ThrowIfDisposed() + { + ObjectDisposedException.ThrowIf(_isDisposed, this); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Transaction/IGlobalNestedTransactions.cs b/UET/Redpoint.CloudFramework/Repository/Transaction/IGlobalNestedTransactions.cs new file mode 100644 index 00000000..deb8b72b --- 
/dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Transaction/IGlobalNestedTransactions.cs @@ -0,0 +1,31 @@ +namespace Redpoint.CloudFramework.Repository.Transaction +{ + using Redpoint.CloudFramework.Repository.Metrics; + using System.Threading.Tasks; + + public static class NestedTransactionExtensions + { + public static async Task BeginPotentiallyNestedTransactionAsync( + this IGlobalRepository globalRepository, + string @namespace, + IModelTransaction? existingTransaction, + RepositoryOperationMetrics? metrics = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(globalRepository); + + if (existingTransaction == null) + { + return await globalRepository.BeginTransactionAsync(@namespace, metrics: metrics, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else if (existingTransaction.Namespace == @namespace) + { + return new NestedModelTransaction(existingTransaction); + } + else + { + throw new InvalidOperationException("Cross-namespace nested transaction attempted!"); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Transaction/IModelTransaction.cs b/UET/Redpoint.CloudFramework/Repository/Transaction/IModelTransaction.cs new file mode 100644 index 00000000..89e08e11 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Transaction/IModelTransaction.cs @@ -0,0 +1,53 @@ +namespace Redpoint.CloudFramework.Repository.Transaction +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + using System; + using System.Collections.Generic; + using System.Threading; + using System.Threading.Tasks; + + public interface IModelTransaction : IAsyncDisposable + { + /// + /// The namespace the transaction is occurring in. + /// + string Namespace { get; } + + /// + /// The transaction itself. + /// + internal DatastoreTransaction Transaction { get; } + + /// + /// A list of models that have been modified by this transaction. 
+ /// + IReadOnlyList ModifiedModels { get; } + internal List ModifiedModelsList { get; } + + /// + /// A list of queued operations to be performed immediately before + /// Commit() is called for Datastore. + /// + IReadOnlyList> QueuedPreCommitOperations { get; } + internal List> QueuedPreCommitOperationsList { get; } + + /// + /// If true, this transaction has been committed. + /// + bool HasCommitted { get; internal set; } + + /// + /// If true, this transaction has been rolled back explicitly. + /// + bool HasRolledBack { get; internal set; } + + /// + /// If true, this transaction is a nested transaction and you can't call + /// + /// or + /// on it directly. + /// + bool IsNestedTransaction { get; } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Transaction/NestedModelTransaction.cs b/UET/Redpoint.CloudFramework/Repository/Transaction/NestedModelTransaction.cs new file mode 100644 index 00000000..0ef3881a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Transaction/NestedModelTransaction.cs @@ -0,0 +1,49 @@ +namespace Redpoint.CloudFramework.Repository.Transaction +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + using System; + using System.Collections.Generic; + using System.Threading.Tasks; + + internal sealed class NestedModelTransaction : IModelTransaction + { + private readonly IModelTransaction _transaction; + + internal NestedModelTransaction( + IModelTransaction transaction) + { + _transaction = transaction; + } + + public string Namespace => _transaction.Namespace; + + public DatastoreTransaction Transaction => _transaction.Transaction; + + public IReadOnlyList ModifiedModels => _transaction.ModifiedModels; + public List ModifiedModelsList => _transaction.ModifiedModelsList; + + public IReadOnlyList> QueuedPreCommitOperations => _transaction.QueuedPreCommitOperations; + public List> QueuedPreCommitOperationsList => _transaction.QueuedPreCommitOperationsList; + + public bool HasCommitted + { + get 
=> _transaction.HasCommitted; + set => _transaction.HasCommitted = value; + } + + public bool HasRolledBack + { + get => _transaction.HasRolledBack; + set => _transaction.HasRolledBack = value; + } + + public bool IsNestedTransaction => true; + + public ValueTask DisposeAsync() + { + // Nested transactions do not auto-rollback on disposal. + return ValueTask.CompletedTask; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Transaction/TopLevelModelTransaction.cs b/UET/Redpoint.CloudFramework/Repository/Transaction/TopLevelModelTransaction.cs new file mode 100644 index 00000000..cbe7a8c4 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Transaction/TopLevelModelTransaction.cs @@ -0,0 +1,54 @@ +namespace Redpoint.CloudFramework.Repository.Transaction +{ + using Google.Cloud.Datastore.V1; + using Redpoint.CloudFramework.Models; + using Redpoint.CloudFramework.Repository.Layers; + using System; + using System.Collections.Generic; + using System.Threading; + using System.Threading.Tasks; + + internal sealed class TopLevelModelTransaction : IModelTransaction + { + private readonly IDatastoreRepositoryLayer _datastoreRepositoryLayer; + + internal TopLevelModelTransaction( + string @namespace, + DatastoreTransaction transaction, + IDatastoreRepositoryLayer datastoreRepositoryLayer) + { + _datastoreRepositoryLayer = datastoreRepositoryLayer; + + Namespace = @namespace; + Transaction = transaction; + ModifiedModelsList = new List(); + QueuedPreCommitOperationsList = new List>(); + HasCommitted = false; + HasRolledBack = false; + } + + public string Namespace { get; } + + public DatastoreTransaction Transaction { get; } + + public IReadOnlyList ModifiedModels => ModifiedModelsList; + public List ModifiedModelsList { get; } + + public IReadOnlyList> QueuedPreCommitOperations => QueuedPreCommitOperationsList; + public List> QueuedPreCommitOperationsList { get; } + + public bool HasCommitted { get; set; } + + public bool HasRolledBack { get; set; } + + 
public bool IsNestedTransaction => false; + + public async ValueTask DisposeAsync() + { + if (!HasCommitted && !HasRolledBack) + { + await _datastoreRepositoryLayer.RollbackAsync(Namespace, this, null, CancellationToken.None).ConfigureAwait(false); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Repository/Transaction/TransactionMode.cs b/UET/Redpoint.CloudFramework/Repository/Transaction/TransactionMode.cs new file mode 100644 index 00000000..971c1c1d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Repository/Transaction/TransactionMode.cs @@ -0,0 +1,20 @@ +namespace Redpoint.CloudFramework.Repository.Transaction +{ + /// + /// The type of transaction being performed. The default is read-write, but you can + /// specify for improved performance. + /// + public enum TransactionMode + { + /// + /// This transaction modifies the repository. + /// + ReadWrite, + + /// + /// The transaction only reads data from the repository, but needs a consistent view across + /// multiple reads or queries. + /// + ReadOnly, + } +} diff --git a/UET/Redpoint.CloudFramework/ServiceCollectionExtensions.cs b/UET/Redpoint.CloudFramework/ServiceCollectionExtensions.cs new file mode 100644 index 00000000..72ea72bd --- /dev/null +++ b/UET/Redpoint.CloudFramework/ServiceCollectionExtensions.cs @@ -0,0 +1,22 @@ +namespace Redpoint.CloudFramework +{ + using Microsoft.Extensions.DependencyInjection; + using Redpoint.CloudFramework.Prefix; + using System.Diagnostics.CodeAnalysis; + + /// + /// Provides additional service registration methods for . + /// + public static class ServiceCollectionExtensions + { + /// + /// Adds the specified prefix provider to the service collection. + /// + /// The prefix provider implementation. + /// The service collection to register it with. 
+ public static void AddPrefixProvider<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>(this IServiceCollection services) where T : class, IPrefixProvider + { + services.AddSingleton(); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Session/ConfigureCookieOptions.cs b/UET/Redpoint.CloudFramework/Session/ConfigureCookieOptions.cs new file mode 100644 index 00000000..7e4e20c3 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Session/ConfigureCookieOptions.cs @@ -0,0 +1,28 @@ +namespace Redpoint.CloudFramework.Session +{ + using Microsoft.AspNetCore.Authentication.Cookies; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Options; + + public class ConfigureCookieOptions : IPostConfigureOptions + { + private readonly IServiceScopeFactory _serviceScopeFactory; + + public ConfigureCookieOptions(IServiceScopeFactory serviceScopeFactory) + { + _serviceScopeFactory = serviceScopeFactory; + } + + public void PostConfigure(string? name, CookieAuthenticationOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + using (var scope = _serviceScopeFactory.CreateScope()) + { + var provider = scope.ServiceProvider; + + options.SessionStore = provider.GetRequiredService(); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Session/RedisTicketStore.cs b/UET/Redpoint.CloudFramework/Session/RedisTicketStore.cs new file mode 100644 index 00000000..72d3e0d5 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Session/RedisTicketStore.cs @@ -0,0 +1,83 @@ +namespace Redpoint.CloudFramework.Session +{ + using Microsoft.AspNetCore.Authentication; + using Microsoft.AspNetCore.Authentication.Cookies; + using Microsoft.Extensions.Caching.Distributed; + using System; + using System.Security.Claims; + using System.Threading.Tasks; + + public class RedisTicketStore : ITicketStore + { + private const string _keyPrefix = "TKT:"; + + private readonly IDistributedCache _cache; + + public RedisTicketStore(IDistributedCache 
cache) + { + _cache = cache; + } + + public async Task StoreAsync(AuthenticationTicket ticket) + { + ArgumentNullException.ThrowIfNull(ticket); + + var key = _keyPrefix + ticket.AuthenticationScheme + ":"; + var sub = ticket.Principal.FindFirstValue("sub"); + if (sub != null) + { + key += sub + ":" + Guid.NewGuid(); + } + else + { + var name = ticket.Principal.FindFirstValue(ClaimTypes.NameIdentifier); + if (name != null) + { + key += name + ":" + Guid.NewGuid(); + } + else + { + key += Guid.NewGuid(); + } + } + await RenewAsync(key, ticket).ConfigureAwait(false); + return key; + } + + public Task RenewAsync(string key, AuthenticationTicket ticket) + { + ArgumentNullException.ThrowIfNull(ticket); + + var options = new DistributedCacheEntryOptions(); + var expiresUtc = ticket.Properties.ExpiresUtc; + if (expiresUtc.HasValue) + { + options.SetAbsoluteExpiration(expiresUtc.Value); + } + byte[] val = SerializeToBytes(ticket); + _cache.Set(key, val, options); + return Task.FromResult(0); + } + + public Task RetrieveAsync(string key) + { + return Task.FromResult(DeserializeFromBytes(_cache.Get(key))); + } + + public Task RemoveAsync(string key) + { + _cache.Remove(key); + return Task.FromResult(0); + } + + private static byte[] SerializeToBytes(AuthenticationTicket source) + { + return TicketSerializer.Default.Serialize(source); + } + + private static AuthenticationTicket? DeserializeFromBytes(byte[]? source) + { + return source == null ? 
null : TicketSerializer.Default.Deserialize(source); + } + } +} diff --git a/UET/Redpoint.CloudFramework/SingleCurrentTenantService.cs b/UET/Redpoint.CloudFramework/SingleCurrentTenantService.cs new file mode 100644 index 00000000..e4ddff1b --- /dev/null +++ b/UET/Redpoint.CloudFramework/SingleCurrentTenantService.cs @@ -0,0 +1,18 @@ +namespace Redpoint.CloudFramework +{ + using Google.Cloud.Datastore.V1; + using System.Threading.Tasks; + + internal class SingleCurrentTenantService : ICurrentTenantService + { + public Task GetTenant() + { + return Task.FromResult(null); + } + + public Task GetTenantDatastoreKeyFromNamespace(string @namespace) + { + return Task.FromResult(null); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/BaseConfigurator.cs b/UET/Redpoint.CloudFramework/Startup/BaseConfigurator.cs new file mode 100644 index 00000000..e7ebb6bf --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/BaseConfigurator.cs @@ -0,0 +1,304 @@ +extern alias RDCommandLine; + +namespace Redpoint.CloudFramework.Startup +{ + using Google.Cloud.Datastore.V1; + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.DependencyInjection.Extensions; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using Microsoft.Extensions.Options; + using Quartz; + using RDCommandLine::Microsoft.Extensions.Logging.Console; + using Redpoint.CloudFramework.BigQuery; + using Redpoint.CloudFramework.Configuration; + using Redpoint.CloudFramework.Counter; + using Redpoint.CloudFramework.Event; + using Redpoint.CloudFramework.Event.PubSub; + using Redpoint.CloudFramework.GoogleInfrastructure; + using Redpoint.CloudFramework.Infrastructure; + using Redpoint.CloudFramework.Locking; + using Redpoint.CloudFramework.Metric; + using Redpoint.CloudFramework.Prefix; + using Redpoint.CloudFramework.Processor; + using Redpoint.CloudFramework.Repository; + using 
Redpoint.CloudFramework.Repository.Contention; + using Redpoint.CloudFramework.Repository.Converters.Expression; + using Redpoint.CloudFramework.Repository.Converters.Model; + using Redpoint.CloudFramework.Repository.Converters.Timestamp; + using Redpoint.CloudFramework.Repository.Converters.Value; + using Redpoint.CloudFramework.Repository.Datastore; + using Redpoint.CloudFramework.Repository.Hooks; + using Redpoint.CloudFramework.Repository.Layers; + using Redpoint.CloudFramework.Repository.Migration; + using Redpoint.CloudFramework.Storage; + using Redpoint.CloudFramework.Tracing; + using Redpoint.Logging.SingleLine; + using System; + using System.Diagnostics.CodeAnalysis; + using System.IO; + using System.Linq; + + internal class BaseConfigurator : IBaseConfigurator + { + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] + internal Type? _prefixProvider = null; + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] + internal Type _currentTenantService = typeof(SingleCurrentTenantService); + internal GoogleCloudUsageFlag _googleCloudUsage = GoogleCloudUsageFlag.Default; + internal bool _requireGoogleCloudSecretManagerLoad = false; + internal bool _isInteractiveCLIApp = false; + internal Action? 
_customConfigLayers = null; + + public TBase UsePrefixProvider<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>() where T : IPrefixProvider + { + _prefixProvider = typeof(T); + return (TBase)(object)this; + } + + public TBase UseMultiTenant<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>() where T : ICurrentTenantService + { + _currentTenantService = typeof(T); + return (TBase)(object)this; + } + + public TBase UseGoogleCloud(GoogleCloudUsageFlag usageFlag) + { + _googleCloudUsage = usageFlag; + return (TBase)(object)this; + } + + public TBase UseCustomConfigLayers(Action customConfigLayers) + { + _customConfigLayers = customConfigLayers; + return (TBase)(object)this; + } + + public TBase RequireGoogleCloudSecretManagerConfiguration() + { + _requireGoogleCloudSecretManagerLoad = true; + return (TBase)(object)this; + } + + protected void ValidateConfiguration() + { + if (_requireGoogleCloudSecretManagerLoad) + { + // Use of RequireGoogleCloudSecretManagerConfiguration implies Secret Manager service. 
+ _googleCloudUsage |= GoogleCloudUsageFlag.SecretManager; + } + } + + protected virtual void ConfigureAppConfiguration(IHostEnvironment env, IConfigurationBuilder config) + { + config.Sources.Clear(); + + if (_isInteractiveCLIApp) + { + config.AddJsonFile($"appsettings.CLI.json", optional: false, reloadOnChange: true); + } + else + { + config.AddJsonFile("appsettings.json", optional: true, reloadOnChange: !env.IsProduction()); + if (env.IsDevelopment() || env.IsStaging()) + { + config.AddJsonFile("appsettings.DevelopmentStaging.json", optional: true, reloadOnChange: true); + } + config.AddJsonFile($"appsettings.{env.EnvironmentName}.json", + optional: true, reloadOnChange: !env.IsProduction()); + } + + var configPath = Environment.GetEnvironmentVariable("CLOUD_FRAMEWORK_CONFIG_PATH"); + if (!string.IsNullOrEmpty(configPath) && File.Exists(configPath)) + { + config.AddJsonFile(configPath, optional: false, reloadOnChange: false); + } + + if (!_isInteractiveCLIApp && + (_googleCloudUsage & GoogleCloudUsageFlag.SecretManager) != 0) + { + // Construct our service provider and configuration source regardless + // of whether we are in production to ensure that dependencies are satisifed. + var minimalServices = new ServiceCollection(); + minimalServices.AddSingleton(env); + AddDefaultLogging(env, minimalServices); + minimalServices.AddSingleton(); + minimalServices.AddSingleton(); + minimalServices.AddSingleton(); + minimalServices.AddSecretManagerConfiguration(_requireGoogleCloudSecretManagerLoad); + + // @note: This service provider *MUST NOT* be disposed, as instances continue to use it + // throughout the lifetime of the application, not just during configuration setup. 
+ var minimalServiceProvider = minimalServices.BuildServiceProvider(); + var minimalLogging = minimalServiceProvider.GetRequiredService>>(); + foreach (var configurationSource in minimalServiceProvider.GetServices()) + { + if (env.IsProduction()) + { + minimalLogging.LogInformation($"Adding '{configurationSource.GetType().FullName}' configuration source to configuration as this instance is running in production..."); + config.Add(configurationSource); + } + else + { + minimalLogging.LogInformation($"Not adding '{configurationSource.GetType().FullName}' configuration source to configuration as this instance is not running in production."); + } + } + } + + if (_customConfigLayers != null) + { + _customConfigLayers(env, config); + } + + config.AddEnvironmentVariables(); + } + + private static void AddDefaultLogging(IHostEnvironment hostEnvironment, IServiceCollection services) + { + services.AddLogging(builder => + { + builder.ClearProviders(); + builder.SetMinimumLevel(LogLevel.Information); + builder.AddSingleLineConsoleFormatter(options => + { + options.OmitLogPrefix = false; + options.ColorBehavior = hostEnvironment.IsProduction() ? LoggerColorBehavior.Disabled : LoggerColorBehavior.Default; + }); + builder.AddSingleLineConsole(); + }); + } + + protected virtual void PreStartupConfigureServices(IHostEnvironment hostEnvironment, IServiceCollection services) + { + if (!_isInteractiveCLIApp) + { + // Add default logging configuration. + AddDefaultLogging(hostEnvironment, services); + } + + // Add the core stuff that every application needs. + if (_prefixProvider != null) + { + services.AddSingleton(typeof(IPrefixProvider), _prefixProvider); + } + services.AddSingleton(typeof(ICurrentTenantService), _currentTenantService); + services.AddSingleton(sp => sp.GetServices().ToArray()); + + if (_googleCloudUsage != GoogleCloudUsageFlag.None) + { + // Add global environment configuration. 
+ services.AddSingleton(); + services.AddSingleton(); + } + + // Add the cache services. + services.AddMemoryCache(); + services.AddDistributedRedpointCache(hostEnvironment); + + // Add file storage. + if (hostEnvironment.IsDevelopment() || hostEnvironment.IsStaging()) + { + services.AddSingleton(); + } + else + { + services.AddSingleton(); + } + } + + protected virtual void PostStartupConfigureServices(IServiceCollection services) + { + // Add the global services provided by Cloud Framework. + + if ((_googleCloudUsage & GoogleCloudUsageFlag.Datastore) != 0) + { + services.AddSingleton(); + services.AddSingleton(svc => svc.GetServices().ToArray()); + services.AddSingleton, JsonModelConverter>(); + services.AddSingleton, EntityModelConverter>(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + if (!_isInteractiveCLIApp) + { + services.AddHostedService(); + services.AddTransient(sp => sp.GetServices().ToArray()); + } + services.AddSingleton(); + services.AddSingleton(); + } + if ((_googleCloudUsage & GoogleCloudUsageFlag.BigQuery) != 0) + { + services.AddSingleton(); + } + if ((_googleCloudUsage & GoogleCloudUsageFlag.PubSub) != 0) + { + services.AddSingleton(); + } + else + { + services.AddSingleton(); + } +#pragma warning disable CS0618 + if ((_googleCloudUsage & GoogleCloudUsageFlag.Metrics) != 0) +#pragma warning restore CS0618 + { + services.AddSingleton(); + } + else + { + services.AddSingleton(); + } + if (!_isInteractiveCLIApp && (_googleCloudUsage & GoogleCloudUsageFlag.SecretManager) != 0) + { + services.AddSecretManagerRuntime(); + } + + services.AddSingleton(); + + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + 
services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + + if (services.Any(x => x.ServiceType == typeof(IQuartzScheduledProcessorBinding))) + { + services.TryAddEnumerable(new[] + { + ServiceDescriptor.Singleton, QuartzCloudFrameworkPostConfigureOptions>() + }); + services.AddQuartz(options => + { + options.UseSimpleTypeLoader(); + // @todo: In future we should support clustering, but for now we do not. + options.UseInMemoryStore(); + }); + services.AddQuartzHostedService(options => options.WaitForJobsToComplete = true); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/BoundHelmConfiguration.cs b/UET/Redpoint.CloudFramework/Startup/BoundHelmConfiguration.cs new file mode 100644 index 00000000..b7a65ca6 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/BoundHelmConfiguration.cs @@ -0,0 +1,17 @@ +namespace Redpoint.CloudFramework.Startup +{ + public class BoundHelmConfiguration : IOptionalHelmConfiguration + { + private readonly HelmConfiguration _config; + + public BoundHelmConfiguration(HelmConfiguration config) + { + _config = config; + } + + public HelmConfiguration GetHelmConfig() + { + return _config; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/ConnectionMultiplexerProxy.cs b/UET/Redpoint.CloudFramework/Startup/ConnectionMultiplexerProxy.cs new file mode 100644 index 00000000..629448f4 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/ConnectionMultiplexerProxy.cs @@ -0,0 +1,392 @@ +namespace Redpoint.CloudFramework.Startup +{ + using Microsoft.Extensions.Logging; + using StackExchange.Redis; + using StackExchange.Redis.Maintenance; + using StackExchange.Redis.Profiling; + using System; + using System.IO; + using System.Linq; + 
using System.Net; + using System.Threading; + using System.Threading.Tasks; + +#pragma warning disable CS8766 + internal class ConnectionMultiplexerProxy : IConnectionMultiplexer +#pragma warning restore CS8766 + { + private Lazy _connectionMultiplexerLazy; + + public ConnectionMultiplexerProxy(string redisServer, ILogger logger) + { + _connectionMultiplexerLazy = new Lazy(() => ConnectToRedis(redisServer, logger)!); + } + + private ConnectionMultiplexer ConnectionMultiplexer + { + get + { + return _connectionMultiplexerLazy.Value; + } + } + + internal static string? GetRedisConnectionString(string redisServer) + { + if (string.IsNullOrWhiteSpace(redisServer)) + { + return null; + } + + var serverName = redisServer; + var portSuffix = string.Empty; + if (serverName.Contains(':', StringComparison.InvariantCultureIgnoreCase)) + { + var s = serverName.Split(_hostPortSeparator, 2); + serverName = s[0]; + portSuffix = ":" + s[1]; + } + + string redisConnect; + if (IPAddress.TryParse(serverName, out var address)) + { + // Server component is a direct network address; use + // original Redis server string as-is. + redisConnect = redisServer; + } + else + { + // Lookup based on DNS. + var dnsTask = Dns.GetHostAddressesAsync(serverName); + var addresses = new List(); + foreach (var dnsEntry in dnsTask.Result) + { + if (dnsEntry.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork) + { + addresses.Add(dnsEntry.MapToIPv4().ToString() + portSuffix); + } + else if (dnsEntry.AddressFamily == System.Net.Sockets.AddressFamily.InterNetworkV6) + { + addresses.Add($"[{dnsEntry.MapToIPv6().ToString()}]{portSuffix}"); + } + } + redisConnect = string.Join(",", addresses); + } + + return redisConnect + ",allowAdmin=true"; + } + + private static ConnectionMultiplexer? ConnectToRedis( + string redisServer, + ILogger logger) + { + var redisConnect = GetRedisConnectionString(redisServer); + + if (redisConnect == null) + { + return null; + } + + ConnectionMultiplexer? 
connectionMultiplexer = null; + var now = DateTime.Now; + for (var i = 0; i < 30; i++) + { + try + { + logger?.LogInformation($"Attempting to connect to Redis {redisConnect} (attempt #{i + 1})..."); + connectionMultiplexer = ConnectionMultiplexer.Connect(redisConnect); + break; + } + catch (RedisConnectionException) + { + logger?.LogWarning($"Failed to connect to Redis (attempt #{i + 1}), waiting {i} seconds before trying again..."); + Thread.Sleep(i * 1000); + continue; + } + } + + if (connectionMultiplexer == null) + { + logger?.LogCritical($"Unable to connect to Redis after 30 attempts and {Math.Round((DateTime.Now - now).TotalMinutes, 0)} minutes... something is drastically wrong!"); + throw new InvalidOperationException("Unable to connect to Redis!"); + } + + return connectionMultiplexer; + } + + #region Proxied Methods + + public string ClientName => ConnectionMultiplexer.ClientName; + + public string Configuration => ConnectionMultiplexer.Configuration; + + public int TimeoutMilliseconds => ConnectionMultiplexer.TimeoutMilliseconds; + + public long OperationCount => ConnectionMultiplexer.OperationCount; + +#pragma warning disable CS0618 + public bool PreserveAsyncOrder { get => ConnectionMultiplexer.PreserveAsyncOrder; set => ConnectionMultiplexer.PreserveAsyncOrder = value; } +#pragma warning restore CS0618 + + public bool IsConnected => ConnectionMultiplexer.IsConnected; + + public bool IsConnecting => ConnectionMultiplexer.IsConnecting; + + [Obsolete] + public bool IncludeDetailInExceptions { get => ConnectionMultiplexer.IncludeDetailInExceptions; set => ConnectionMultiplexer.IncludeDetailInExceptions = value; } + + public int StormLogThreshold { get => ConnectionMultiplexer.StormLogThreshold; set => ConnectionMultiplexer.StormLogThreshold = value; } + + internal static readonly char[] _hostPortSeparator = new[] { ':' }; + + public event EventHandler ErrorMessage + { + add + { + ConnectionMultiplexer.ErrorMessage += value; + } + + remove + { + 
ConnectionMultiplexer.ErrorMessage -= value; + } + } + + public event EventHandler ConnectionFailed + { + add + { + ConnectionMultiplexer.ConnectionFailed += value; + } + + remove + { + ConnectionMultiplexer.ConnectionFailed -= value; + } + } + + public event EventHandler InternalError + { + add + { + ConnectionMultiplexer.InternalError += value; + } + + remove + { + ConnectionMultiplexer.InternalError -= value; + } + } + + public event EventHandler ConnectionRestored + { + add + { + ConnectionMultiplexer.ConnectionRestored += value; + } + + remove + { + ConnectionMultiplexer.ConnectionRestored -= value; + } + } + + public event EventHandler ConfigurationChanged + { + add + { + ConnectionMultiplexer.ConfigurationChanged += value; + } + + remove + { + ConnectionMultiplexer.ConfigurationChanged -= value; + } + } + + public event EventHandler ConfigurationChangedBroadcast + { + add + { + ConnectionMultiplexer.ConfigurationChangedBroadcast += value; + } + + remove + { + ConnectionMultiplexer.ConfigurationChangedBroadcast -= value; + } + } + + public event EventHandler HashSlotMoved + { + add + { + ConnectionMultiplexer.HashSlotMoved += value; + } + + remove + { + ConnectionMultiplexer.HashSlotMoved -= value; + } + } + + public event EventHandler ServerMaintenanceEvent + { + add + { + ConnectionMultiplexer.ServerMaintenanceEvent += value; + } + + remove + { + ConnectionMultiplexer.ServerMaintenanceEvent -= value; + } + } + + public void RegisterProfiler(Func profilingSessionProvider) + { + ConnectionMultiplexer.RegisterProfiler(profilingSessionProvider); + } + + public ServerCounters GetCounters() + { + return ConnectionMultiplexer.GetCounters(); + } + + public EndPoint[] GetEndPoints(bool configuredOnly = false) + { + return ConnectionMultiplexer.GetEndPoints(configuredOnly); + } + + public void Wait(Task task) + { + ConnectionMultiplexer.Wait(task); + } + + public T Wait(Task task) + { + return ((IConnectionMultiplexer)ConnectionMultiplexer).Wait(task); + } + + public 
void WaitAll(params Task[] tasks) + { + ConnectionMultiplexer.WaitAll(tasks); + } + + public int HashSlot(RedisKey key) + { + return ConnectionMultiplexer.HashSlot(key); + } + + public ISubscriber GetSubscriber(object? asyncState = null) + { + return ConnectionMultiplexer.GetSubscriber(asyncState); + } + + public IDatabase GetDatabase(int db = -1, object? asyncState = null) + { + return ConnectionMultiplexer.GetDatabase(db, asyncState); + } + + public IServer GetServer(string host, int port, object? asyncState = null) + { + return ConnectionMultiplexer.GetServer(host, port, asyncState); + } + + public IServer GetServer(string hostAndPort, object? asyncState = null) + { + return ConnectionMultiplexer.GetServer(hostAndPort, asyncState); + } + + public IServer GetServer(IPAddress host, int port) + { + return ConnectionMultiplexer.GetServer(host, port); + } + + public IServer GetServer(EndPoint endpoint, object? asyncState = null) + { + return ConnectionMultiplexer.GetServer(endpoint, asyncState); + } + + public Task ConfigureAsync(TextWriter? log = null) + { + return ConnectionMultiplexer.ConfigureAsync(log); + } + + public bool Configure(TextWriter? log = null) + { + return ConnectionMultiplexer.Configure(log); + } + + public string GetStatus() + { + return ConnectionMultiplexer.GetStatus(); + } + + public void GetStatus(TextWriter log) + { + ConnectionMultiplexer.GetStatus(log); + } + + public void Close(bool allowCommandsToComplete = true) + { + ConnectionMultiplexer.Close(allowCommandsToComplete); + } + + public Task CloseAsync(bool allowCommandsToComplete = true) + { + return ConnectionMultiplexer.CloseAsync(allowCommandsToComplete); + } + + public string? 
GetStormLog() + { + return ConnectionMultiplexer.GetStormLog(); + } + + public void ResetStormLog() + { + ConnectionMultiplexer.ResetStormLog(); + } + + public long PublishReconfigure(CommandFlags flags = CommandFlags.None) + { + return ConnectionMultiplexer.PublishReconfigure(flags); + } + + public Task PublishReconfigureAsync(CommandFlags flags = CommandFlags.None) + { + return ConnectionMultiplexer.PublishReconfigureAsync(flags); + } + + public int GetHashSlot(RedisKey key) + { + return ConnectionMultiplexer.GetHashSlot(key); + } + + public void ExportConfiguration(Stream destination, ExportOptions options = (ExportOptions)(-1)) + { + ConnectionMultiplexer.ExportConfiguration(destination, options); + } + + public void Dispose() + { + ConnectionMultiplexer.Dispose(); + } + + public IServer[] GetServers() + { + return ConnectionMultiplexer.GetServers(); + } + + public void AddLibraryNameSuffix(string suffix) + { + ConnectionMultiplexer.AddLibraryNameSuffix(suffix); + } + + public ValueTask DisposeAsync() + { + return ConnectionMultiplexer.DisposeAsync(); + } + + #endregion + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/DefaultServiceAppConfigurator.cs b/UET/Redpoint.CloudFramework/Startup/DefaultServiceAppConfigurator.cs new file mode 100644 index 00000000..c746d314 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/DefaultServiceAppConfigurator.cs @@ -0,0 +1,171 @@ +namespace Redpoint.CloudFramework.Startup +{ + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Redpoint.CloudFramework.Processor; + using System; + using System.Collections.Generic; + using System.Threading.Tasks; + using System.Linq; + using Microsoft.Extensions.Logging; + using Redpoint.CloudFramework.Tracing; + using System.Diagnostics.CodeAnalysis; + using Quartz; + + internal class DefaultServiceAppConfigurator : BaseConfigurator, IServiceAppConfigurator + { + private readonly Dictionary> 
_processors = new Dictionary>(); + private Func? _dockerFactory; + private Action? _serviceConfiguration; + private Func? _helmConfig; + private string[] _defaultRoleNames = Array.Empty(); + + public IServiceAppConfigurator AddProcessor<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>() where T : class, IContinuousProcessor + { + _processors[T.RoleName] = (services) => + { + services.AddTransient(); + services.AddHostedService>(); + }; + return this; + } + + public IServiceAppConfigurator AddProcessor<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>(Action triggerBuilder) where T : class, IScheduledProcessor + { + _processors[T.RoleName] = (services) => + { + services.AddTransient(); + services.AddTransient(_ => + { + return new QuartzScheduledProcessorBinding(T.RoleName, triggerBuilder); + }); + }; + return this; + } + + public IServiceAppConfigurator UseDevelopmentDockerContainers(Func factory) + { + _dockerFactory = factory; + return this; + } + + public IServiceAppConfigurator UseHelm(Func helmConfig) + { + _helmConfig = helmConfig; + return this; + } + + public IServiceAppConfigurator UseDefaultRoles(params string[] roleNames) + { + _defaultRoleNames = roleNames; + return this; + } + + [RequiresDynamicCode("This internally uses HostBuilder, which requires dynamic code.")] + public async Task StartServiceApp(string[] args) + { + ValidateConfiguration(); + if (args.Contains("--help")) + { + Console.WriteLine("Specify one or more of the following roles on the command-line, or pass --all-roles:"); + foreach (var processor in _processors) + { + Console.WriteLine(" " + processor.Key); + } + return 2; + } + + if (string.IsNullOrWhiteSpace(Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT"))) + { + throw new InvalidOperationException("ASPNETCORE_ENVIRONMENT must set, even for service applications."); + } + + var selectedRoleNames = new HashSet(); + foreach (var processor in _processors) + { + 
if (args.Contains("--all-roles") || + args.Contains(processor.Key)) + { + selectedRoleNames.Add(processor.Key); + } + } + if (selectedRoleNames.Count == 0) + { + foreach (var defaultRoleName in _defaultRoleNames) + { + if (_processors.ContainsKey(defaultRoleName)) + { + selectedRoleNames.Add(defaultRoleName); + } + } + } + if (selectedRoleNames.Count == 0) + { + throw new InvalidOperationException("No processors were enabled. Use --all-roles or list the roles to run on the command-line."); + } + + var build = new HostBuilder() + .UseEnvironment(Environment.GetEnvironmentVariable("ASPNETCORE_ENVIRONMENT")!) + .ConfigureAppConfiguration((hostingContext, config) => + { + ConfigureAppConfiguration(hostingContext.HostingEnvironment, config); + }) + .ConfigureServices((context, services) => + { + var configurationBuilder = new ConfigurationBuilder() + .SetBasePath(context.HostingEnvironment.ContentRootPath); + ConfigureAppConfiguration(context.HostingEnvironment, configurationBuilder); + var configuration = configurationBuilder.Build(); + // Register IConfiguration for web host. + services.AddSingleton(configuration); + // Replace the builder's IConfiguration for the rest of ConfigureServices and Startup. + context.Configuration = configuration; + }) + .ConfigureServices((context, services) => + { + if (_helmConfig == null) + { + // Add the lifetime service that will set up the development environment if necessary. 
+ services.AddSingleton(sp => + { + return new DevelopmentStartup( + sp.GetRequiredService(), + sp.GetRequiredService>(), + _googleCloudUsage, + sp.GetRequiredService(), + _dockerFactory); + }); + } + else if (context.HostingEnvironment.IsDevelopment()) + { + var helmConfig = _helmConfig(context.Configuration, context.HostingEnvironment.ContentRootPath); + services.AddSingleton(new BoundHelmConfiguration(helmConfig)); + Environment.SetEnvironmentVariable("REDIS_SERVER", "localhost:" + helmConfig.RedisPort); + } + }) + .ConfigureServices((context, services) => + { + services.AddSingleton(); + + this.PreStartupConfigureServices(context.HostingEnvironment, services); + this._serviceConfiguration?.Invoke(services); + this.PostStartupConfigureServices(services); + + foreach (var roleName in selectedRoleNames) + { + _processors[roleName](services); + } + }) + .Build(); + await build.RunAsync().ConfigureAwait(false); + return 0; + } + + public IServiceAppConfigurator UseServiceConfiguration(Action configureServices) + { + _serviceConfiguration = configureServices; + return this; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/DefaultWebAppConfigurator.cs b/UET/Redpoint.CloudFramework/Startup/DefaultWebAppConfigurator.cs new file mode 100644 index 00000000..cf7f3983 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/DefaultWebAppConfigurator.cs @@ -0,0 +1,308 @@ +namespace Redpoint.CloudFramework.Startup +{ + using Counter; + using Microsoft.AspNetCore.Builder; + using Microsoft.AspNetCore.DataProtection; + using Microsoft.AspNetCore.Hosting; + using Microsoft.AspNetCore.Http; + using Microsoft.AspNetCore.Mvc.Infrastructure; + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using Quartz; + using Redpoint.CloudFramework.DataProtection; + using Redpoint.CloudFramework.Locking; + using Redpoint.CloudFramework.Prefix; + using 
Redpoint.CloudFramework.Processor; + using Redpoint.CloudFramework.Repository; + using Redpoint.CloudFramework.Repository.Datastore; + using Redpoint.CloudFramework.Tracing; + using System; + using System.Collections.Generic; + using System.Diagnostics.CodeAnalysis; + using System.IO; + using System.Linq; + using System.Net.Http; + using System.Threading.Tasks; + +#pragma warning disable CS0612 + internal class DefaultWebAppConfigurator : BaseConfigurator, IWebAppConfigurator, IStartupConfigureServicesFilter +#pragma warning restore CS0612 + { + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors | DynamicallyAccessedMemberTypes.PublicMethods)] + private Type? _startupType; + private WebHostBuilderContext? _context; + private double _tracingRate = 0.0; + private Func? _dockerFactory; + private Func? _helmConfig; + private string[] _prefixes = Array.Empty(); + private readonly Dictionary> _processors = new Dictionary>(); + + public IWebAppConfigurator UseStartup<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors | DynamicallyAccessedMemberTypes.PublicMethods)] T>() + { + _startupType = typeof(T); + return this; + } + + public IWebAppConfigurator AddDevelopmentProcessor<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>() where T : class, IContinuousProcessor + { + _processors[T.RoleName] = (services) => + { + services.AddTransient(); + services.AddHostedService>(); + }; + return this; + } + + public IWebAppConfigurator AddDevelopmentProcessor<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>(Action triggerBuilder) where T : class, IScheduledProcessor + { + _processors[T.RoleName] = (services) => + { + services.AddTransient(); + services.AddTransient(_ => + { + return new QuartzScheduledProcessorBinding(T.RoleName, triggerBuilder); + }); + }; + return this; + } + + public IWebAppConfigurator UseSentryTracing(double tracingRate) + { + _tracingRate = 
tracingRate; + return this; + } + + public IWebAppConfigurator UsePerformanceTracing(double tracingRate) + { + return UseSentryTracing(tracingRate); + } + + public IWebAppConfigurator UseDevelopmentDockerContainers(Func factory) + { + _dockerFactory = factory; + return this; + } + + public IWebAppConfigurator UseHelm(Func helmConfig) + { + _helmConfig = helmConfig; + return this; + } + + public IWebAppConfigurator FilterPathPrefixesFromSentryPerformance(string[] prefixes) + { + _prefixes = prefixes ?? Array.Empty(); + return this; + } + + public Task GetWebApp() + { + ValidateConfiguration(); + if (_startupType == null) + { + throw new InvalidOperationException("You must specify the ASP.NET startup class by calling UseStartup()."); + } + if (typeof(IStartup).IsAssignableFrom(_startupType)) + { + throw new InvalidOperationException("Your startup class must not implement IStartup (instead, use convention-based startup)."); + } + + var hostBuilder = new WebHostBuilder() + .ConfigureServices((context, services) => + { + if (_helmConfig == null) + { + // Add the lifetime service that will set up the development environment if necessary. 
+ services.AddSingleton(sp => + { + return new DevelopmentStartup( + sp.GetRequiredService(), + sp.GetRequiredService>(), + _googleCloudUsage, + sp.GetRequiredService(), + _dockerFactory); + }); + services.AddSingleton(sp => + { + return sp.GetRequiredService(); + }); + } + else if (context.HostingEnvironment.IsDevelopment()) + { + var helmConfig = _helmConfig(context.Configuration, context.HostingEnvironment.ContentRootPath); + services.AddSingleton(new BoundHelmConfiguration(helmConfig)); + Environment.SetEnvironmentVariable("REDIS_SERVER", "localhost:" + helmConfig.RedisPort); + } + }) + .UseKestrel() + .UseContentRoot(Directory.GetCurrentDirectory()) + .UseSentry(options => + { + options.TracesSampleRate = _tracingRate; + options.TracesSampler = (ctx) => + { + if (ctx.CustomSamplingContext.ContainsKey("__HttpPath") && + ctx.CustomSamplingContext["__HttpPath"] is string) + { + var path = (string?)ctx.CustomSamplingContext["__HttpPath"]; + if (path != null) + { + if (path == "/healthz") + { + return 0; + } + + if (_prefixes.Any(x => path.StartsWith(x, StringComparison.Ordinal))) + { + return 0; + } + } + } + + return null; + }; + options.AdjustStandardEnvironmentNameCasing = false; + }) + .ConfigureAppConfiguration((hostingContext, config) => + { + ConfigureAppConfiguration(hostingContext.HostingEnvironment, config); + }) + .ConfigureServices((context, services) => + { + var configurationBuilder = new ConfigurationBuilder() + .SetBasePath(context.HostingEnvironment.ContentRootPath); + ConfigureAppConfiguration(context.HostingEnvironment, configurationBuilder); + var configuration = configurationBuilder.Build(); + // Register IConfiguration for web host. + services.AddSingleton(configuration); + // Replace the builder's IConfiguration for the rest of ConfigureServices and Startup. 
+ context.Configuration = configuration; + }) + .ConfigureServices((context, services) => + { + _context = context; + // There is no replacement for this functionality, but UseStartup does not immediately execute so calling ConfigureServices on the host builder does not allow us to execute "post startup" configure, and there's no other way of hooking around startup. +#pragma warning disable CS0612 + services.AddSingleton(this); +#pragma warning restore CS0612 + }) + .UseStartup(_startupType); + return Task.FromResult(hostBuilder.Build()); + } + + public async Task StartWebApp() + { + var host = await GetWebApp().ConfigureAwait(false); + await host.RunAsync().ConfigureAwait(false); + } + + public async Task StartWebApp() where T : IWebAppProvider + { + var host = await T.GetWebHostAsync().ConfigureAwait(false); + await host.RunAsync().ConfigureAwait(false); + } + + public async Task StartWebApp(IWebHost host) + { + ArgumentNullException.ThrowIfNull(host); + await host.RunAsync().ConfigureAwait(false); + } + + Action IStartupConfigureServicesFilter.ConfigureServices(Action next) + { + return services => + { + this.PreStartupConfigureServices(_context!.HostingEnvironment, services); + + next(services); + + DefaultWebAppConfigurator.RemoveDefaultDataProtectionServices(services); + this.PostStartupConfigureServices(services); + + if (_context.HostingEnvironment.IsDevelopment() && _processors.Count > 0) + { + foreach (var kv in _processors) + { + kv.Value(services); + } + } + }; + } + + private static void RemoveDefaultDataProtectionServices(IServiceCollection services) + { + // AddSession in .NET 5 automatically calls AddDataProtection. This will register a service for IConfigureOptions, which in turn resolves IRegistryPolicyResolver which then goes through and sets up the default data protection. We never want to use these services; we only ever want to use our DataProtectionProvider, so undo all of the bindings that AddDataProtectionServices has gone and set up. 
+ void RemoveSingleBoundService(IServiceCollection services, string fullName) + { + foreach (var serviceDescriptor in services.Where(x => x?.ServiceType?.FullName == fullName).ToList()) + { + services.Remove(serviceDescriptor); + } + } + void RemoveSingleBoundImplementation(IServiceCollection services, string fullName) + { + foreach (var serviceDescriptor in services.Where(x => x?.ImplementationType?.FullName == fullName).ToList()) + { + services.Remove(serviceDescriptor); + } + } + RemoveSingleBoundService(services, "Microsoft.AspNetCore.DataProtection.IRegistryPolicyResolver"); + RemoveSingleBoundImplementation(services, "Microsoft.AspNetCore.DataProtection.Internal.KeyManagementOptionsSetup"); + RemoveSingleBoundImplementation(services, "Microsoft.AspNetCore.DataProtection.Internal.DataProtectionOptionsSetup"); + RemoveSingleBoundService(services, "Microsoft.AspNetCore.DataProtection.KeyManagement.IKeyManager"); + RemoveSingleBoundService(services, "Microsoft.AspNetCore.DataProtection.Infrastructure.IApplicationDiscriminator"); + RemoveSingleBoundImplementation(services, "Microsoft.AspNetCore.DataProtection.Internal.DataProtectionHostedService"); + RemoveSingleBoundService(services, "Microsoft.AspNetCore.DataProtection.KeyManagement.Internal.IDefaultKeyResolver"); + RemoveSingleBoundService(services, "Microsoft.AspNetCore.DataProtection.KeyManagement.Internal.IKeyRingProvider"); + foreach (var serviceDescriptor in services.Where(x => x?.ServiceType == typeof(IDataProtectionProvider) && x?.ImplementationType != typeof(StaticDataProtectionProvider)).ToList()) + { + services.Remove(serviceDescriptor); + } + RemoveSingleBoundService(services, "Microsoft.AspNetCore.DataProtection.XmlEncryption.ICertificateResolver"); + } + + protected override void PreStartupConfigureServices(IHostEnvironment hostEnvironment, IServiceCollection services) + { + // Add the static data protector. 
+ services.AddSingleton(); + services.AddSingleton(); + + base.PreStartupConfigureServices(hostEnvironment, services); + } + + protected override void PostStartupConfigureServices(IServiceCollection services) + { + // Add common HTTP services. + services.AddSingleton(); + services.AddScoped(); + + base.PostStartupConfigureServices(services); + + // Add the services for multi-tenanting. These are only valid in a web app, and we only register them if the current tenant service is not our builtin SingleTenantService. + if (_currentTenantService != typeof(SingleCurrentTenantService)) + { + if ((_googleCloudUsage & GoogleCloudUsageFlag.Datastore) != 0) + { + services.AddScoped(); + services.AddScoped(); +#pragma warning disable CS0618 // Type or member is obsolete + services.AddScoped(); +#pragma warning restore CS0618 // Type or member is obsolete + services.AddScoped(); + } + services.AddScoped(); + } + + // Register the Sentry tracer, since we always use Sentry. + services.AddSingleton(); + + // If we don't have the HTTP client factory registered, register it now. 
+ if (!services.Any(x => x.ServiceType == typeof(IHttpClientFactory))) + { + services.AddHttpClient(); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/DeveloperDockerPort.cs b/UET/Redpoint.CloudFramework/Startup/DeveloperDockerPort.cs new file mode 100644 index 00000000..90e3060c --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/DeveloperDockerPort.cs @@ -0,0 +1,19 @@ +namespace Redpoint.CloudFramework.Startup +{ + public record struct DeveloperDockerPort + { + public DeveloperDockerPort(ushort containerPort, ushort hostPort) + { + ContainerPort = containerPort; + HostPort = hostPort; + } + + public ushort ContainerPort { get; set; } + + public ushort HostPort { get; set; } + + public static implicit operator DeveloperDockerPort(ushort d) => new DeveloperDockerPort(d, d); + + public static DeveloperDockerPort FromUInt16(ushort d) => new DeveloperDockerPort(d, d); + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/DevelopmentDockerContainer.cs b/UET/Redpoint.CloudFramework/Startup/DevelopmentDockerContainer.cs new file mode 100644 index 00000000..98554091 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/DevelopmentDockerContainer.cs @@ -0,0 +1,16 @@ +namespace Redpoint.CloudFramework.Startup +{ + using System.Collections.Generic; + + public record class DevelopmentDockerContainer + { + public required string Name { get; set; } + public string? Context { get; set; } + public string? Image { get; set; } + public string Dockerfile { get; set; } = "Dockerfile"; + public IReadOnlyCollection Ports { get; set; } = Array.Empty(); + public IReadOnlyDictionary Environment { get; set; } = new Dictionary(); + public IReadOnlyList Arguments { get; set; } = Array.Empty(); + internal string? 
ImageId { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/DevelopmentStartup.cs b/UET/Redpoint.CloudFramework/Startup/DevelopmentStartup.cs new file mode 100644 index 00000000..c160272d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/DevelopmentStartup.cs @@ -0,0 +1,451 @@ +namespace Redpoint.CloudFramework.Startup +{ + using Docker.DotNet; + using Docker.DotNet.Models; + using ICSharpCode.SharpZipLib.Tar; + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using System; + using System.Collections.Generic; + using System.Diagnostics; + using System.Globalization; + using System.IO; + using System.Linq; + using System.Runtime.InteropServices; + using System.Text; + using System.Threading; + using System.Threading.Tasks; + + internal class DevelopmentStartup : IHostedService + { + private readonly IHostEnvironment _hostEnvironment; + private readonly ILogger _logger; + private readonly GoogleCloudUsageFlag _googleCloudUsage; + private readonly IConfiguration _configuration; + private readonly Func? _dockerFactory; + internal bool _didStart; + + internal static readonly string[] _pubsubArgs = new[] + { + "gcloud", + "beta", + "emulators", + "pubsub", + "start", + "--host-port=0.0.0.0:9000" + }; + internal static readonly string[] _datastoreArgs = new[] + { + "gcloud", + "beta", + "emulators", + "datastore", + "start", + // Firestore guarantees strong consistency now, so this + // should be reasonably safe. + "--consistency=1.0", + "--host-port=0.0.0.0:9001", + "--no-store-on-disk" + }; + + public DevelopmentStartup( + IHostEnvironment hostEnvironment, + ILogger logger, + GoogleCloudUsageFlag googleCloudUsage, + IConfiguration configuration, + Func? 
dockerFactory) + { + _hostEnvironment = hostEnvironment; + _logger = logger; + _googleCloudUsage = googleCloudUsage; + _configuration = configuration; + _dockerFactory = dockerFactory; + _didStart = false; + } + + private record ExpectedContainer + { + public required string Name { get; set; } + public string? Image { get; set; } = null; + public IReadOnlyList Arguments { get; set; } = Array.Empty(); + public IReadOnlyCollection Ports { get; set; } = Array.Empty(); + public IReadOnlyCollection Env { get; set; } = Array.Empty(); + public bool DoNotPull { get; internal set; } = false; + } + + private class ConsoleLogProgress : IProgress + { + public void Report(JSONMessage value) + { + } + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + try + { + if (!_hostEnvironment.IsDevelopment()) + { + return; + } + + if (Environment.GetEnvironmentVariable("NO_AUTOSTART_DEPENDENCIES") == "true") + { + return; + } + + var client = new DockerClientConfiguration().CreateClient(); + + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + _logger.LogInformation("Ensuring that Docker Desktop is running..."); + bool connected = false; + bool startedDocker = false; + while (!connected) + { + try + { + await client.Containers.ListContainersAsync(new ContainersListParameters { Limit = 1 }, cancellationToken).ConfigureAwait(false); + connected = true; + } + catch (Exception ex) when (ex is TimeoutException || ex is DockerApiException) + { + if (!startedDocker) + { + if (Process.GetProcessesByName("Docker Desktop.exe").Length == 0) + { + // Run gpupdate /force first. + _logger.LogInformation("Running gpupdate /force before starting Docker Desktop..."); + var gpupdate = Process.Start(Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.System), "gpupdate.exe"), "/force"); + gpupdate.WaitForExit(); + + // Start Docker Desktop automatically. 
+ _logger.LogInformation("Starting Docker Desktop for you..."); + Process.Start(Path.Combine( + Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles), + "Docker", + "Docker", + "Docker Desktop.exe")); + startedDocker = true; + } + } + } + } + } + + var expectedContainers = new List(); + + var developerContainers = _dockerFactory != null ? _dockerFactory(_configuration, _hostEnvironment.ContentRootPath) : Array.Empty(); + foreach (var developerContainer in developerContainers) + { + // For developer containers, we have to make sure they're + // up to date and built first. + if (developerContainer.Image == null) + { + if (string.IsNullOrWhiteSpace(developerContainer.Context) || + string.IsNullOrWhiteSpace(developerContainer.Dockerfile)) + { + throw new InvalidOperationException($"You must set either 'Image' or ('Context' and 'Dockerfile') for each additional container configuration (for '{developerContainer.Name}' container)."); + } + + string? lastEntry = null; + await client.Images.BuildImageFromDockerfileAsync(new ImageBuildParameters + { + Dockerfile = developerContainer.Dockerfile, + }, CreateTarballForDockerfileDirectory(developerContainer.Context), Array.Empty(), new Dictionary(), new ForwardingProgress((JSONMessage msg) => + { + var entry = msg.Status ?? msg.Stream?.Trim(); + if (!string.IsNullOrWhiteSpace(entry)) + { + if (lastEntry != entry) + { + _logger.LogInformation($"Building {developerContainer.Name}: {entry}"); + } + lastEntry = entry; + } + + if (entry != null && entry.StartsWith("Successfully built ", StringComparison.InvariantCulture)) + { + developerContainer.ImageId = entry.Substring("Successfully built ".Length); + } + }), cancellationToken).ConfigureAwait(false); + if (string.IsNullOrWhiteSpace(developerContainer.ImageId)) + { + throw new DevelopmentStartupException($"Docker image for {developerContainer.Name} failed to build. 
Check the output for more information."); + } + expectedContainers.Add( + new ExpectedContainer + { + Name = developerContainer.Name, + Image = developerContainer.ImageId, + Arguments = developerContainer.Arguments ?? Array.Empty(), + Ports = developerContainer.Ports, + Env = developerContainer.Environment.Select(kv => $"{kv.Key}={kv.Value}").ToList(), + DoNotPull = true, + }); + } + else + { + expectedContainers.Add( + new ExpectedContainer + { + Name = developerContainer.Name, + Image = developerContainer.Image, + Arguments = developerContainer.Arguments ?? Array.Empty(), + Ports = developerContainer.Ports, + Env = developerContainer.Environment.Select(kv => $"{kv.Key}={kv.Value}").ToList(), + }); + } + } + + expectedContainers.Add( + new ExpectedContainer + { + Name = "redis", + Image = "redis:6.0.10", + Arguments = { }, + Ports = new DeveloperDockerPort[] + { + 6379 + } + }); + if ((_googleCloudUsage & GoogleCloudUsageFlag.PubSub) != 0) + { + expectedContainers.Add( + new ExpectedContainer + { + Name = "pubsub", + Image = "gcr.io/google.com/cloudsdktool/cloud-sdk:latest", + Arguments = _pubsubArgs, + Ports = new DeveloperDockerPort[] + { + 9000 + } + } + ); + } + if ((_googleCloudUsage & GoogleCloudUsageFlag.Datastore) != 0) + { + expectedContainers.Add( + new ExpectedContainer + { + Name = "datastore", + Image = "gcr.io/google.com/cloudsdktool/cloud-sdk:latest", + Arguments = _datastoreArgs, + Ports = new DeveloperDockerPort[] + { + 9001 + } + } + ); + }; + + var runningContainers = (await client.Containers.ListContainersAsync(new ContainersListParameters + { + All = true, + }, cancellationToken).ConfigureAwait(false)); + var runningContainersByName = new Dictionary(); + foreach (var runningContainer in runningContainers) + { + foreach (var name in runningContainer.Names) + { + runningContainersByName[name] = runningContainer; + } + } + + if ((_googleCloudUsage & GoogleCloudUsageFlag.PubSub) != 0 || + (_googleCloudUsage & GoogleCloudUsageFlag.Datastore) != 0) 
+ { + _logger.LogInformation("This application will connect to the local Redis emulator."); + } + else + { + _logger.LogInformation("This application will connect to the local Redis and Google Cloud emulators."); + } + + // Create the network. + var network = (await client.Networks.ListNetworksAsync(cancellationToken: cancellationToken).ConfigureAwait(false)).FirstOrDefault(x => x.Name == "cloud-framework")?.ID; + if (network == null) + { + network = (await client.Networks.CreateNetworkAsync(new NetworksCreateParameters + { + Name = "cloud-framework", + }, cancellationToken).ConfigureAwait(false)).ID; + } + + foreach (var expectedContainer in expectedContainers) + { + bool start = false; + if (runningContainersByName.ContainsKey("/" + expectedContainer.Name)) + { + // This container is running, check to make sure it's arguments are correct. + var runningContainer = runningContainersByName["/" + expectedContainer.Name]; + if (runningContainer.Image != expectedContainer.Image || + runningContainer.State != "running" || + !runningContainer.NetworkSettings.Networks.Any(x => x.Value.NetworkID == network) || + !runningContainer.Ports.Select(x => x.PublicPort).All(x => expectedContainer.Ports.Contains(x)) || + !expectedContainer.Ports.Select(x => x).All(x => runningContainer.Ports.Any(y => x.ContainerPort == y.PublicPort))) + { + // This container is wrong. + _logger.LogInformation($"Stopping and removing {expectedContainer.Name} container because it's configuration is incorrect."); + try + { + await client.Containers.KillContainerAsync(runningContainer.ID, new ContainerKillParameters + { + Signal = "SIGKILL" + }, cancellationToken).ConfigureAwait(false); + } + catch { } + await client.Containers.RemoveContainerAsync(runningContainer.ID, new ContainerRemoveParameters + { + Force = true + }, cancellationToken).ConfigureAwait(false); + start = true; + } + } + else + { + // Container not running, always start it. 
+ start = true; + } + + if (!start) + { + continue; + } + + if (!expectedContainer.DoNotPull) + { + if (!(await client.Images.ListImagesAsync(new ImagesListParameters + { + All = true, + }, cancellationToken).ConfigureAwait(false)).Any(x => x.RepoTags?.Contains(expectedContainer.Image) ?? false)) + { + _logger.LogInformation($"Pulling the {expectedContainer.Image} image... (this might take a while)"); + await client.Images.CreateImageAsync(new ImagesCreateParameters + { + FromImage = expectedContainer.Image, + }, null, new ConsoleLogProgress(), cancellationToken).ConfigureAwait(false); + } + } + + _logger.LogInformation($"Launching {expectedContainer.Name} container because it is necessary for the development environment."); + var createdContainerConfig = new CreateContainerParameters + { + Name = expectedContainer.Name, + Image = expectedContainer.Image, + Cmd = expectedContainer.Arguments.ToList(), + ExposedPorts = expectedContainer.Ports.ToDictionary(k => k.ContainerPort.ToString(CultureInfo.InvariantCulture) + "/tcp", v => new EmptyStruct()), + HostConfig = new HostConfig + { + PortBindings = expectedContainer.Ports.ToDictionary(k => k.ContainerPort.ToString(CultureInfo.InvariantCulture) + "/tcp", v => (IList)new List + { + new PortBinding + { + // Only expose as "localhost" on the host machine. + HostIP = "127.0.0.1", + HostPort = v.HostPort.ToString(CultureInfo.InvariantCulture), + } + }), + }, + NetworkingConfig = new NetworkingConfig + { + EndpointsConfig = new Dictionary + { + { + network, + new EndpointSettings + { + NetworkID = network, + Aliases = new List + { + expectedContainer.Name, + } + } + } + } + }, + Env = new List + { + "CLOUDSDK_CORE_PROJECT=local-dev" + }.Concat(expectedContainer.Env ?? 
new List()).ToList(), + }; + var createdContainer = await client.Containers.CreateContainerAsync(createdContainerConfig, cancellationToken).ConfigureAwait(false); + await client.Containers.StartContainerAsync(createdContainer.ID, new ContainerStartParameters + { + }, cancellationToken).ConfigureAwait(false); + } + } + finally + { + _didStart = true; + } + } + + public Task StopAsync(CancellationToken cancellationToken) + { + _didStart = false; + return Task.CompletedTask; + } + + private static MemoryStream CreateTarballForDockerfileDirectory(string directory) + { + var tarball = new MemoryStream(); + var files = Directory.GetFiles(directory, "*.*", SearchOption.AllDirectories); + + using var archive = new TarOutputStream(tarball, Encoding.UTF8) + { + //Prevent the TarOutputStream from closing the underlying memory stream when done + IsStreamOwner = false + }; + + foreach (var file in files) + { + //Replacing slashes as KyleGobel suggested and removing leading / + string tarName = file.Substring(directory.Length).Replace('\\', '/').TrimStart('/'); + + //Let's create the entry header + var entry = TarEntry.CreateTarEntry(tarName); + using var fileStream = File.OpenRead(file); + entry.Size = fileStream.Length; + entry.TarHeader.Mode = Convert.ToInt32("100755", 8); //chmod 755 + archive.PutNextEntry(entry); + + //Now write the bytes of data + byte[] localBuffer = new byte[32 * 1024]; + while (true) + { + int numRead = fileStream.Read(localBuffer, 0, localBuffer.Length); + if (numRead <= 0) + break; + + archive.Write(localBuffer, 0, numRead); + } + + //Nothing more to do with this entry + archive.CloseEntry(); + } + archive.Close(); + + //Reset the stream and return it, so it can be used by the caller + tarball.Position = 0; + return tarball; + } + + private class ForwardingProgress : IProgress + { + private readonly Action _func; + + public ForwardingProgress(Action func) + { + _func = func; + } + + public void Report(T value) + { + _func(value); + } + } + } +} diff 
--git a/UET/Redpoint.CloudFramework/Startup/DevelopmentStartupException.cs b/UET/Redpoint.CloudFramework/Startup/DevelopmentStartupException.cs new file mode 100644 index 00000000..4905a068 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/DevelopmentStartupException.cs @@ -0,0 +1,10 @@ +namespace Redpoint.CloudFramework.Startup +{ + using System; + + public class DevelopmentStartupException : Exception + { + /// + public DevelopmentStartupException(string message) : base(message) { } + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/HelmConfiguration.cs b/UET/Redpoint.CloudFramework/Startup/HelmConfiguration.cs new file mode 100644 index 00000000..17e55c20 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/HelmConfiguration.cs @@ -0,0 +1,9 @@ +namespace Redpoint.CloudFramework.Startup +{ + public class HelmConfiguration + { + public int DatastorePort { get; set; } + public int RedisPort { get; set; } + public int PubSubPort { get; set; } + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/IBaseConfigurator.cs b/UET/Redpoint.CloudFramework/Startup/IBaseConfigurator.cs new file mode 100644 index 00000000..259321f5 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/IBaseConfigurator.cs @@ -0,0 +1,36 @@ +extern alias RDCommandLine; + +namespace Redpoint.CloudFramework.Startup +{ + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.Hosting; + using Redpoint.CloudFramework.Prefix; + using System; + using System.Diagnostics.CodeAnalysis; + + public interface IBaseConfigurator + { + [Obsolete("Use AddPrefixProvider on the service collection inside Startup instead of this method.")] + TBase UsePrefixProvider<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>() where T : IPrefixProvider; + + TBase UseMultiTenant<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>() where T : ICurrentTenantService; + + /// + /// Set what Google Cloud services are used at runtime. 
By default all services are on except for Logging, Trace and Error Reporting (which are sufficiently covered by Sentry instead). + /// + /// + /// + TBase UseGoogleCloud(GoogleCloudUsageFlag usageFlag); + + /// + /// If called, this requires that the 'appsettings' secret be loaded from Google Cloud Secret Manager + /// in production when the application starts up. + /// + /// If your application relies on secrets from Google Cloud Secret Manager, you can use this to ensure + /// the application doesn't start up in an inconsistent state. + /// + TBase RequireGoogleCloudSecretManagerConfiguration(); + + TBase UseCustomConfigLayers(Action customConfigLayers); + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/IOptionalHelmConfiguration.cs b/UET/Redpoint.CloudFramework/Startup/IOptionalHelmConfiguration.cs new file mode 100644 index 00000000..f991ca0e --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/IOptionalHelmConfiguration.cs @@ -0,0 +1,7 @@ +namespace Redpoint.CloudFramework.Startup +{ + public interface IOptionalHelmConfiguration + { + HelmConfiguration? 
GetHelmConfig(); + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/IServiceAppConfigurator.cs b/UET/Redpoint.CloudFramework/Startup/IServiceAppConfigurator.cs new file mode 100644 index 00000000..f41186cf --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/IServiceAppConfigurator.cs @@ -0,0 +1,28 @@ +namespace Redpoint.CloudFramework.Startup +{ + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.DependencyInjection; + using Redpoint.CloudFramework.Processor; + using System; + using System.Threading.Tasks; + using System.Diagnostics.CodeAnalysis; + using Quartz; + + public interface IServiceAppConfigurator : IBaseConfigurator + { + [RequiresDynamicCode("This internally uses HostBuilder, which requires dynamic code.")] + Task StartServiceApp(string[] args); + + IServiceAppConfigurator AddProcessor<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>() where T : class, IContinuousProcessor; + + IServiceAppConfigurator AddProcessor<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>(Action triggerBuilder) where T : class, IScheduledProcessor; + + IServiceAppConfigurator UseServiceConfiguration(Action configureServices); + + IServiceAppConfigurator UseDevelopmentDockerContainers(Func factory); + + IServiceAppConfigurator UseHelm(Func helmConfig); + + IServiceAppConfigurator UseDefaultRoles(params string[] roleNames); + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/IWebAppConfigurator.cs b/UET/Redpoint.CloudFramework/Startup/IWebAppConfigurator.cs new file mode 100644 index 00000000..69160f0c --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/IWebAppConfigurator.cs @@ -0,0 +1,38 @@ +namespace Redpoint.CloudFramework.Startup +{ + using Microsoft.AspNetCore.Hosting; + using Microsoft.Extensions.Configuration; + using Quartz; + using Redpoint.CloudFramework.Processor; + using System; + using System.Diagnostics.CodeAnalysis; + using System.Threading.Tasks; + + public 
interface IWebAppConfigurator : IBaseConfigurator + { + IWebAppConfigurator UseStartup<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors | DynamicallyAccessedMemberTypes.PublicMethods)] T>(); + + IWebAppConfigurator UseSentryTracing(double tracingRate); + + [Obsolete("Call UseSentryTracing instead.")] + IWebAppConfigurator UsePerformanceTracing(double tracingRate); + + IWebAppConfigurator UseDevelopmentDockerContainers(Func factory); + + IWebAppConfigurator UseHelm(Func helmConfig); + + IWebAppConfigurator FilterPathPrefixesFromSentryPerformance(string[] prefixes); + + Task GetWebApp(); + + Task StartWebApp(); + + Task StartWebApp() where T : IWebAppProvider; + + Task StartWebApp(IWebHost host); + + IWebAppConfigurator AddDevelopmentProcessor<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>() where T : class, IContinuousProcessor; + + IWebAppConfigurator AddDevelopmentProcessor<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)] T>(Action triggerBuilder) where T : class, IScheduledProcessor; + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/IWebAppProvider.cs b/UET/Redpoint.CloudFramework/Startup/IWebAppProvider.cs new file mode 100644 index 00000000..b02b870b --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/IWebAppProvider.cs @@ -0,0 +1,10 @@ +namespace Redpoint.CloudFramework.Startup +{ + using Microsoft.AspNetCore.Hosting; + using System.Threading.Tasks; + + public interface IWebAppProvider + { + static abstract ValueTask GetWebHostAsync(); + } +} diff --git a/UET/Redpoint.CloudFramework/Startup/InteractiveConsoleAppConfigurator.cs b/UET/Redpoint.CloudFramework/Startup/InteractiveConsoleAppConfigurator.cs new file mode 100644 index 00000000..ca1e712a --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/InteractiveConsoleAppConfigurator.cs @@ -0,0 +1,154 @@ +namespace Redpoint.CloudFramework.Startup +{ + // @note: This needs to be completely redone based on 
the command-line infrastructure that we wrote + // for UET, but we don't have any active projects that are using InteractiveConsoleAppConfigurator + // so we don't need to refactor this now. +#if ENABLE_UNSUPPORTED + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.DependencyInjection; + using Microsoft.Extensions.Hosting; + using Microsoft.Extensions.Logging; + using Redpoint.CloudFramework.Tracing; + using System; + using System.CommandLine; + using System.CommandLine.Builder; + using System.CommandLine.IO; + using System.CommandLine.Parsing; + using System.Diagnostics.CodeAnalysis; + using System.IO; + using System.Linq; + using System.Threading.Tasks; + using static System.Environment; + + public interface IInteractiveConsoleAppConfigurator : IBaseConfigurator + { + IInteractiveConsoleAppConfigurator UseCommand(RootCommand rootCommand); + + IInteractiveConsoleAppConfigurator UseServiceConfiguration(Action configureServices); + + IInteractiveConsoleAppConfigurator UseHelm(Func helmConfig); + + [RequiresUnreferencedCode("This implementation scans the AppDomain for all implementations of Command.")] + Task StartInteractiveConsoleApp(string[] args); + } + + internal class InteractiveConsoleAppConfigurator : BaseConfigurator, IInteractiveConsoleAppConfigurator + { + private RootCommand? _rootCommand; + private Action? _configureServices; + private Func? 
_helmConfig; + + public InteractiveConsoleAppConfigurator() + { + _isInteractiveCLIApp = true; + } + + public IInteractiveConsoleAppConfigurator UseCommand(RootCommand rootCommand) + { + _rootCommand = rootCommand; + return this; + } + + public IInteractiveConsoleAppConfigurator UseServiceConfiguration(Action configureServices) + { + _configureServices = configureServices; + return this; + } + + public IInteractiveConsoleAppConfigurator UseHelm(Func helmConfig) + { + _helmConfig = helmConfig; + return this; + } + + [RequiresUnreferencedCode("This implementation scans the AppDomain for all implementations of Command.")] + public async Task StartInteractiveConsoleApp(string[] args) + { + ValidateConfiguration(); + if (_rootCommand == null) + { + throw new InvalidOperationException("You must specify the root command by calling UseCommand(command)."); + } + + var productionOption = new Option("--production", "If this flag is passed, this CLI connects to the production database directly. You must also pass --production-project-id and --production-redis-server."); + var productionProjectIdOption = new Option("--production-project-id", "The Google Cloud project ID to use with --production.") { ArgumentHelpName = "project-id" }; + var productionRedisServerOption = new Option("--production-redis-server", "The Redis server to connect to for use with --production. 
You'll typically need to forward a connection to the Redis server using kubectl port-forward.") { ArgumentHelpName = "127.0.0.1:6379" }; + _rootCommand.AddGlobalOption(productionOption); + _rootCommand.AddGlobalOption(productionProjectIdOption); + _rootCommand.AddGlobalOption(productionRedisServerOption); + + var rootArgs = _rootCommand.Parse(args); + var isProduction = rootArgs.GetValueForOption(productionOption); + if (isProduction) + { + if (string.IsNullOrWhiteSpace(rootArgs.GetValueForOption(productionProjectIdOption)) || + string.IsNullOrWhiteSpace(rootArgs.GetValueForOption(productionRedisServerOption))) + { + Console.Error.WriteLine("--production-project-id and --production-redis-server must both be set!"); + return 1; + } + + Environment.SetEnvironmentVariable("GOOGLE_APPLICATION_CREDENTIALS", Path.Combine(Environment.GetFolderPath(SpecialFolder.ApplicationData), "gcloud", "application_default_credentials.json")); + Environment.SetEnvironmentVariable("GOOGLE_CLOUD_PROJECT_ID", rootArgs.GetValueForOption(productionProjectIdOption)); + Environment.SetEnvironmentVariable("REDIS_SERVER", rootArgs.GetValueForOption(productionRedisServerOption)); + } + + var parser = new CommandLineBuilder(_rootCommand) + .UseHost((IHostBuilder hostBuilder) => + { + hostBuilder = hostBuilder + .UseEnvironment(isProduction ? 
"Production" : "Development") + .ConfigureServices((context, services) => + { + if (!isProduction) + { + if (_helmConfig == null) + { + services.AddSingleton(sp => + { + return new DevelopmentStartup( + sp.GetRequiredService(), + sp.GetRequiredService>(), + _googleCloudUsage, + sp.GetRequiredService(), + (_, _) => Array.Empty()); + }); + } + else if (context.HostingEnvironment.IsDevelopment()) + { + var helmConfig = _helmConfig(context.Configuration, context.HostingEnvironment.ContentRootPath); + services.AddSingleton(new BoundHelmConfiguration(helmConfig)); + Environment.SetEnvironmentVariable("REDIS_SERVER", "localhost:" + helmConfig.RedisPort); + } + } + }) + .ConfigureServices((context, services) => + { + services.AddSingleton(); + services.AddSingleton(); + + this.PreStartupConfigureServices(context.HostingEnvironment, services); + _configureServices?.Invoke(services); + this.PostStartupConfigureServices(services); + }) + .ConfigureAppConfiguration((hostingContext, config) => + { + ConfigureAppConfiguration(hostingContext.HostingEnvironment, config); + }); + // Bind all the commands. 
+ foreach (var commandType in AppDomain.CurrentDomain.GetAssemblies().SelectMany(x => x.GetTypes()).Where(x => typeof(Command).IsAssignableFrom(x))) + { + var handlerType = commandType.GetNestedType("Handler"); + if (handlerType != null) + { + hostBuilder.UseCommandHandler(commandType, handlerType); + } + } + }) + .UseHelp() + .Build(); + return await parser.InvokeAsync(args).ConfigureAwait(false); + } + } +#endif +} diff --git a/UET/Redpoint.CloudFramework/Startup/RedisConnector.cs b/UET/Redpoint.CloudFramework/Startup/RedisConnector.cs new file mode 100644 index 00000000..0ae0b8a1 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Startup/RedisConnector.cs @@ -0,0 +1,91 @@ +namespace Redpoint.CloudFramework +{ + using Redpoint.CloudFramework.Cache; + using Microsoft.Extensions.Caching.Distributed; + using Microsoft.Extensions.DependencyInjection; + using System; + using StackExchange.Redis; + using Microsoft.Extensions.Logging; + using System.Threading; + using Microsoft.Extensions.Caching.StackExchangeRedis; + using Microsoft.Extensions.Hosting; + using System.Threading.Tasks; + using Redpoint.CloudFramework.Repository.Redis; + using Redpoint.CloudFramework.Repository; + using Redpoint.CloudFramework.Startup; + + internal static class RedisConnector + { + internal static void AddDistributedRedpointCache( + this IServiceCollection services, + IHostEnvironment hostEnvironment) + { + var redisServerEnv = Environment.GetEnvironmentVariable("REDIS_SERVER"); + string redisServer; + if (!hostEnvironment.IsDevelopment()) + { + if (string.IsNullOrWhiteSpace(redisServerEnv)) + { + throw new InvalidOperationException("Cloud Framework requires a Redis server in production/staging environments. Set the REDIS_SERVER environment variable."); + } + redisServer = redisServerEnv; + } + else if (!string.IsNullOrWhiteSpace(redisServerEnv)) + { + // Allow development override for cases where this application is being + // run as a dependency of another application. 
+ redisServer = redisServerEnv; + } + else if (Environment.GetEnvironmentVariable("GITLAB_CI") == "true") + { + // This will be running a service in GitLab CI/CD. + redisServer = "redis:6379"; + } + else + { + // This will be running in a Docker container on the local machine. + redisServer = "localhost:6379"; + } + + var redisConnect = ConnectionMultiplexerProxy.GetRedisConnectionString(redisServer); + + services.AddOptions(); + services.Configure(x => + { + x.Configuration = redisConnect; + }); + services.Add(ServiceDescriptor.Singleton()); + services.Add(ServiceDescriptor.Singleton()); + + services.AddSingleton(sp => + { + return new ConnectionMultiplexerProxy( + redisServer, + sp.GetRequiredService>()); + }); + services.AddSingleton(); + } + + private class WaitUntilRedisConnectedService : IHostedService + { + private readonly IServiceProvider _serviceProvider; + + public WaitUntilRedisConnectedService(IServiceProvider serviceProvider) + { + _serviceProvider = serviceProvider; + } + + public Task StartAsync(CancellationToken cancellationToken) + { + // This will block until we are connected to Redis. Since the web host's IHostedService won't have StartAsync called until this StartAsync returns, this ensures we don't start listening until we're connected to Redis. 
+ _ = _serviceProvider.GetRequiredService().ClientName; + return Task.CompletedTask; + } + + public Task StopAsync(CancellationToken cancellationToken) + { + return Task.CompletedTask; + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/Storage/B2NetFileStorage.cs b/UET/Redpoint.CloudFramework/Storage/B2NetFileStorage.cs new file mode 100644 index 00000000..ccc08c96 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Storage/B2NetFileStorage.cs @@ -0,0 +1,171 @@ +namespace Redpoint.CloudFramework.Storage +{ + using B2Net; + using B2Net.Models; + using Microsoft.Extensions.Configuration; + using Microsoft.Extensions.Logging; + using System; + using System.Collections.Generic; + using System.IO; + using System.Threading.Tasks; + using System.Web; + + public class B2NetFileStorage : IFileStorage + { + private readonly IConfiguration _configuration; + + public B2NetFileStorage(IConfiguration configuration, ILogger logger) + { + logger.LogInformation("Using Backblaze B2 storage to store files"); + _configuration = configuration; + } + + // NOTE: You can not cache B2Client instances across requests because they + // do not refresh their authorization codes and will eventually stop working over time. 
+ + private B2Client GetClient(FileStorageProfile profile) + { + if (_configuration.GetSection($"CloudFramework:B2:{profile.Name}") == null) + { + throw new InvalidOperationException($"B2 profile {profile.Name} is not configured!"); + } + + B2Options options = new B2Options + { + KeyId = _configuration.GetSection($"CloudFramework:B2:{profile.Name}:KeyId")?.Value, + ApplicationKey = _configuration.GetSection($"CloudFramework:B2:{profile.Name}:ApplicationKey")?.Value, + BucketId = _configuration.GetSection($"CloudFramework:B2:{profile.Name}:BucketId")?.Value, + PersistBucket = true, + }; + return new B2Client(options); + } + + public async Task GetInfo(FileStorageProfile profile, string fileId) + { + ArgumentNullException.ThrowIfNull(profile); + + var client = GetClient(profile); + var info = await client.Files.GetInfo(fileId).ConfigureAwait(false); + return new CloudFile + { + FileId = info.FileId, + Filename = info.FileName, + Size = info.Size, + }; + } + + public async Task Upload(FileStorageProfile profile, byte[] fileData, string fileName) + { + ArgumentNullException.ThrowIfNull(profile); + + var b2File = await GetClient(profile).Files.Upload(fileData, fileName).ConfigureAwait(false); + return new CloudFile + { + FileId = b2File.FileId, + Filename = b2File.FileName, + Size = b2File.Size, + }; + } + + public async Task Upload(FileStorageProfile profile, Stream fileData, string fileName, string contentType) + { + ArgumentNullException.ThrowIfNull(profile); + + var client = GetClient(profile); + var uploadUrl = await client.Files.GetUploadUrl().ConfigureAwait(false); + var b2File = await client.Files.Upload(fileData, fileName, uploadUrl, contentType, true, dontSHA: true).ConfigureAwait(false); + return new CloudFile + { + FileId = b2File.FileId, + Filename = b2File.FileName, + Size = b2File.Size, + }; + } + + public async Task Download(FileStorageProfile profile, string fileId) + { + ArgumentNullException.ThrowIfNull(profile); + + var client = GetClient(profile); + 
var b2Info = await client.Files.GetInfo(fileId).ConfigureAwait(false); + var b2File = await client.Files.DownloadById(b2Info.FileId).ConfigureAwait(false); + + using var memoryStream = new MemoryStream(); + await b2File.FileData.CopyToAsync(memoryStream).ConfigureAwait(false); + + return new CloudFileWithData(new CloudFile + { + FileId = b2Info.FileId, + Filename = b2Info.FileName, + Size = b2Info.Size, + }, memoryStream.ToArray()); + } + + public async Task<CloudFileWithData> Download(FileStorageProfile profile, CloudFile file) + { + ArgumentNullException.ThrowIfNull(profile); + ArgumentNullException.ThrowIfNull(file); + + var b2File = await GetClient(profile).Files.DownloadById(file.FileId).ConfigureAwait(false); + + using var memoryStream = new MemoryStream(); + await b2File.FileData.CopyToAsync(memoryStream).ConfigureAwait(false); + + return new CloudFileWithData(file, memoryStream.ToArray()); + } + + public async Task Delete(FileStorageProfile profile, string fileId) + { + ArgumentNullException.ThrowIfNull(profile); + + var client = GetClient(profile); + var b2Info = await client.Files.GetInfo(fileId).ConfigureAwait(false); + await client.Files.Delete(b2Info.FileId, b2Info.FileName).ConfigureAwait(false); + } + + public async Task Delete(FileStorageProfile profile, CloudFile file) + { + ArgumentNullException.ThrowIfNull(profile); + ArgumentNullException.ThrowIfNull(file); + + await GetClient(profile).Files.Delete(file.FileId, file.Filename).ConfigureAwait(false); + } + + public async Task<string> GetAuthorizedDownloadUrl(FileStorageProfile profile, string fileName, int timeoutInSeconds = 15) + { + ArgumentNullException.ThrowIfNull(profile); + ArgumentNullException.ThrowIfNull(fileName); + + var client = GetClient(profile); + var authorization = await client.Files.GetDownloadAuthorization(fileName, timeoutInSeconds, _configuration.GetSection($"CloudFramework:B2:{profile.Name}:BucketId").Value).ConfigureAwait(false); + return 
_configuration.GetSection($"CloudFramework:B2:{profile.Name}:DownloadPrefix").Value + fileName.Replace("+", "%2B", StringComparison.InvariantCultureIgnoreCase) + "?Authorization=" + HttpUtility.UrlEncode(authorization.AuthorizationToken); + } + + public async Task<List<CloudFile>> GetList(FileStorageProfile profile, string prefix) + { + ArgumentNullException.ThrowIfNull(profile); + + var client = GetClient(profile); + + var files = new List<CloudFile>(); + var startFileName = string.Empty; + do + { + var list = await client.Files.GetListWithPrefixOrDemiliter(startFileName, prefix).ConfigureAwait(false); + startFileName = list.NextFileName; + foreach (var file in list.Files) + { + files.Add(new CloudFile + { + FileId = file.FileId, + Filename = file.FileName, + Size = file.Size, + }); + } + } + while (startFileName != null); + + return files; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Storage/IFileStorage.cs b/UET/Redpoint.CloudFramework/Storage/IFileStorage.cs new file mode 100644 index 00000000..4163852f --- /dev/null +++ b/UET/Redpoint.CloudFramework/Storage/IFileStorage.cs @@ -0,0 +1,66 @@ +namespace Redpoint.CloudFramework.Storage +{ + using Newtonsoft.Json; + using System.Collections.Generic; + using System.IO; + using System.Threading.Tasks; + + public class CloudFile + { + [JsonProperty("fileId")] + public string FileId { get; set; } = string.Empty; + + [JsonProperty("filename")] + public string Filename { get; set; } = string.Empty; + + [JsonProperty("size")] + public long Size { get; internal set; } + } + + public class CloudFileWithData : CloudFile + { + public CloudFileWithData(CloudFile file, byte[] data) + { + ArgumentNullException.ThrowIfNull(file); + + FileId = file.FileId; + Filename = file.Filename; + FileData = data; + } + + public ReadOnlyMemory<byte> FileData { get; set; } + } + + public class FileStorageProfile + { + public FileStorageProfile(string name) + { + Name = name; + } + + public string Name { get; } + + public static FileStorageProfile Default { get; } = new 
FileStorageProfile("Default"); + } + + public interface IFileStorage + { + Task<CloudFile> GetInfo(FileStorageProfile profile, string fileId); + + Task<CloudFileWithData> Download(FileStorageProfile profile, string fileId); + + Task<CloudFileWithData> Download(FileStorageProfile profile, CloudFile file); + + Task<CloudFile> Upload(FileStorageProfile profile, byte[] fileData, string fileName); + + Task<CloudFile> Upload(FileStorageProfile profile, Stream fileData, string fileName, string contentType); + + Task Delete(FileStorageProfile profile, string fileId); + + Task Delete(FileStorageProfile profile, CloudFile file); + + Task<string> GetAuthorizedDownloadUrl(FileStorageProfile profile, string fileName, int timeoutInSeconds = 15); + + Task<List<CloudFile>> GetList(FileStorageProfile profile, string prefix); + } +} diff --git a/UET/Redpoint.CloudFramework/Storage/LocalFileStorage.cs b/UET/Redpoint.CloudFramework/Storage/LocalFileStorage.cs new file mode 100644 index 00000000..03220349 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Storage/LocalFileStorage.cs @@ -0,0 +1,145 @@ +namespace Redpoint.CloudFramework.Storage +{ + using Microsoft.Extensions.Logging; + using NodaTime; + using System; + using System.Collections.Generic; + using System.IO; + using System.Threading.Tasks; + + public class LocalFileStorage : IFileStorage + { + private string _storageDataFolder = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "CloudFrameworkTemp", "Data"); + private string _storageNameFolder = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), "CloudFrameworkTemp", "Names"); + + public LocalFileStorage(ILogger<LocalFileStorage> logger) + { + logger.LogInformation("Using local file storage to store files"); + } + + private static int NextRandomInt() + { +#pragma warning disable CA5394 // Do not use insecure randomness + return Random.Shared.Next(); +#pragma warning restore CA5394 // Do not use insecure randomness + } + + public Task CreateLocalDirectoryStructure(FileStorageProfile profile) + { + 
ArgumentNullException.ThrowIfNull(profile); + + Directory.CreateDirectory(Path.Combine(_storageDataFolder, profile.Name)); + Directory.CreateDirectory(Path.Combine(_storageNameFolder, profile.Name)); + return Task.CompletedTask; + } + + public Task Delete(FileStorageProfile profile, string fileId) + { + ArgumentNullException.ThrowIfNull(profile); + + File.Delete(Path.Combine(_storageDataFolder, profile.Name, fileId)); + File.Delete(Path.Combine(_storageNameFolder, profile.Name, fileId)); + return Task.CompletedTask; + } + + public Task Delete(FileStorageProfile profile, CloudFile file) + { + ArgumentNullException.ThrowIfNull(profile); + ArgumentNullException.ThrowIfNull(file); + + File.Delete(Path.Combine(_storageDataFolder, profile.Name, file.FileId)); + File.Delete(Path.Combine(_storageNameFolder, profile.Name, file.FileId)); + return Task.CompletedTask; + } + + public Task<CloudFileWithData> Download(FileStorageProfile profile, string fileId) + { + CreateLocalDirectoryStructure(profile); + + var data = File.ReadAllBytes(Path.Combine(_storageDataFolder, profile.Name, fileId)); + var name = File.ReadAllText(Path.Combine(_storageNameFolder, profile.Name, fileId)).Trim(); + return Task.FromResult(new CloudFileWithData(new CloudFile + { + FileId = fileId, + Filename = name, + }, data)); + } + + public Task<CloudFileWithData> Download(FileStorageProfile profile, CloudFile file) + { + ArgumentNullException.ThrowIfNull(file); + + CreateLocalDirectoryStructure(profile); + + var data = File.ReadAllBytes(Path.Combine(_storageDataFolder, profile.Name, file.FileId)); + return Task.FromResult(new CloudFileWithData(file, data)); + } + + public Task<CloudFile> Upload(FileStorageProfile profile, byte[] fileData, string fileName) + { + CreateLocalDirectoryStructure(profile); + + var fileId = NextRandomInt() + "-" + SystemClock.Instance.GetCurrentInstant().ToUnixTimeMilliseconds(); + File.WriteAllBytes(Path.Combine(_storageDataFolder, profile.Name, fileId), fileData); + File.WriteAllText(Path.Combine(_storageNameFolder, 
profile.Name, fileId), fileName); + return Task.FromResult(new CloudFile + { + FileId = fileId, + Filename = fileName, + }); + } + + public Task<CloudFile> Upload(FileStorageProfile profile, Stream fileData, string fileName, string contentType) + { + ArgumentNullException.ThrowIfNull(fileData); + + CreateLocalDirectoryStructure(profile); + + var fileId = NextRandomInt() + "-" + SystemClock.Instance.GetCurrentInstant().ToUnixTimeMilliseconds(); + using (var stream = File.OpenWrite(Path.Combine(_storageDataFolder, profile.Name, fileId))) + { + fileData.CopyTo(stream); + fileData.Flush(); + } + File.WriteAllText(Path.Combine(_storageNameFolder, profile.Name, fileId), fileName); + return Task.FromResult(new CloudFile + { + FileId = fileId, + Filename = fileName, + }); + } + + public Task<string> GetAuthorizedDownloadUrl(FileStorageProfile profile, string fileName, int timeoutInSeconds = 15) + { + throw new InvalidOperationException("GetAuthorizedDownloadUrl not supported with LocalFileStorage implementation."); + } + + public Task<CloudFile> GetInfo(FileStorageProfile profile, string fileId) + { + ArgumentNullException.ThrowIfNull(profile); + + var name = File.ReadAllText(Path.Combine(_storageNameFolder, profile.Name, fileId)).Trim(); + return Task.FromResult(new CloudFile + { + FileId = fileId, + Filename = name, + }); + } + + public Task<List<CloudFile>> GetList(FileStorageProfile profile, string prefix) + { + ArgumentNullException.ThrowIfNull(profile); + + var results = new List<CloudFile>(); + foreach (var fileInfo in new DirectoryInfo(Path.Combine(_storageNameFolder, profile.Name)).GetFiles()) + { + results.Add(new CloudFile + { + FileId = fileInfo.Name, + Filename = File.ReadAllText(fileInfo.FullName).Trim(), + }); + } + return Task.FromResult(results); + } + } +} diff --git a/UET/Redpoint.CloudFramework/Tracing/IManagedTracer.cs b/UET/Redpoint.CloudFramework/Tracing/IManagedTracer.cs new file mode 100644 index 00000000..6665eb5d --- /dev/null +++ b/UET/Redpoint.CloudFramework/Tracing/IManagedTracer.cs @@ -0,0 +1,7 @@ 
+namespace Redpoint.CloudFramework.Tracing +{ + public interface IManagedTracer + { + ISpan StartSpan(string name, string? description = null); + } +} diff --git a/UET/Redpoint.CloudFramework/Tracing/ISpan.cs b/UET/Redpoint.CloudFramework/Tracing/ISpan.cs new file mode 100644 index 00000000..6dafa847 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Tracing/ISpan.cs @@ -0,0 +1,10 @@ +namespace Redpoint.CloudFramework.Tracing +{ + using System; + + public interface ISpan : IDisposable + { + void SetTag(string key, string value); + void SetExtra(string key, object? value); + } +} diff --git a/UET/Redpoint.CloudFramework/Tracing/NullManagedTracer.cs b/UET/Redpoint.CloudFramework/Tracing/NullManagedTracer.cs new file mode 100644 index 00000000..77230ab3 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Tracing/NullManagedTracer.cs @@ -0,0 +1,10 @@ +namespace Redpoint.CloudFramework.Tracing +{ + public class NullManagedTracer : IManagedTracer + { + public ISpan StartSpan(string name, string? description) + { + return NullSpan._instance; + } + } +} diff --git a/UET/Redpoint.CloudFramework/Tracing/NullSpan.cs b/UET/Redpoint.CloudFramework/Tracing/NullSpan.cs new file mode 100644 index 00000000..ba6a2e77 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Tracing/NullSpan.cs @@ -0,0 +1,23 @@ +namespace Redpoint.CloudFramework.Tracing +{ + internal class NullSpan : ISpan + { + internal static NullSpan _instance = new NullSpan(); + + private NullSpan() + { + } + + public void Dispose() + { + } + + public void SetExtra(string key, object? 
value) + { + } + + public void SetTag(string key, string value) + { + } + } +} diff --git a/UET/Redpoint.CloudFramework/Tracing/SentryManagedTracer.cs b/UET/Redpoint.CloudFramework/Tracing/SentryManagedTracer.cs new file mode 100644 index 00000000..b047cd49 --- /dev/null +++ b/UET/Redpoint.CloudFramework/Tracing/SentryManagedTracer.cs @@ -0,0 +1,49 @@ +namespace Redpoint.CloudFramework.Tracing +{ + using Sentry; + + public class SentryManagedTracer : IManagedTracer + { + private readonly IHub _hub; + + public SentryManagedTracer(IHub hub) + { + _hub = hub; + } + + public ISpan StartSpan(string name, string? description) + { + var sentrySpanObject = _hub.GetSpan()?.StartChild(name, description); + if (sentrySpanObject != null) + { + return new SentrySpan(sentrySpanObject); + } + return NullSpan._instance; + } + + private class SentrySpan : ISpan + { + private readonly Sentry.ISpan _span; + + public SentrySpan(Sentry.ISpan span) + { + _span = span; + } + + public void SetTag(string key, string value) + { + _span.SetTag(key, value); + } + + public void SetExtra(string key, object? value) + { + _span.SetExtra(key, value); + } + + public void Dispose() + { + _span.Finish(); + } + } + } +} diff --git a/UET/Redpoint.CloudFramework/TypedRouting/TypedRoutingExtensions.cs b/UET/Redpoint.CloudFramework/TypedRouting/TypedRoutingExtensions.cs new file mode 100644 index 00000000..0de9719f --- /dev/null +++ b/UET/Redpoint.CloudFramework/TypedRouting/TypedRoutingExtensions.cs @@ -0,0 +1,67 @@ +namespace Redpoint.CloudFramework.TypedRouting +{ + using Microsoft.AspNetCore.Mvc; + + /// <summary> + /// Provides helper functions for routing to actions in different controllers, since nameof(MyController) + /// does not produce the correct value for the controllerName parameter. 
+ /// </summary> + public static class TypedRoutingExtensions + { + private static string GetControllerName<T>() + { + var controllerName = typeof(T).Name; + if (controllerName.EndsWith("Controller", StringComparison.Ordinal)) + { + controllerName = controllerName.Substring(0, controllerName.Length - "Controller".Length); + } + return controllerName; + } + + public static IActionResult RedirectToAction<T>(this Controller currentController, string actionName) where T : Controller + { + ArgumentNullException.ThrowIfNull(currentController); + + return currentController.RedirectToAction(actionName, GetControllerName<T>()); + } + + public static IActionResult RedirectToAction<T>(this Controller currentController, string actionName, object routeValues) where T : Controller + { + ArgumentNullException.ThrowIfNull(currentController); + + return currentController.RedirectToAction(actionName, GetControllerName<T>(), routeValues); + } + + public static IActionResult RedirectToAction<T>(this Controller currentController, string actionName, object routeValues, string fragment) where T : Controller + { + ArgumentNullException.ThrowIfNull(currentController); + + return currentController.RedirectToAction(actionName, GetControllerName<T>(), routeValues, fragment); + } + + public static string? Action<T>(this IUrlHelper urlHelper, string actionName) where T : Controller + { + return urlHelper.Action(actionName, GetControllerName<T>()); + } + + public static string? Action<T>(this IUrlHelper urlHelper, string actionName, object values) where T : Controller + { + return urlHelper.Action(actionName, GetControllerName<T>(), values); + } + + public static string? Action<T>(this IUrlHelper urlHelper, string actionName, object values, string protocol) where T : Controller + { + return urlHelper.Action(actionName, GetControllerName<T>(), values, protocol); + } + + public static string? 
Action<T>(this IUrlHelper urlHelper, string actionName, object values, string protocol, string host) where T : Controller + { + return urlHelper.Action(actionName, GetControllerName<T>(), values, protocol, host); + } + + public static string? Action<T>(this IUrlHelper urlHelper, string actionName, object values, string protocol, string host, string fragment) where T : Controller + { + return urlHelper.Action(actionName, GetControllerName<T>(), values, protocol, host, fragment); + } + } +} diff --git a/UET/UET.sln b/UET/UET.sln index bf76e3a7..2219ad21 100644 --- a/UET/UET.sln +++ b/UET/UET.sln @@ -313,6 +313,22 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Redpoint.Uba", "Redpoint.Ub EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Redpoint.PackageManagement", "Redpoint.PackageManagement\Redpoint.PackageManagement.csproj", "{2136131B-7D12-45D5-9CEF-A255FCF26A44}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Redpoint.CloudFramework", "Redpoint.CloudFramework", "{A93D92E5-865B-4681-AC53-4B1689F1B3E8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Redpoint.CloudFramework", "Redpoint.CloudFramework\Redpoint.CloudFramework.csproj", "{1AAAACDA-D6D7-4C4B-8641-EB35B807AEFB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Redpoint.CloudFramework.CLI", "Redpoint.CloudFramework.CLI\Redpoint.CloudFramework.CLI.csproj", "{0CCDC2FD-49C6-44B9-B230-0EB9222BFD49}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Redpoint.CloudFramework.Tests.Shared", "Redpoint.CloudFramework.Tests.Shared\Redpoint.CloudFramework.Tests.Shared.csproj", "{07962749-5B1F-4510-8C58-40A393C9C044}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Redpoint.CloudFramework.Tests", "Redpoint.CloudFramework.Tests\Redpoint.CloudFramework.Tests.csproj", "{696936DC-396A-449A-96ED-CF7018CEE558}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Redpoint.ThirdParty.React.Core", 
"Lib\Redpoint.ThirdParty.React.Core\Redpoint.ThirdParty.React.Core.csproj", "{6DAC890A-6F44-48E4-8856-CA0F4B386ABD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Redpoint.ThirdParty.React.AspNet", "Lib\Redpoint.ThirdParty.React.AspNet\Redpoint.ThirdParty.React.AspNet.csproj", "{E8A9F0BC-A88B-4ECC-AF23-5CE16D034AD4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Redpoint.ThirdParty.React.AspNet.Middleware", "Lib\Redpoint.ThirdParty.React.AspNet.Middleware\Redpoint.ThirdParty.React.AspNet.Middleware.csproj", "{3170055B-07AC-4BEB-ACFA-233108B09BCA}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -851,6 +867,34 @@ Global {2136131B-7D12-45D5-9CEF-A255FCF26A44}.Debug|Any CPU.Build.0 = Debug|Any CPU {2136131B-7D12-45D5-9CEF-A255FCF26A44}.Release|Any CPU.ActiveCfg = Release|Any CPU {2136131B-7D12-45D5-9CEF-A255FCF26A44}.Release|Any CPU.Build.0 = Release|Any CPU + {1AAAACDA-D6D7-4C4B-8641-EB35B807AEFB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1AAAACDA-D6D7-4C4B-8641-EB35B807AEFB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1AAAACDA-D6D7-4C4B-8641-EB35B807AEFB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1AAAACDA-D6D7-4C4B-8641-EB35B807AEFB}.Release|Any CPU.Build.0 = Release|Any CPU + {0CCDC2FD-49C6-44B9-B230-0EB9222BFD49}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0CCDC2FD-49C6-44B9-B230-0EB9222BFD49}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0CCDC2FD-49C6-44B9-B230-0EB9222BFD49}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0CCDC2FD-49C6-44B9-B230-0EB9222BFD49}.Release|Any CPU.Build.0 = Release|Any CPU + {07962749-5B1F-4510-8C58-40A393C9C044}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {07962749-5B1F-4510-8C58-40A393C9C044}.Debug|Any CPU.Build.0 = Debug|Any CPU + {07962749-5B1F-4510-8C58-40A393C9C044}.Release|Any CPU.ActiveCfg = Release|Any CPU + {07962749-5B1F-4510-8C58-40A393C9C044}.Release|Any CPU.Build.0 = Release|Any CPU + 
{696936DC-396A-449A-96ED-CF7018CEE558}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {696936DC-396A-449A-96ED-CF7018CEE558}.Debug|Any CPU.Build.0 = Debug|Any CPU + {696936DC-396A-449A-96ED-CF7018CEE558}.Release|Any CPU.ActiveCfg = Release|Any CPU + {696936DC-396A-449A-96ED-CF7018CEE558}.Release|Any CPU.Build.0 = Release|Any CPU + {6DAC890A-6F44-48E4-8856-CA0F4B386ABD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6DAC890A-6F44-48E4-8856-CA0F4B386ABD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6DAC890A-6F44-48E4-8856-CA0F4B386ABD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6DAC890A-6F44-48E4-8856-CA0F4B386ABD}.Release|Any CPU.Build.0 = Release|Any CPU + {E8A9F0BC-A88B-4ECC-AF23-5CE16D034AD4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E8A9F0BC-A88B-4ECC-AF23-5CE16D034AD4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E8A9F0BC-A88B-4ECC-AF23-5CE16D034AD4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E8A9F0BC-A88B-4ECC-AF23-5CE16D034AD4}.Release|Any CPU.Build.0 = Release|Any CPU + {3170055B-07AC-4BEB-ACFA-233108B09BCA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3170055B-07AC-4BEB-ACFA-233108B09BCA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3170055B-07AC-4BEB-ACFA-233108B09BCA}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3170055B-07AC-4BEB-ACFA-233108B09BCA}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -959,6 +1003,13 @@ Global {7CDD0F44-C516-47E9-99E5-3885AD24636F} = {A10A6C63-109E-4825-AA79-81B0AE279A76} {D19B5289-C0A3-4025-834D-FA8AB1C19A32} = {1AE4AFCD-0F49-4CEA-8439-F1AAA2CDD183} {2AAD5971-F06A-4B7A-9E11-538BEC82F247} = {586E33AB-CC82-4ADF-92F3-3B287A5AEEAC} + {1AAAACDA-D6D7-4C4B-8641-EB35B807AEFB} = {A93D92E5-865B-4681-AC53-4B1689F1B3E8} + {0CCDC2FD-49C6-44B9-B230-0EB9222BFD49} = {A93D92E5-865B-4681-AC53-4B1689F1B3E8} + {07962749-5B1F-4510-8C58-40A393C9C044} = {A93D92E5-865B-4681-AC53-4B1689F1B3E8} + {696936DC-396A-449A-96ED-CF7018CEE558} = {A93D92E5-865B-4681-AC53-4B1689F1B3E8} + 
{6DAC890A-6F44-48E4-8856-CA0F4B386ABD} = {39698A12-7C9B-47F5-BA1A-9F4884A770AF} + {E8A9F0BC-A88B-4ECC-AF23-5CE16D034AD4} = {39698A12-7C9B-47F5-BA1A-9F4884A770AF} + {3170055B-07AC-4BEB-ACFA-233108B09BCA} = {39698A12-7C9B-47F5-BA1A-9F4884A770AF} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {8598A278-509A-48A6-A7B3-3E3B0D1011F1}