From 064064aa1ca552522da3d08fc36f4f7f5d10e301 Mon Sep 17 00:00:00 2001 From: Jedd Morgan <45512892+JR-Morgan@users.noreply.github.com> Date: Thu, 13 Apr 2023 23:28:27 +0100 Subject: [PATCH] reformat(core): Core reformatted (#2416) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(formatting): Added vscode tasks for global formatting * ensured escaped backslashes in slnf * Core format * reformat(core): reformatted core * Full cleanup on Core + Tests + Integration + Examples * fix(core): Fix Credential tests by null properly implementing Equals comparison We should not assume an object (or account) is non-null when checking equality. * feat(core): Implemented account getHashCode and removed redundant code * Revert "feat(core): Implemented account getHashCode and removed redundant code" This reverts commit 9e4c65ed4ae65878d6b2cd814ad489e552a2ea1e. * fix(core): Disable pragma warning for Equals override in Accounts.cs * Attempt at fixing our CI on mac after the reformat (#2420) * feat: get-ci-tools will now try to mirror branches * fix(ci): Build mac connectors with dotnet * fix(ci): build-with-mono instead of build-with-msbuild * fix(ci): Git ci-tools checkout fix * fix(ci): Build with mono path fix * fix(ci): SDK changes for avalonia to build rhino with dotnet targeting win * fix(ci): new dotnet build for mac * fix(ci): no build with mono param * fix(ci): run slnf * fix: do not use self-contained for connector mac build * fix(ci): Env vars not being propagated... 🤦🏼‍♂️ * fix(ci): PEBKAC * fix(ci): Install zip * fix(ci): case sensitive name of installer * fix(ci): Testing SDK build instead * fix(ci): PEBKAC round 2002 * fix(ci): Do not wait for integration tests to build connectors, just wait for deploy * fix(ci): Rename `build-core` to `build-sdk` * fix(ci): 🤦🏼‍♂️ missing the parameter * fix(ci): Name cannot be used as parameter name * fix(ci): pebkac yet again * fix(ci): this is it! 
* fix(ci): use net6.0 image * fix(ci): rollback to use net6 in tests --------- Co-authored-by: Alan Rynne --- .circleci/scripts/common-jobs.yml | 2 +- .circleci/scripts/config-generator.py | 10 +- .circleci/scripts/config-template.yml | 307 ++--- .circleci/scripts/connector-jobs.yml | 41 +- .config/dotnet-tools.json | 6 + .csharpierrc.yaml | 7 + .editorconfig | 8 +- .vscode/tasks.json | 100 ++ All.sln.DotSettings | 75 ++ ConnectorRhino/ConnectorRhino.slnf | 42 +- .../ConnectorRhino6/ConnectorRhino6.csproj | 42 +- .../ConnectorRhino7/ConnectorRhino7.csproj | 64 +- Core/Core/Api/Exceptions.cs | 125 +- .../Client.ActivityOperations.cs | 128 ++- .../Client.BranchOperations.cs | 327 +++--- .../Client.CommentOperations.cs | 148 +-- .../Client.CommitOperations.cs | 360 +++--- .../Client.ObjectOperations.cs | 125 +- .../Client.ServerOperations.cs | 45 +- .../Client.StreamOperations.cs | 895 ++++++++------- .../Client.UserOperations.cs | 157 +-- .../Client.Subscriptions.Branch.cs | 161 ++- .../Client.Subscriptions.Commit.cs | 167 ++- .../Client.Subscriptions.Stream.cs | 204 ++-- Core/Core/Api/GraphQL/Client.cs | 658 ++++++----- Core/Core/Api/GraphQL/Models.cs | 940 +++++++-------- .../Serializer/ConstantCaseEnumConverter.cs | 44 +- .../Api/GraphQL/Serializer/MapConverter.cs | 116 +- .../Serializer/NewtonsoftJsonSerializer.cs | 100 +- Core/Core/Api/GraphQL/SubscriptionModels.cs | 175 ++- Core/Core/Api/Helpers.cs | 489 ++++---- .../Core/Api/Operations/Operations.Receive.cs | 459 ++++---- Core/Core/Api/Operations/Operations.Send.cs | 329 +++--- .../Api/Operations/Operations.Serialize.cs | 241 ++-- Core/Core/Api/Operations/Operations.cs | 68 +- Core/Core/Credentials/Account.cs | 148 +-- Core/Core/Credentials/AccountManager.cs | 1021 ++++++++--------- Core/Core/Credentials/Exceptions.cs | 30 +- Core/Core/Credentials/Responses.cs | 76 +- Core/Core/Credentials/StreamWrapper.cs | 535 ++++----- Core/Core/Helpers/Crypt.cs | 26 +- Core/Core/Helpers/Http.cs | 378 +++--- 
Core/Core/Helpers/Path.cs | 297 +++-- Core/Core/Kits/Applications.cs | 266 +++-- Core/Core/Kits/Attributes.cs | 121 +- Core/Core/Kits/ISpeckleConverter.cs | 225 ++-- Core/Core/Kits/ISpeckleKit.cs | 102 +- Core/Core/Kits/KitDeclaration.cs | 93 +- Core/Core/Kits/KitManager.cs | 491 ++++---- Core/Core/Kits/Units.cs | 626 +++++----- Core/Core/Logging/Analytics.cs | 420 +++---- Core/Core/Logging/Log.cs | 160 +-- Core/Core/Logging/Setup.cs | 98 +- Core/Core/Logging/SpeckleException.cs | 71 +- Core/Core/Logging/SpeckleLog.cs | 467 ++++---- Core/Core/Models/Attributes.cs | 66 +- Core/Core/Models/Base.cs | 496 ++++---- Core/Core/Models/DynamicBase.cs | 496 ++++---- Core/Core/Models/DynamicBaseMemberType.cs | 75 +- Core/Core/Models/Extensions.cs | 183 ++- Core/Core/Models/Extras.cs | 602 +++++----- .../Models/GraphTraversal/DefaultTraversal.cs | 265 +++-- .../Models/GraphTraversal/GraphTraversal.cs | 209 ++-- .../Models/GraphTraversal/ITraversalRule.cs | 61 +- .../Core/Models/GraphTraversal/RuleBuilder.cs | 141 ++- Core/Core/Models/InvalidPropNameException.cs | 17 +- Core/Core/Models/Utilities.cs | 316 ++--- .../Serialisation/BaseObjectDeserializerV2.cs | 510 ++++---- .../Serialisation/BaseObjectSerializer.cs | 967 ++++++++-------- .../Serialisation/BaseObjectSerializerV2.cs | 671 ++++++----- .../BaseObjectSerialzerUtilities.cs | 595 +++++----- .../DeserializationWorkerThreads.cs | 153 ++- Core/Core/Serialisation/ValueConverter.cs | 367 +++--- Core/Core/Transports/ITransport.cs | 213 ++-- Core/Core/Transports/Memory.cs | 198 ++-- Core/Core/Transports/SQLite.cs | 757 ++++++------ Core/Core/Transports/Server.cs | 957 ++++++++------- .../Transports/ServerUtils/GzipContent.cs | 75 +- .../Core/Transports/ServerUtils/IServerApi.cs | 35 +- .../ServerUtils/ParallelServerAPI.cs | 543 +++++---- Core/Core/Transports/ServerUtils/ServerAPI.cs | 761 ++++++------ Core/Core/Transports/ServerV2.cs | 653 +++++------ Core/Core/Transports/Utilities.cs | 45 +- Core/Examples/Program.cs | 130 ++- 
Core/Examples/Subscriptions.cs | 9 +- Core/IntegrationTests/Api.cs | 856 +++++++------- Core/IntegrationTests/Fixtures.cs | 227 ++-- Core/IntegrationTests/GraphQLCLient.cs | 42 +- Core/IntegrationTests/ServerTransportTests.cs | 311 ++--- .../Subscriptions/Branches.cs | 174 ++- .../IntegrationTests/Subscriptions/Commits.cs | 254 ++-- .../IntegrationTests/Subscriptions/Streams.cs | 152 +-- Core/IntegrationTests/Usings.cs | 2 +- Core/Tests/Accounts.cs | 150 ++- Core/Tests/BaseTests.cs | 421 +++---- Core/Tests/ClosureTests.cs | 178 ++- Core/Tests/ExceptionTests.cs | 31 +- Core/Tests/Fixtures.cs | 83 +- Core/Tests/GraphQLClient.cs | 211 ++-- Core/Tests/Hashing.cs | 123 +- Core/Tests/Helpers.cs | 55 +- Core/Tests/Kits.cs | 24 +- .../ObjectTraversal/GraphTraversalTests.cs | 100 +- .../ObjectTraversal/TraversalMockObjects.cs | 14 +- .../ObjectTraversal/TraversalRuleTests.cs | 7 +- Core/Tests/Path.cs | 154 ++- Core/Tests/SendReceiveLocal.cs | 451 ++++---- Core/Tests/SerializationTests.cs | 448 ++++---- Core/Tests/SerializerNonBreakingChanges.cs | 159 +-- Core/Tests/SpeckleType.cs | 41 +- Core/Tests/TestKit.cs | 337 +++--- Core/Tests/TraversalTests.cs | 172 ++- Core/Tests/Wrapper.cs | 109 +- .../Transports/DiskTransport/DiskTransport.cs | 212 ++-- Core/Transports/MongoDBTransport/MongoDB.cs | 423 ++++--- Directory.Build.targets | 2 +- Objects/Tests/Tests.csproj | 38 +- SDK.slnf | 16 +- 118 files changed, 15058 insertions(+), 14301 deletions(-) create mode 100644 .csharpierrc.yaml create mode 100644 .vscode/tasks.json create mode 100644 All.sln.DotSettings diff --git a/.circleci/scripts/common-jobs.yml b/.circleci/scripts/common-jobs.yml index 32bacd7d06..53eac01827 100644 --- a/.circleci/scripts/common-jobs.yml +++ b/.circleci/scripts/common-jobs.yml @@ -1,4 +1,4 @@ core: - test-core: requires: - - build-core + - build-sdk diff --git a/.circleci/scripts/config-generator.py b/.circleci/scripts/config-generator.py index a63fa8ef7d..e745376b22 100644 --- 
a/.circleci/scripts/config-generator.py +++ b/.circleci/scripts/config-generator.py @@ -129,10 +129,8 @@ def createConfigFile(deploy: bool, outputPath: str, external_build: bool): if "requires" not in jobAttrs.keys(): jobAttrs["requires"] = [] # Require objects to build for all connectors - jobAttrs["requires"] += ["build-objects"] - if build_core: - # Require core tests too if core needs rebuilding. - jobAttrs["requires"] += ["test-core"] + jobAttrs["requires"] += ["build-sdk"] + # Add name to all jobs name = f"{slug}-build" if "name" not in jobAttrs.keys(): @@ -148,7 +146,9 @@ def createConfigFile(deploy: bool, outputPath: str, external_build: bool): # Append connector jobs to main workflow jobs main_workflow["jobs"] += connector_jobs[connector] - + if build_core: + # Require core tests too if core needs rebuilding. + jobs_before_deploy.append("test-core") # Modify jobs for deployment if deploy: deploy_job = {} diff --git a/.circleci/scripts/config-template.yml b/.circleci/scripts/config-template.yml index 5a737f0c50..d3e47010c9 100644 --- a/.circleci/scripts/config-template.yml +++ b/.circleci/scripts/config-template.yml @@ -7,21 +7,30 @@ orbs: wait-for: cobli/wait-for@0.0.2 discord: antonioned/discord@0.1.0 +# The main workflows for our monorepo pipeline. 
+ +workflows: + build: + jobs: + - get-ci-tools: + context: github-dev-bot + - build-sdk: + requires: + - get-ci-tools + nuget: + jobs: + - build-sdk: + name: nuget-deploy-core + filters: + branches: + ignore: /.*/ + tags: + only: /^(nuget-core|nugets)\/([0-9]+)\.([0-9]+)\.([0-9]+)(?:-\w{1,10})?$/ + post-steps: + - packandpublish + context: nuget + commands: - packandpublish: - parameters: - projectfilepath: - type: string - steps: - - run: - name: Publish nuget package - command: | - $tag = if([string]::IsNullOrEmpty($env:CIRCLE_TAG)) { "2.0.999.$($env:WORKFLOW_NUM)" } else { $env:CIRCLE_TAG } - $version = $tag.Split("/")[1] - msbuild <> /p:Version="$version" /p:Configuration=Release /p:WarningLevel=0 /p:IsDesktopBuild=false -t:pack - dotnet nuget push "**/*.nupkg" -s https://api.nuget.org/v3/index.json -k $env:NUGET_APIKEY -n --skip-duplicate - environment: - WORKFLOW_NUM: << pipeline.number >> cached-checkout: steps: - restore_cache: @@ -34,35 +43,59 @@ commands: key: *source-cache paths: - ".git" - packandpublish-bash: - parameters: - projectfilepath: - type: string + + packandpublish: steps: - run: - name: Publish nuget package + name: Build nuget packages command: | TAG=$(if [ "${CIRCLE_TAG}" ]; then echo $CIRCLE_TAG; else echo "2.0.999.$WORKFLOW_NUM"; fi;) SEMVER=$(echo "$TAG" | sed -e 's/\/[a-zA-Z-]+//') VERSION=$(echo "$SEMVER" | sed -e 's/[a-zA-Z]*\///') - msbuild <> /p:Version="$VERSION" /p:Configuration=Release /p:WarningLevel=0 /p:IsDesktopBuild=false -t:pack - ~/.dotnet/dotnet nuget push "**/*.nupkg" -s https://api.nuget.org/v3/index.json -k $NUGET_APIKEY -n --skip-duplicate + dotnet pack All.sln -p:Version=$VERSION -c Release -p:WarningLevel=0 -p:IsDesktopBuild=false environment: WORKFLOW_NUM: << pipeline.number >> + - run: + name: Push nuget packages + command: dotnet nuget push "**/*.nupkg" -s https://api.nuget.org/v3/index.json -k $NUGET_APIKEY -n --skip-duplicate + + run-tests: + parameters: + project: + type: string + title: + type: string + 
steps: + - run: + name: << parameters.title >> + command: dotnet test << parameters.project >> + -c Release + -v q + --logger:"junit;LogFileName={assembly}.results.xml" + --results-directory=TestResults + --collect:"XPlat Code Coverage" -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=opencover + jobs: # Each project will have individual jobs for each specific task it has to execute (build, release...) - build-core: + build-sdk: docker: - image: mcr.microsoft.com/dotnet/sdk:6.0 steps: - cached-checkout - run: - name: Build Core - command: dotnet build Core/Core.sln -c Release -v q + name: Build SDK Projects + command: dotnet build SDK.slnf -c Release -v m -p:WarningLevel=0 -p:IsDesktopBuild=false + - run-tests: + title: Core Unit Tests + project: Core/Tests/TestsUnit.csproj + - run-tests: + title: Objects Unit Tests + project: Objects/Tests/Tests.csproj + - store_test_results: + path: TestResults test-core: docker: - image: "mcr.microsoft.com/dotnet/sdk:6.0" # dotnet - # - image: "mcr.microsoft.com/dotnet/core/sdk:2.1-focal" # dotnet core 2.1 sdk (for netstandard support on build) # Node, redis, postgres and speckle-server images for test server - image: "cimg/node:16.15" - image: "cimg/redis:7.0.5" @@ -92,48 +125,11 @@ jobs: # Each project will have individual jobs for each specific task it has to WAIT_HOSTS: 127.0.0.1:5432, 127.0.0.1:6379, 127.0.0.1:9000 steps: - cached-checkout - - run: - name: Unit Test - command: dotnet test Core/Tests/TestsUnit.csproj -c Release -v q --logger:"junit;LogFileName={assembly}.results.xml" --results-directory=TestResults --collect:"XPlat Code Coverage" -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=opencover - - run: - name: Integration Tests # The integration tests are runinng on our test sevrer - command: dotnet test Core/IntegrationTests/TestsIntegration.csproj -c Release -v q --logger:"junit;LogFileName={assembly}.results.xml" --results-directory=TestResults 
--collect:"XPlat Code Coverage" -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=opencover - - store_artifacts: - path: TestResults - - #- codecov/upload - - build-objects: - docker: - - image: mcr.microsoft.com/dotnet/sdk:6.0 - steps: - - cached-checkout - - run: - name: Restore Objects - command: | # Restore only the projects we're about to build. - dotnet restore Objects/Objects/Objects.csproj - dotnet restore Objects/Tests/Tests.csproj - - run: - name: Build Objects - command: dotnet build --no-restore Objects/Objects/Objects.csproj -c Release /p:WarningLevel=0 /p:IsDesktopBuild=false - - run: - name: Test Objects - command: dotnet test Objects/Tests/Tests.csproj --no-restore -c - Release -v q --logger:"junit;LogFileName={assembly}.results.xml" --results-directory=TestResults - --collect:"XPlat Code Coverage" -- DataCollectionRunSettings.DataCollectors.DataCollector.Configuration.Format=opencover + - run-tests: + title: Core Integration Tests + project: Core/IntegrationTests/TestsIntegration.csproj - store_test_results: path: TestResults - - store_artifacts: - path: TestResults - - build-desktopui: - docker: - - image: mcr.microsoft.com/dotnet/sdk:6.0 - steps: - - cached-checkout - - run: - name: Build DesktopUI2 - command: dotnet build DesktopUI2/DesktopUI2/DesktopUI2.csproj -c Release -v q build-connector: # Reusable job for basic connectors executor: @@ -173,7 +169,7 @@ jobs: # Each project will have individual jobs for each specific task it has to $semver = if($tag.Contains('/')) {$tag.Split("/")[0] } else { $tag } $ver = if($semver.Contains('-')) {$semver.Split("-")[0] } else { $semver } $version = "$($ver).$($env:WORKFLOW_NUM)" - msbuild << parameters.slnname >>/<< parameters.slnname >>.sln /p:Configuration=Release /p:WarningLevel=0 /p:IsDesktopBuild=false /p:AssemblyVersionNumber=$version /p:AssemblyInformationalVersion=$semver /p:Version=$semver + msbuild << parameters.slnname >>/<< parameters.slnname >>.sln 
/p:Configuration=Release /p:WarningLevel=0 /p:IsDesktopBuild=false /p:Version=$semver environment: WORKFLOW_NUM: << pipeline.number >> - unless: @@ -186,7 +182,7 @@ jobs: # Each project will have individual jobs for each specific task it has to $semver = if($tag.Contains('/')) {$tag.Split("/")[0] } else { $tag } $ver = if($semver.Contains('-')) {$semver.Split("-")[0] } else { $semver } $version = "$($ver).$($env:WORKFLOW_NUM)" - dotnet publish << parameters.slnname >>/<< parameters.slnname >>/<< parameters.projname >>.csproj -c Release -v q -r win-x64 --self-contained /p:WarningLevel=0 /p:AssemblyVersionNumber=$version /p:AssemblyInformationalVersion=$semver /p:Version=$semver + dotnet publish << parameters.slnname >>/<< parameters.slnname >>/<< parameters.projname >>.csproj -c Release -v q -r win-x64 --self-contained /p:WarningLevel=0 /p:Version=$semver environment: WORKFLOW_NUM: << pipeline.number >> - run: @@ -204,15 +200,6 @@ jobs: # Each project will have individual jobs for each specific task it has to shell: cmd.exe #does not work in powershell environment: SLUG: << parameters.slug >> - # - when: - # condition: - # equal: [teklastructures, << parameters.slug >>] # Tekla Structures has it's own "plugin packager", so we do this instead of the ISS step - # steps: - # - run: - # name: Build TSEP - # command: speckle-sharp-ci-tools/TSEPBuilder/TeklaExtensionPackage.BatchBuilder.exe -I "$($env:CIRCLE_WORKING_DIRECTORY)\ConnectorTeklaStructures\ConnectorTeklaStructures2021\ManifestGenerator.xml" -O "$($env:CIRCLE_WORKING_DIRECTORY)\speckle-sharp-ci-tools\Installers\<>\<>.tsep" - # - store_artifacts: - # path: speckle-sharp-ci-tools/Installers/<>/ - when: condition: << pipeline.git.tag >> steps: @@ -223,7 +210,7 @@ jobs: # Each project will have individual jobs for each specific task it has to deploy-connector-new: docker: - - image: mcr.microsoft.com/dotnet/sdk:6.0 + - image: mcr.microsoft.com/dotnet/6.0 parameters: slug: type: string @@ -343,20 +330,9 @@ jobs: # 
Each project will have individual jobs for each specific task it has to command: | TAG=$(if [ "${CIRCLE_TAG}" ]; then echo $CIRCLE_TAG; else echo "2.0.999.$WORKFLOW_NUM"; fi;) SEMVER=$(echo "$TAG" | sed -e 's/\/[a-zA-Z-]*//') - $HOME/.dotnet/dotnet publish << parameters.slnname >>/<< parameters.slnname >>/<< parameters.projname >>.csproj -c Release -v q -r osx-x64 --self-contained /p:WarningLevel=0 /p:Version=$SEMVER + $HOME/.dotnet/dotnet publish << parameters.slnname >>/<< parameters.projname >>/<< parameters.projname >>.csproj -c Release -v q -r osx-x64 --self-contained /p:WarningLevel=0 /p:Version=$SEMVER environment: WORKFLOW_NUM: << pipeline.number >> - # $HOME/.dotnet/dotnet msbuild << parameters.slnname >>/<< parameters.slnname >>/<< parameters.projname >>.csproj -t:BundleApp -p:Configuration=Release -p:RuntimeIdentifier=osx-x64 -p:SelfContained=true /p:WarningLevel=0 -p:Version=$TAG - # $HOME/.dotnet/dotnet publish << parameters.slnname >>/<< parameters.slnname >>/<< parameters.projname >>.csproj -c Release -v q -r osx-arm64 --self-contained /p:WarningLevel=0 /p:Version=$TAG - # $HOME/.dotnet/dotnet msbuild << parameters.slnname >>/<< parameters.slnname >>/<< parameters.projname >>.csproj -t:BundleApp -p:Configuration=Release -p:RuntimeIdentifier=osx-arm64 -p:SelfContained=true /p:WarningLevel=0 -p:Version=$TAG - # see: https://www.kenmuse.com/blog/notarizing-dotnet-console-apps-for-macos/ - # merges the binaries inside the arm64 .app and then moves it up to the top level folder - # need to update dotnet to avoid an issue while lipo-ing the universal binaries - #- run: - # name: Lipo Merge Binaries - # command: | - # lipo -create -output "./<< parameters.slnname >>/<< parameters.slnname >>/bin/Release/net6.0/osx-arm64/publish/<< parameters.bundlename >>/Contents/MacOS/<< parameters.projname >>" "./<< parameters.slnname >>/<< parameters.slnname >>/bin/Release/net6.0/osx-arm64/publish/<< parameters.bundlename >>/Contents/MacOS/<< parameters.projname >>" 
"./<< parameters.slnname >>/<< parameters.slnname >>/bin/Release/net6.0/osx-x64/publish/<< parameters.bundlename >>/Contents/MacOS/<< parameters.projname >>" - # Compress build files - run: name: Zip Connector files command: | @@ -398,6 +374,95 @@ jobs: # Each project will have individual jobs for each specific task it has to paths: - speckle-sharp-ci-tools/Installers + build-connector-dotnet-mac: + docker: + - image: mcr.microsoft.com/dotnet/sdk:7.0 + parameters: + slnname: + type: string + projname: + type: string + default: "" + slug: + type: string + default: "" + converter-files: + type: string + default: "" + installername: + type: string + default: "" + build-config: + type: string + default: Release + steps: + - cached-checkout + - attach_workspace: + at: ./ + - run: + name: Set environment variables + command: | + mkdir -p speckle-sharp-ci-tools/Installers/<< parameters.slug >> + environment: + WORKFLOW_NUM: << pipeline.number >> + - run: + name: Build + command: | + TAG=$(if [ "${CIRCLE_TAG}" ]; then echo $CIRCLE_TAG; else echo "2.0.999.$WORKFLOW_NUM"; fi;) + SEMVER=$(echo "$TAG" | sed -e 's/\/[a-zA-Z-]*//') + dotnet build << parameters.slnname >>/<< parameters.slnname >>.slnf -c Release -v q -p:WarningLevel=0 -p:Version=$SEMVER -p:IsDesktopBuild=false + environment: + WORKFLOW_NUM: << pipeline.number >> + - run: + name: Install Zip + command: | + apt-get update -y + apt-get install zip -y + - run: + name: Zip Objects Kit files + command: | + zip -j Objects.zip << parameters.converter-files >> + - run: + name: Zip Connector files + command: | + cd << parameters.slnname >>/<< parameters.projname >>/bin/ + zip -r <>.zip ./ + - run: + name: Copy files to installer + command: | + mkdir -p speckle-sharp-ci-tools/Mac/<>/.installationFiles/ + cp Objects.zip speckle-sharp-ci-tools/Mac/<>/.installationFiles + cp << parameters.slnname >>/<< parameters.projname >>/bin/<>.zip speckle-sharp-ci-tools/Mac/<>/.installationFiles + # Create installer + - run: + name: Exit 
if External PR + command: if [ "$CIRCLE_PR_REPONAME" ]; then circleci-agent step halt; fi + - run: + name: Build Mac installer + command: dotnet publish speckle-sharp-ci-tools/Mac/<>/<>.sln -r osx-x64 -c Release + - run: + name: Zip installer + command: | + cd speckle-sharp-ci-tools/Mac/<>/bin/Release/net6.0/osx-x64/publish/ + zip -r <>.zip ./ + - store_artifacts: + path: speckle-sharp-ci-tools/Mac/<>/bin/Release/net6.0/osx-x64/publish/<>.zip + - run: + name: Copy to installer location + command: | + TAG=$(if [ "${CIRCLE_TAG}" ]; then echo $CIRCLE_TAG; else echo "2.0.999.$WORKFLOW_NUM"; fi;) + SEMVER=$(echo "$TAG" | sed -e 's/\/[a-zA-Z-]*//') + cp speckle-sharp-ci-tools/Mac/<>/bin/Release/net6.0/osx-x64/publish/<>.zip speckle-sharp-ci-tools/Installers/<< parameters.slug >>/<>-$SEMVER.zip + environment: + WORKFLOW_NUM: << pipeline.number >> + - when: + condition: << pipeline.git.tag >> + steps: + - persist_to_workspace: + root: ./ + paths: + - speckle-sharp-ci-tools/Installers + get-ci-tools: # Clones our ci tools and persists them to the workspace docker: - image: cimg/base:2021.01 @@ -410,9 +475,14 @@ jobs: # Each project will have individual jobs for each specific task it has to command: | mkdir ~/.ssh ssh-keyscan github.com >> ~/.ssh/known_hosts - - run: # Could not get ssh to work, so using a personal token + - run: name: Clone command: git clone git@github.com:specklesystems/speckle-sharp-ci-tools.git speckle-sharp-ci-tools + - run: + name: Checkout branch + command: | + cd speckle-sharp-ci-tools + git checkout ${CIRCLE_BRANCH} || git checkout main - persist_to_workspace: root: ./ paths: @@ -481,7 +551,7 @@ jobs: # Each project will have individual jobs for each specific task it has to - run: name: Install cmake command: | - brew install cmake + HOMEBREW_NO_AUTO_UPDATE=1 HOMEBREW_NO_INSTALL_CLEANUP=1 brew install cmake - run: name: Get Archicad devkit command: | @@ -524,6 +594,7 @@ jobs: # Each project will have individual jobs for each specific task it has to 
- run: name: Proceed to deploy command: echo "This step is just here to wait for all build jobs before proceeding to deploy each of them individually. If a job fails, no connector will be deployed." + notify-deploy: docker: - image: cimg/base:2021.01 @@ -534,57 +605,3 @@ jobs: # Each project will have individual jobs for each specific task it has to ":tada: a new version of Speckle-Sharp connectors was build successfully!" failure_message: ":red_circle: oh no! Speckle-Sharp connectors build has failed!" -# The main workflows for our monorepo pipeline. -# There should be at least one workflow per project in the monorepo. Each workflow should be run only when a boolean parameter is passed that corresponds to the pattern 'run_{PROJECT_NAME}'. -# These parameters are set by the 'selective-ci' job. -workflows: - build: - jobs: - - get-ci-tools: - context: github-dev-bot - - build-core: - requires: - - get-ci-tools - - build-objects: - requires: - - build-core - - build-desktopui: - requires: - - build-core - nuget: - jobs: - # CORE Nuget Publish - - build-core: - name: nuget-deploy-core - filters: - branches: - ignore: /.*/ # For testing only: /ci\/.*/ - tags: - only: /^(nuget-core|nugets)\/([0-9]+)\.([0-9]+)\.([0-9]+)(?:-\w{1,10})?$/ - post-steps: - - packandpublish: - projectfilepath: Core/Core.sln - context: nuget - - build-objects: - name: nuget-deploy-objects - filters: - branches: - ignore: /.*/ # For testing only: /ci\/.*/ - tags: - only: /^(nuget-objects|nugets)\/([0-9]+)\.([0-9]+)\.([0-9]+)(?:-\w{1,10})?$/ - post-steps: - - packandpublish-bash: - projectfilepath: Objects/Objects.sln - context: nuget - - build-desktopui: - name: nuget-deploy-desktopui - filters: - branches: - ignore: /.*/ # For testing only: /ci\/.*/ - tags: - only: /^(nuget-dui|nugets)\/([0-9]+)\.([0-9]+)\.([0-9]+)(?:-\w{1,10})?$/ - post-steps: - - packandpublish: - projectfilepath: DesktopUI2/DesktopUI2/DesktopUI2.csproj - context: nuget -# VS Code Extension Version: 1.4.0 diff --git 
a/.circleci/scripts/connector-jobs.yml b/.circleci/scripts/connector-jobs.yml index 539884fd4c..69c0062f9c 100644 --- a/.circleci/scripts/connector-jobs.yml +++ b/.circleci/scripts/connector-jobs.yml @@ -8,25 +8,19 @@ rhino: slnname: ConnectorRhino dllname: SpeckleConnectorRhino.rhp slug: rhino - requires: - - build-desktopui context: innosetup - build-connector: slnname: ConnectorRhino dllname: SpeckleConnectorRhino.rhp slug: grasshopper - requires: - - build-desktopui context: innosetup - - build-connector-mac: + - build-connector-dotnet-mac: name: rhino-build-mac slnname: ConnectorRhino projname: ConnectorRhino7 installername: SpeckleRhinoInstall build-config: Release Mac slug: rhino - requires: - - build-desktopui converter-files: " Objects/Converters/ConverterRhinoGh/ConverterRhino7/bin/Release/net48/Objects.dll Objects/Converters/ConverterRhinoGh/ConverterRhino7/bin/Release/net48/Objects.Converter.Rhino7.dll @@ -34,16 +28,13 @@ rhino: Objects/Converters/ConverterRhinoGh/ConverterGrasshopper7/bin/Release/net48/Objects.Converter.Grasshopper7.dll Objects/Converters/ConverterRhinoGh/ConverterGrasshopper6/bin/Release/netstandard2.0/Objects.Converter.Grasshopper6.dll " - - build-connector-mac: + - build-connector-dotnet-mac: name: grasshopper-build-mac slnname: ConnectorRhino projname: ConnectorRhino7 build-config: Release Mac - installername: SpeckleGhInstall - installer: true + installername: SpeckleGHInstall slug: grasshopper - requires: - - build-desktopui converter-files: " Objects/Converters/ConverterRhinoGh/ConverterRhino7/bin/Release/net48/Objects.dll Objects/Converters/ConverterRhinoGh/ConverterRhino7/bin/Release/net48/Objects.Converter.Rhino7.dll @@ -63,8 +54,6 @@ revit: slnname: ConnectorRevit dllname: SpeckleConnectorRevit.dll slug: revit - requires: - - build-desktopui context: innosetup autocadcivil: @@ -72,15 +61,11 @@ autocadcivil: slnname: ConnectorAutocadCivil dllname: SpeckleConnectorAutocad.dll slug: autocad - requires: - - build-desktopui 
context: innosetup - build-connector: slnname: ConnectorAutocadCivil dllname: SpeckleConnectorAutocad.dll slug: civil3d - requires: - - build-desktopui context: innosetup bentley: @@ -88,22 +73,16 @@ bentley: slnname: ConnectorBentley dllname: SpeckleConnectorMicroStation.dll slug: microstation - requires: - - build-desktopui context: innosetup - build-connector: slnname: ConnectorBentley dllname: SpeckleConnectorOpenBuildings.dll slug: openbuildings - requires: - - build-desktopui context: innosetup - build-connector: slnname: ConnectorBentley dllname: SpeckleConnectorOpenRail.dll slug: openrail - requires: - - build-desktopui context: innosetup - build-connector: slnname: ConnectorBentley @@ -116,37 +95,27 @@ teklastructures: slnname: ConnectorTeklaStructures dllname: SpeckleConnectorTeklaStructures.dll slug: teklastructures - requires: - - build-desktopui context: innosetup csi: - build-connector: slnname: ConnectorCSI dllname: SpeckleConnectorCSI.dll slug: etabs - requires: - - build-desktopui context: innosetup - build-connector: slnname: ConnectorCSI dllname: SpeckleConnectorCSI.dll slug: sap2000 - requires: - - build-desktopui context: innosetup - build-connector: slnname: ConnectorCSI dllname: SpeckleConnectorCSI.dll slug: safe - requires: - - build-desktopui context: innosetup - build-connector: slnname: ConnectorCSI dllname: SpeckleConnectorCSI.dll slug: csibridge - requires: - - build-desktopui context: innosetup archicad: @@ -166,7 +135,6 @@ archicad: requires: - build-archicad-add-on-25 - build-archicad-add-on-26 - - build-desktopui slnname: ConnectorArchicad projname: ConnectorArchicad dllname: ConnectorArchicad.dll @@ -192,7 +160,6 @@ archicad: requires: - build-archicad-add-on-25-mac - build-archicad-add-on-26-mac - - build-desktopui slnname: ConnectorArchicad projname: ConnectorArchicad slug: archicad @@ -204,6 +171,4 @@ navisworks: slnname: ConnectorNavisworks dllname: SpeckleConnectorNavisworks.dll slug: navisworks - requires: - - build-desktopui 
context: innosetup diff --git a/.config/dotnet-tools.json b/.config/dotnet-tools.json index 115c21ff82..b4895f64a4 100644 --- a/.config/dotnet-tools.json +++ b/.config/dotnet-tools.json @@ -13,6 +13,12 @@ "commands": [ "jb" ] + }, + "csharpier": { + "version": "0.23.0", + "commands": [ + "dotnet-csharpier" + ] } } } \ No newline at end of file diff --git a/.csharpierrc.yaml b/.csharpierrc.yaml new file mode 100644 index 0000000000..152a7153be --- /dev/null +++ b/.csharpierrc.yaml @@ -0,0 +1,7 @@ +printWidth: 120 +useTabs: false +tabWidth: 2 +preprocessorSymbolSets: + - "" + - "DEBUG" + - "DEBUG,CODE_STYLE" diff --git a/.editorconfig b/.editorconfig index 87810c5e51..0f961608c2 100644 --- a/.editorconfig +++ b/.editorconfig @@ -29,9 +29,10 @@ resharper_csharp_align_multiline_parameter = false resharper_csharp_int_align_comments = false resharper_csharp_outdent_commas = true resharper_csharp_outdent_dots = false -resharper_csharp_wrap_after_declaration_lpar = false +resharper_csharp_wrap_after_declaration_lpar = true resharper_csharp_wrap_after_invocation_lpar = true resharper_csharp_wrap_arguments_style = chop_if_long +resharper_csharp_wrap_before_declaration_rpar = true resharper_csharp_wrap_before_invocation_rpar = false resharper_csharp_wrap_extends_list_style = chop_if_long resharper_csharp_wrap_parameters_style = chop_if_long @@ -49,7 +50,7 @@ resharper_keep_existing_declaration_block_arrangement = true resharper_keep_existing_declaration_parens_arrangement = false resharper_keep_existing_embedded_block_arrangement = true resharper_keep_existing_enum_arrangement = true -resharper_keep_existing_expr_member_arrangement = false +resharper_keep_existing_expr_member_arrangement = true resharper_keep_existing_initializer_arrangement = false resharper_local_function_body = expression_body resharper_max_attribute_length_for_same_line = 20 @@ -59,7 +60,7 @@ resharper_max_invocation_arguments_on_line = 5 resharper_outdent_binary_ops = true resharper_outdent_dots = false 
resharper_place_constructor_initializer_on_same_line = false -resharper_place_simple_initializer_on_single_line = false +resharper_place_simple_initializer_on_single_line = true resharper_prefer_explicit_discard_declaration = false resharper_wrap_after_declaration_lpar = false resharper_wrap_before_invocation_rpar = false @@ -181,6 +182,7 @@ csharp_preserve_single_line_statements = true csharp_preserve_single_line_blocks = true # Analyzer settings +dotnet_diagnostic.ide0055.severity = none # Maintainability rules dotnet_diagnostic.ca1501.severity = warning diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000000..00d21cf94f --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,100 @@ +{ + // See https://go.microsoft.com/fwlink/?LinkId=733558 + // for the documentation about the tasks.json format + "version": "2.0.0", + "inputs": [ + { + "type": "promptString", + "id": "projectPath", + "description": "The path to the csproj, sln or slnf you want formatting", + "default": "./All.sln" + } + ], + "tasks": [ + { + "label": "speckle.format.csharpier", + "command": "dotnet", + "type": "shell", + "args": ["csharpier", "${input:projectPath}/.."], + "group": "build", + "presentation": { + "reveal": "silent" + }, + "problemMatcher": "$msCompile", + "hide": true + }, + { + "label": "speckle.format.style", + "command": "dotnet", + "type": "shell", + "args": ["format", "style", "${input:projectPath}"], + "group": "build", + "presentation": { + "reveal": "silent" + }, + "problemMatcher": "$msCompile", + "hide": true + }, + { + "label": "speckle.format.whitespace", + "command": "dotnet", + "type": "shell", + "args": ["format", "whitespace", "${input:projectPath}"], + "group": "build", + "presentation": { + "reveal": "silent" + }, + "problemMatcher": "$msCompile", + "hide": true + }, + { + "label": "speckle.format.analyzers", + "command": "dotnet", + "type": "shell", + "args": ["format", "analyzers", "${input:projectPath}"], + "group": "build", + 
"presentation": { + "reveal": "silent" + }, + "problemMatcher": "$msCompile", + "hide": true + }, + { + "label": "speckle.format.resharper", + "command": "dotnet", + "type": "shell", + "args": [ + "tool", + "run", + "jb", + "cleanupcode", + "${input:projectPath}", + "--profile=\"Speckle:\\sCustom\\sCleanup\"", + "--settings=\"All.sln.DotSettings\"", + ], + "group": "build", + "problemMatcher": "$msCompile", + "hide": false + }, + { + "label": "Speckle: Format Code", + "dependsOn": [ + "speckle.format.whitespace", + "speckle.format.style", + "speckle.format.csharpier", + ], + "dependsOrder": "sequence", + "problemMatcher": [] + }, + { + "label": "Speckle: Cleanup Code", + "dependsOn": [ + "Speckle: Format Code", + "speckle.format.analyzers", + "speckle.format.resharper", + ], + "dependsOrder": "sequence", + "problemMatcher": [] + } + ] +} diff --git a/All.sln.DotSettings b/All.sln.DotSettings new file mode 100644 index 0000000000..bcd3a0b513 --- /dev/null +++ b/All.sln.DotSettings @@ -0,0 +1,75 @@ + + + <?xml version="1.0" encoding="utf-16"?><Profile name="Speckle: Custom Cleanup"><AspOptimizeRegisterDirectives>True</AspOptimizeRegisterDirectives><CppCodeStyleCleanupDescriptor ArrangeBraces="True" ArrangeAuto="True" ArrangeFunctionDeclarations="True" ArrangeNestedNamespaces="True" ArrangeTypeAliases="True" ArrangeCVQualifiers="True" ArrangeSlashesInIncludeDirectives="True" ArrangeOverridingFunctions="True" SortIncludeDirectives="True" SortMemberInitializers="True" /><CSCodeStyleAttributes ArrangeTypeAccessModifier="True" ArrangeTypeMemberAccessModifier="True" SortModifiers="True" ArrangeArgumentsStyle="True" RemoveRedundantParentheses="True" AddMissingParentheses="True" ArrangeBraces="True" ArrangeAttributes="True" ArrangeCodeBodyStyle="True" ArrangeTrailingCommas="True" ArrangeObjectCreation="True" ArrangeDefaultValue="True" ArrangeNamespaces="True" 
/><Xaml.RemoveRedundantNamespaceAlias>True</Xaml.RemoveRedundantNamespaceAlias><CSArrangeQualifiers>True</CSArrangeQualifiers><CSFixBuiltinTypeReferences>True</CSFixBuiltinTypeReferences><VBOptimizeImports>True</VBOptimizeImports><VBShortenReferences>True</VBShortenReferences><CSOptimizeUsings><OptimizeUsings>True</OptimizeUsings></CSOptimizeUsings><CSShortenReferences>True</CSShortenReferences><FormatAttributeQuoteDescriptor>True</FormatAttributeQuoteDescriptor><IDEA_SETTINGS>&lt;profile version="1.0"&gt; + &lt;option name="myName" value="Speckle: Custom Cleanup" /&gt; + &lt;inspection_tool class="ES6ShorthandObjectProperty" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="JSArrowFunctionBracesCanBeRemoved" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="JSPrimitiveTypeWrapperUsage" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="JSRemoveUnnecessaryParentheses" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="JSUnnecessarySemicolon" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="TypeScriptExplicitMemberType" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryContinueJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryLabelJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryLabelOnBreakStatementJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryLabelOnContinueStatementJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryReturnJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="WrongPropertyKeyValueDelimiter" 
enabled="false" level="WEAK WARNING" enabled_by_default="false" /&gt; +&lt;/profile&gt;</IDEA_SETTINGS><RIDER_SETTINGS>&lt;profile&gt; + &lt;Language id="CSS"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;Rearrange&gt;true&lt;/Rearrange&gt; + &lt;/Language&gt; + &lt;Language id="EditorConfig"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="HTML"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;OptimizeImports&gt;true&lt;/OptimizeImports&gt; + &lt;Rearrange&gt;true&lt;/Rearrange&gt; + &lt;/Language&gt; + &lt;Language id="HTTP Request"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="Handlebars"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="Ini"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="JSON"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="Jade"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="JavaScript"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;OptimizeImports&gt;true&lt;/OptimizeImports&gt; + &lt;Rearrange&gt;true&lt;/Rearrange&gt; + &lt;/Language&gt; + &lt;Language id="Markdown"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="Properties"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="RELAX-NG"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="SQL"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="VueExpr"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="XML"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;OptimizeImports&gt;true&lt;/OptimizeImports&gt; + &lt;Rearrange&gt;true&lt;/Rearrange&gt; + &lt;/Language&gt; + &lt;Language id="yaml"&gt; + &lt;Reformat&gt;false&lt;/Reformat&gt; + &lt;/Language&gt; 
+&lt;/profile&gt;</RIDER_SETTINGS><CSReorderTypeMembers>True</CSReorderTypeMembers><RemoveCodeRedundancies>True</RemoveCodeRedundancies></Profile> + Speckle: Custom Cleanup + ExternalToolData|CSharpier|csharpier||csharpier|$FILE$ \ No newline at end of file diff --git a/ConnectorRhino/ConnectorRhino.slnf b/ConnectorRhino/ConnectorRhino.slnf index 31e5dc8d70..1f170bec50 100644 --- a/ConnectorRhino/ConnectorRhino.slnf +++ b/ConnectorRhino/ConnectorRhino.slnf @@ -1,27 +1,27 @@ { "solution": { - "path": "../All.sln", + "path": "..\\All.sln", "projects": [ - "ConnectorGrasshopper/ConnectorGrasshopper6/ConnectorGrasshopper6.csproj", - "ConnectorGrasshopper/ConnectorGrasshopper7/ConnectorGrasshopper7.csproj", - "ConnectorGrasshopper/ConnectorGrasshopperShared/ConnectorGrasshopperShared.shproj", - "ConnectorGrasshopper/ConnectorGrasshopperUtils/ConnectorGrasshopperUtils.csproj", - "ConnectorRhino/ConnectorRhino/ConnectorRhinoShared/ConnectorRhinoShared.shproj", - "ConnectorRhino/ConnectorRhino6/ConnectorRhino6.csproj", - "ConnectorRhino/ConnectorRhino7/ConnectorRhino7.csproj", - "Core/Core/Core.csproj", - "Core/Tests/TestsUnit.csproj", - "Core/Transports/DiskTransport/DiskTransport.csproj", - "Core/Transports/MongoDBTransport/MongoDBTransport.csproj", - "DesktopUI2/AvaloniaHwndHost/AvaloniaHwndHost.csproj", - "DesktopUI2/DesktopUI2/DesktopUI2.csproj", - "Objects/Converters/ConverterRhinoGh/ConverterGrasshopper6/ConverterGrasshopper6.csproj", - "Objects/Converters/ConverterRhinoGh/ConverterGrasshopper7/ConverterGrasshopper7.csproj", - "Objects/Converters/ConverterRhinoGh/ConverterRhino6/ConverterRhino6.csproj", - "Objects/Converters/ConverterRhinoGh/ConverterRhino7/ConverterRhino7.csproj", - "Objects/Converters/ConverterRhinoGh/ConverterRhinoGhShared/ConverterRhinoGhShared.shproj", - "Objects/Objects/Objects.csproj", - "Objects/Tests/Tests.csproj" + "ConnectorGrasshopper\\ConnectorGrasshopper6\\ConnectorGrasshopper6.csproj", + 
"ConnectorGrasshopper\\ConnectorGrasshopper7\\ConnectorGrasshopper7.csproj", + "ConnectorGrasshopper\\ConnectorGrasshopperShared\\ConnectorGrasshopperShared.shproj", + "ConnectorGrasshopper\\ConnectorGrasshopperUtils\\ConnectorGrasshopperUtils.csproj", + "ConnectorRhino\\ConnectorRhino\\ConnectorRhinoShared\\ConnectorRhinoShared.shproj", + "ConnectorRhino\\ConnectorRhino6\\ConnectorRhino6.csproj", + "ConnectorRhino\\ConnectorRhino7\\ConnectorRhino7.csproj", + "Core\\Core\\Core.csproj", + "Core\\Tests\\TestsUnit.csproj", + "Core\\Transports\\DiskTransport\\DiskTransport.csproj", + "Core\\Transports\\MongoDBTransport\\MongoDBTransport.csproj", + "DesktopUI2\\AvaloniaHwndHost\\AvaloniaHwndHost.csproj", + "DesktopUI2\\DesktopUI2\\DesktopUI2.csproj", + "Objects\\Converters\\ConverterRhinoGh\\ConverterGrasshopper6\\ConverterGrasshopper6.csproj", + "Objects\\Converters\\ConverterRhinoGh\\ConverterGrasshopper7\\ConverterGrasshopper7.csproj", + "Objects\\Converters\\ConverterRhinoGh\\ConverterRhino6\\ConverterRhino6.csproj", + "Objects\\Converters\\ConverterRhinoGh\\ConverterRhino7\\ConverterRhino7.csproj", + "Objects\\Converters\\ConverterRhinoGh\\ConverterRhinoGhShared\\ConverterRhinoGhShared.shproj", + "Objects\\Objects\\Objects.csproj", + "Objects\\Tests\\Tests.csproj" ] } } diff --git a/ConnectorRhino/ConnectorRhino6/ConnectorRhino6.csproj b/ConnectorRhino/ConnectorRhino6/ConnectorRhino6.csproj index baf177fac2..f705f80ee7 100644 --- a/ConnectorRhino/ConnectorRhino6/ConnectorRhino6.csproj +++ b/ConnectorRhino/ConnectorRhino6/ConnectorRhino6.csproj @@ -1,4 +1,4 @@ - + net462 @@ -14,32 +14,34 @@ x64 win-x64 true + true - + - - - + + + - + - - - - + + + + - - - + + + C:\Program Files\Rhino 6\System\Rhino.exe @@ -48,7 +50,7 @@ TRACE;RHINO6 - + TRACE;RHINO6;MAC @@ -59,13 +61,17 @@ TRACE;RHINO6;MAC - + - + True - + True - + \ No newline at end of file diff --git a/ConnectorRhino/ConnectorRhino7/ConnectorRhino7.csproj b/ConnectorRhino/ConnectorRhino7/ConnectorRhino7.csproj 
index b1a62c35fe..37fafbbfae 100644 --- a/ConnectorRhino/ConnectorRhino7/ConnectorRhino7.csproj +++ b/ConnectorRhino/ConnectorRhino7/ConnectorRhino7.csproj @@ -1,4 +1,4 @@ - + net48 @@ -14,7 +14,7 @@ true - x64 win-x64 + true - + - - - - + + + + - + - - - + + + - all + all - + - - - - + + + + - - - + + + - + - + - + @@ -90,13 +96,17 @@ TRACE;RHINO7;MAC - + - + True - + True - + \ No newline at end of file diff --git a/Core/Core/Api/Exceptions.cs b/Core/Core/Api/Exceptions.cs index 3e846bc910..b00e5c91b0 100644 --- a/Core/Core/Api/Exceptions.cs +++ b/Core/Core/Api/Exceptions.cs @@ -6,61 +6,84 @@ using GraphQL; using Speckle.Core.Logging; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +/// +/// Base class for GraphQL API exceptions +/// +public class SpeckleGraphQLException : SpeckleException { - /// - /// Base class for GraphQL API exceptions - /// - public class SpeckleGraphQLException : SpeckleException - { - private GraphQLRequest _request; - public GraphQLResponse? Response; - - public IEnumerable ErrorMessages => - Response?.Errors != null ? Response.Errors.Select(e => e.Message) : new string[] { }; - public IDictionary? Extensions => Response?.Extensions; - - public SpeckleGraphQLException( - string message, - GraphQLRequest request, - GraphQLResponse? response - ) : base(message) - { - _request = request; - Response = response; - } - - public SpeckleGraphQLException( - string message, - Exception inner, - GraphQLRequest request, - GraphQLResponse? response - ) : base(message, inner) { } - } + private GraphQLRequest _request; + public GraphQLResponse? Response; - public class SpeckleGraphQLException : SpeckleGraphQLException + public SpeckleGraphQLException( + string message, + GraphQLRequest request, + GraphQLResponse? response + ) + : base(message) { - public SpeckleGraphQLException( - string message, - GraphQLRequest request, - GraphQLResponse? 
response - ) : base(message, request, response) { } + _request = request; + Response = response; } - /// - /// Represents a "FORBIDDEN" on "UNAUTHORIZED" GraphQL error as an exception. - /// https://www.apollographql.com/docs/apollo-server/v2/data/errors/#unauthenticated - /// https://www.apollographql.com/docs/apollo-server/v2/data/errors/#forbidden - /// - public class SpeckleGraphQLForbiddenException : SpeckleGraphQLException - { - public SpeckleGraphQLForbiddenException(GraphQLRequest request, GraphQLResponse response) - : base("Your request was forbidden", request, response) { } - } + public SpeckleGraphQLException( + string message, + Exception inner, + GraphQLRequest request, + GraphQLResponse? response + ) + : base(message, inner) { } - public class SpeckleGraphQLInternalErrorException : SpeckleGraphQLException - { - public SpeckleGraphQLInternalErrorException(GraphQLRequest request, GraphQLResponse response) - : base("Your request failed on the server side", request, response) { } - } + public IEnumerable ErrorMessages => + Response?.Errors != null ? Response.Errors.Select(e => e.Message) : new string[] { }; + + public IDictionary? Extensions => Response?.Extensions; + + public SpeckleGraphQLException() { } + + public SpeckleGraphQLException(string message) + : base(message) { } +} + +public class SpeckleGraphQLException : SpeckleGraphQLException +{ + public SpeckleGraphQLException( + string message, + GraphQLRequest request, + GraphQLResponse? response + ) + : base(message, request, response) { } + + public SpeckleGraphQLException() { } + + public SpeckleGraphQLException(string message) + : base(message) { } +} + +/// +/// Represents a "FORBIDDEN" on "UNAUTHORIZED" GraphQL error as an exception. 
+/// https://www.apollographql.com/docs/apollo-server/v2/data/errors/#unauthenticated +/// https://www.apollographql.com/docs/apollo-server/v2/data/errors/#forbidden +/// +public class SpeckleGraphQLForbiddenException : SpeckleGraphQLException +{ + public SpeckleGraphQLForbiddenException(GraphQLRequest request, GraphQLResponse response) + : base("Your request was forbidden", request, response) { } + + public SpeckleGraphQLForbiddenException() { } + + public SpeckleGraphQLForbiddenException(string message) + : base(message) { } +} + +public class SpeckleGraphQLInternalErrorException : SpeckleGraphQLException +{ + public SpeckleGraphQLInternalErrorException(GraphQLRequest request, GraphQLResponse response) + : base("Your request failed on the server side", request, response) { } + + public SpeckleGraphQLInternalErrorException() { } + + public SpeckleGraphQLInternalErrorException(string message) + : base(message) { } } diff --git a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ActivityOperations.cs b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ActivityOperations.cs index ce6d81ac77..1f3398186d 100644 --- a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ActivityOperations.cs +++ b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ActivityOperations.cs @@ -6,56 +6,66 @@ using System.Threading.Tasks; using GraphQL; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class Client { - public partial class Client + /// + /// Gets the activity of a stream + /// + /// Id of the stream to get the activity from + /// Only show activity after this DateTime + /// Only show activity before this DateTime + /// Time to filter the activity with + /// Time to filter the activity with + /// Max number of activity items to get + /// + public Task> StreamGetActivity( + string streamId, + DateTime? after = null, + DateTime? before = null, + DateTime? 
cursor = null, + string actionType = "", + int limit = 10 + ) { - /// - /// Gets the activity of a stream - /// - /// Id of the stream to get the activity from - /// Only show activity after this DateTime - /// Only show activity before this DateTime - /// Time to filter the activity with - /// Time to filter the activity with - /// Max number of activity items to get - /// - public Task> StreamGetActivity( - string streamId, - DateTime? after = null, - DateTime? before = null, - DateTime? cursor = null, - string actionType = "", - int limit = 10 - ) => - StreamGetActivity(CancellationToken.None, streamId, after, before, cursor, actionType, limit); + return StreamGetActivity( + CancellationToken.None, + streamId, + after, + before, + cursor, + actionType, + limit + ); + } - /// - /// Gets the activity of a stream - /// - /// - /// Id of the stream to get the activity from - /// Only show activity after this DateTime - /// Only show activity before this DateTime - /// Time to filter the activity with - /// Time to filter the activity with - /// Max number of commits to get - /// - /// - public async Task> StreamGetActivity( - CancellationToken cancellationToken, - string id, - DateTime? after = null, - DateTime? before = null, - DateTime? cursor = null, - string actionType = "", - int limit = 25 - ) + /// + /// Gets the activity of a stream + /// + /// + /// Id of the stream to get the activity from + /// Only show activity after this DateTime + /// Only show activity before this DateTime + /// Time to filter the activity with + /// Time to filter the activity with + /// Max number of commits to get + /// + /// + public async Task> StreamGetActivity( + CancellationToken cancellationToken, + string id, + DateTime? after = null, + DateTime? before = null, + DateTime? 
cursor = null, + string actionType = "", + int limit = 25 + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"query Stream($id: String!, $before: DateTime,$after: DateTime, $cursor: DateTime, $activity: String, $limit: Int!) { + Query = + @"query Stream($id: String!, $before: DateTime,$after: DateTime, $cursor: DateTime, $activity: String, $limit: Int!) { stream(id: $id) { activity (actionType: $activity, after: $after, before: $before, cursor: $cursor, limit: $limit) { totalCount @@ -73,19 +83,19 @@ public async Task> StreamGetActivity( } } }", - Variables = new - { - id, - limit, - actionType, - after, - before, - cursor - } - }; + Variables = new + { + id, + limit, + actionType, + after, + before, + cursor + } + }; - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.stream.activity.items; - } + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.stream.activity.items; } } diff --git a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.BranchOperations.cs b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.BranchOperations.cs index 53c816c7ed..6240d96ed6 100644 --- a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.BranchOperations.cs +++ b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.BranchOperations.cs @@ -6,43 +6,46 @@ using System.Threading.Tasks; using GraphQL; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class Client { - public partial class Client + /// + /// Get branches from a given stream + /// + /// Id of the stream to get the branches from + /// Max number of branches to retrieve + /// Max number of commits to retrieve + /// + public Task> StreamGetBranches( + string streamId, + int branchesLimit = 10, + int commitsLimit = 10 + ) + { + return StreamGetBranches(CancellationToken.None, streamId, branchesLimit, commitsLimit); + } + + /// + /// Get 
branches from a given stream + /// + /// + /// Id of the stream to get the branches from + /// Max number of branches to retrieve + /// Max number of commits to retrieve + /// + /// + public async Task> StreamGetBranches( + CancellationToken cancellationToken, + string streamId, + int branchesLimit = 10, + int commitsLimit = 10 + ) { - /// - /// Get branches from a given stream - /// - /// Id of the stream to get the branches from - /// Max number of branches to retrieve - /// Max number of commits to retrieve - /// - public Task> StreamGetBranches( - string streamId, - int branchesLimit = 10, - int commitsLimit = 10 - ) => StreamGetBranches(CancellationToken.None, streamId, branchesLimit, commitsLimit); - - /// - /// Get branches from a given stream - /// - /// - /// Id of the stream to get the branches from - /// Max number of branches to retrieve - /// Max number of commits to retrieve - /// - /// - public async Task> StreamGetBranches( - CancellationToken cancellationToken, - string streamId, - int branchesLimit = 10, - int commitsLimit = 10 - ) + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"query Stream ($streamId: String!) {{ + Query = + $@"query Stream ($streamId: String!) {{ stream(id: $streamId) {{ branches(limit: {branchesLimit}) {{ items {{ @@ -68,68 +71,74 @@ public async Task> StreamGetBranches( }} }} }}", - Variables = new { streamId } - }; - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.stream.branches.items; - } - - /// - /// Creates a branch on a stream. - /// - /// - /// The stream's id. - public Task BranchCreate(BranchCreateInput branchInput) => - BranchCreate(CancellationToken.None, branchInput); - - /// - /// Creates a branch on a stream. - /// - /// - /// The branch id. 
- public async Task BranchCreate( - CancellationToken cancellationToken, - BranchCreateInput branchInput - ) + Variables = new { streamId } + }; + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.stream.branches.items; + } + + /// + /// Creates a branch on a stream. + /// + /// + /// The stream's id. + public Task BranchCreate(BranchCreateInput branchInput) + { + return BranchCreate(CancellationToken.None, branchInput); + } + + /// + /// Creates a branch on a stream. + /// + /// + /// The branch id. + public async Task BranchCreate( + CancellationToken cancellationToken, + BranchCreateInput branchInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"mutation branchCreate($myBranch: BranchCreateInput!){ branchCreate(branch: $myBranch)}", - Variables = new { myBranch = branchInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (string)res["branchCreate"]; - } - - /// - /// Gets a given branch from a stream. - /// - /// Id of the stream to get the branch from - /// Name of the branch to get - /// - public Task BranchGet(string streamId, string branchName, int commitsLimit = 10) => - BranchGet(CancellationToken.None, streamId, branchName, commitsLimit); - - /// - /// Gets a given branch from a stream. - /// - /// - /// Id of the stream to get the branch from - /// Name of the branch to get - /// - public async Task BranchGet( - CancellationToken cancellationToken, - string streamId, - string branchName, - int commitsLimit = 10 - ) + Query = + @"mutation branchCreate($myBranch: BranchCreateInput!){ branchCreate(branch: $myBranch)}", + Variables = new { myBranch = branchInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (string)res["branchCreate"]; + } + + /// + /// Gets a given branch from a stream. 
+ /// + /// Id of the stream to get the branch from + /// Name of the branch to get + /// + public Task BranchGet(string streamId, string branchName, int commitsLimit = 10) + { + return BranchGet(CancellationToken.None, streamId, branchName, commitsLimit); + } + + /// + /// Gets a given branch from a stream. + /// + /// + /// Id of the stream to get the branch from + /// Name of the branch to get + /// + public async Task BranchGet( + CancellationToken cancellationToken, + string streamId, + string branchName, + int commitsLimit = 10 + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"query Stream($streamId: String!, $branchName: String!) {{ + Query = + $@"query Stream($streamId: String!, $branchName: String!) {{ stream(id: $streamId) {{ branch(name: $branchName){{ id, @@ -154,69 +163,75 @@ public async Task BranchGet( }} }} }}", - Variables = new { streamId, branchName } - }; - - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.stream.branch; - } - - /// - /// Updates a branch. - /// - /// - /// The stream's id. - public Task BranchUpdate(BranchUpdateInput branchInput) => - BranchUpdate(CancellationToken.None, branchInput); - - /// - /// Updates a branch. - /// - /// - /// The stream's id. - public async Task BranchUpdate( - CancellationToken cancellationToken, - BranchUpdateInput branchInput - ) + Variables = new { streamId, branchName } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.stream.branch; + } + + /// + /// Updates a branch. + /// + /// + /// The stream's id. + public Task BranchUpdate(BranchUpdateInput branchInput) + { + return BranchUpdate(CancellationToken.None, branchInput); + } + + /// + /// Updates a branch. + /// + /// + /// The stream's id. 
+ public async Task BranchUpdate( + CancellationToken cancellationToken, + BranchUpdateInput branchInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"mutation branchUpdate($myBranch: BranchUpdateInput!){ branchUpdate(branch: $myBranch)}", - Variables = new { myBranch = branchInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["branchUpdate"]; - } - - /// - /// Deletes a stream. - /// - /// - /// - public Task BranchDelete(BranchDeleteInput branchInput) => - BranchDelete(CancellationToken.None, branchInput); - - /// - /// Deletes a stream. - /// - /// - /// - public async Task BranchDelete( - CancellationToken cancellationToken, - BranchDeleteInput branchInput - ) + Query = + @"mutation branchUpdate($myBranch: BranchUpdateInput!){ branchUpdate(branch: $myBranch)}", + Variables = new { myBranch = branchInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["branchUpdate"]; + } + + /// + /// Deletes a stream. + /// + /// + /// + public Task BranchDelete(BranchDeleteInput branchInput) + { + return BranchDelete(CancellationToken.None, branchInput); + } + + /// + /// Deletes a stream. 
+ /// + /// + /// + public async Task BranchDelete( + CancellationToken cancellationToken, + BranchDeleteInput branchInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"mutation branchDelete($myBranch: BranchDeleteInput!){ branchDelete(branch: $myBranch)}", - Variables = new { myBranch = branchInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["branchDelete"]; - } + Query = + @"mutation branchDelete($myBranch: BranchDeleteInput!){ branchDelete(branch: $myBranch)}", + Variables = new { myBranch = branchInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["branchDelete"]; } } diff --git a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommentOperations.cs b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommentOperations.cs index 93200eff68..cfcea2bf61 100644 --- a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommentOperations.cs +++ b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommentOperations.cs @@ -5,43 +5,42 @@ using GraphQL; using Speckle.Core.Logging; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class Client { - public partial class Client + /// + /// Gets the comments on a Stream + /// + /// Id of the stream to get the comments from + /// The number of comments to get + /// Time to filter the comments with + /// + public Task StreamGetComments(string streamId, int limit = 25, string cursor = null) { - /// - /// Gets the comments on a Stream - /// - /// Id of the stream to get the comments from - /// The number of comments to get - /// Time to filter the comments with - /// - public Task StreamGetComments( - string streamId, - int limit = 25, - string cursor = null - ) => StreamGetComments(CancellationToken.None, streamId, limit, cursor); + return StreamGetComments(CancellationToken.None, 
streamId, limit, cursor); + } - /// - /// Gets the comments on a Stream - /// - /// - /// Id of the stream to get the comments from - /// The number of comments to get - /// Time to filter the comments with - /// - /// - public async Task StreamGetComments( - CancellationToken cancellationToken, - string streamId, - int limit = 25, - string cursor = null - ) + /// + /// Gets the comments on a Stream + /// + /// + /// Id of the stream to get the comments from + /// The number of comments to get + /// Time to filter the comments with + /// + /// + public async Task StreamGetComments( + CancellationToken cancellationToken, + string streamId, + int limit = 25, + string cursor = null + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"query Comments($streamId: String!, $cursor: String, $limit: Int!) { + Query = + @"query Comments($streamId: String!, $cursor: String, $limit: Int!) { comments(streamId: $streamId, cursor: $cursor, limit: $limit) { totalCount cursor @@ -76,56 +75,59 @@ public async Task StreamGetComments( } } }", - Variables = new - { - streamId, - cursor, - limit - } - }; + Variables = new + { + streamId, + cursor, + limit + } + }; - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.comments; - } + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.comments; + } - /// - /// Gets the screenshot of a Comment - /// - /// Id of the comment - /// Id of the stream to get the comment from - /// - public Task StreamGetCommentScreenshot(string id, string streamId) => - StreamGetCommentScreenshot(CancellationToken.None, id, streamId); + /// + /// Gets the screenshot of a Comment + /// + /// Id of the comment + /// Id of the stream to get the comment from + /// + public Task StreamGetCommentScreenshot(string id, string streamId) + { + return StreamGetCommentScreenshot(CancellationToken.None, id, streamId); + } - /// - /// Gets the 
screenshot of a Comment - /// - /// - /// Id of the comment - /// Id of the stream to get the comment from - /// - /// - public async Task StreamGetCommentScreenshot( - CancellationToken cancellationToken, - string id, - string streamId - ) + /// + /// Gets the screenshot of a Comment + /// + /// + /// Id of the comment + /// Id of the stream to get the comment from + /// + /// + public async Task StreamGetCommentScreenshot( + CancellationToken cancellationToken, + string id, + string streamId + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"query Comment($id: String!, $streamId: String!) { + Query = + @"query Comment($id: String!, $streamId: String!) { comment(id: $id, streamId: $streamId) { id screenshot } } ", - Variables = new { id, streamId } - }; + Variables = new { id, streamId } + }; - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.comment.screenshot; - } + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.comment.screenshot; } } diff --git a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommitOperations.cs b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommitOperations.cs index 923950e32c..d45f09190e 100644 --- a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommitOperations.cs +++ b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommitOperations.cs @@ -6,36 +6,38 @@ using System.Threading.Tasks; using GraphQL; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class Client { - public partial class Client + /// + /// Gets a given commit from a stream. + /// + /// Id of the stream to get the commit from + /// Id of the commit to get + /// + public Task CommitGet(string streamId, string commitId) { - /// - /// Gets a given commit from a stream. 
- /// - /// Id of the stream to get the commit from - /// Id of the commit to get - /// - public Task CommitGet(string streamId, string commitId) => - CommitGet(CancellationToken.None, streamId, commitId); - - /// - /// Gets a given commit from a stream. - /// - /// - /// Id of the stream to get the commit from - /// Id of the commit to get - /// - public async Task CommitGet( - CancellationToken cancellationToken, - string streamId, - string commitId - ) + return CommitGet(CancellationToken.None, streamId, commitId); + } + + /// + /// Gets a given commit from a stream. + /// + /// + /// Id of the stream to get the commit from + /// Id of the commit to get + /// + public async Task CommitGet( + CancellationToken cancellationToken, + string streamId, + string commitId + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"query Stream($streamId: String!, $commitId: String!) {{ + Query = + $@"query Stream($streamId: String!, $commitId: String!) {{ stream(id: $streamId) {{ commit(id: $commitId){{ id, @@ -50,40 +52,43 @@ string commitId }} }} }}", - Variables = new { streamId, commitId } - }; - - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.stream.commit; - } - - /// - /// Gets the latest commits from a stream - /// - /// Id of the stream to get the commits from - /// Max number of commits to get - /// - public Task> StreamGetCommits(string streamId, int limit = 10) => - StreamGetCommits(CancellationToken.None, streamId, limit); - - /// - /// Gets the latest commits from a stream - /// - /// - /// Id of the stream to get the commits from - /// Max number of commits to get - /// - /// - public async Task> StreamGetCommits( - CancellationToken cancellationToken, - string streamId, - int limit = 10 - ) + Variables = new { streamId, commitId } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.stream.commit; + } + + /// + /// Gets 
the latest commits from a stream + /// + /// Id of the stream to get the commits from + /// Max number of commits to get + /// + public Task> StreamGetCommits(string streamId, int limit = 10) + { + return StreamGetCommits(CancellationToken.None, streamId, limit); + } + + /// + /// Gets the latest commits from a stream + /// + /// + /// Id of the stream to get the commits from + /// Max number of commits to get + /// + /// + public async Task> StreamGetCommits( + CancellationToken cancellationToken, + string streamId, + int limit = 10 + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"query Stream($streamId: String!, $limit: Int!) { + Query = + @"query Stream($streamId: String!, $limit: Int!) { stream(id: $streamId) { commits(limit: $limit) { items { @@ -102,123 +107,134 @@ public async Task> StreamGetCommits( } } }", - Variables = new { streamId, limit } - }; - - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.stream.commits.items; - } - - /// - /// Creates a commit on a branch. - /// - /// - /// The commit id. - public Task CommitCreate(CommitCreateInput commitInput) => - CommitCreate(CancellationToken.None, commitInput); - - - /// - /// - public async Task CommitCreate( - CancellationToken cancellationToken, - CommitCreateInput commitInput - ) + Variables = new { streamId, limit } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.stream.commits.items; + } + + /// + /// Creates a commit on a branch. + /// + /// + /// The commit id. 
+ public Task CommitCreate(CommitCreateInput commitInput) + { + return CommitCreate(CancellationToken.None, commitInput); + } + + /// + /// + public async Task CommitCreate( + CancellationToken cancellationToken, + CommitCreateInput commitInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"mutation commitCreate($myCommit: CommitCreateInput!){ commitCreate(commit: $myCommit)}", - Variables = new { myCommit = commitInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (string)res["commitCreate"]; - } - - /// - /// Updates a commit. - /// - /// - /// The stream's id. - public Task CommitUpdate(CommitUpdateInput commitInput) => - CommitUpdate(CancellationToken.None, commitInput); - - /// - /// Updates a commit. - /// - /// - /// The stream's id. - public async Task CommitUpdate( - CancellationToken cancellationToken, - CommitUpdateInput commitInput - ) + Query = + @"mutation commitCreate($myCommit: CommitCreateInput!){ commitCreate(commit: $myCommit)}", + Variables = new { myCommit = commitInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (string)res["commitCreate"]; + } + + /// + /// Updates a commit. + /// + /// + /// The stream's id. + public Task CommitUpdate(CommitUpdateInput commitInput) + { + return CommitUpdate(CancellationToken.None, commitInput); + } + + /// + /// Updates a commit. + /// + /// + /// The stream's id. + public async Task CommitUpdate( + CancellationToken cancellationToken, + CommitUpdateInput commitInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"mutation commitUpdate($myCommit: CommitUpdateInput!){ commitUpdate(commit: $myCommit)}", - Variables = new { myCommit = commitInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["commitUpdate"]; - } - - /// - /// Deletes a commit. 
- /// - /// - /// - public Task CommitDelete(CommitDeleteInput commitInput) => - CommitDelete(CancellationToken.None, commitInput); - - /// - /// Deletes a commit. - /// - /// - /// - public async Task CommitDelete( - CancellationToken cancellationToken, - CommitDeleteInput commitInput - ) + Query = + @"mutation commitUpdate($myCommit: CommitUpdateInput!){ commitUpdate(commit: $myCommit)}", + Variables = new { myCommit = commitInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["commitUpdate"]; + } + + /// + /// Deletes a commit. + /// + /// + /// + public Task CommitDelete(CommitDeleteInput commitInput) + { + return CommitDelete(CancellationToken.None, commitInput); + } + + /// + /// Deletes a commit. + /// + /// + /// + public async Task CommitDelete( + CancellationToken cancellationToken, + CommitDeleteInput commitInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"mutation commitDelete($myCommit: CommitDeleteInput!){ commitDelete(commit: $myCommit)}", - Variables = new { myCommit = commitInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["commitDelete"]; - } - - /// - /// Sends a commitReceived mutation, affirming a commit has been received. 
- /// - /// Used for read receipts - /// - /// - public Task CommitReceived(CommitReceivedInput commitReceivedInput) => - CommitReceived(CancellationToken.None, commitReceivedInput); - - /// - /// - public async Task CommitReceived( - CancellationToken cancellationToken, - CommitReceivedInput commitReceivedInput - ) + Query = + @"mutation commitDelete($myCommit: CommitDeleteInput!){ commitDelete(commit: $myCommit)}", + Variables = new { myCommit = commitInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["commitDelete"]; + } + + /// + /// Sends a commitReceived mutation, affirming a commit has been received. + /// + /// Used for read receipts + /// + /// + public Task CommitReceived(CommitReceivedInput commitReceivedInput) + { + return CommitReceived(CancellationToken.None, commitReceivedInput); + } + + /// + /// + public async Task CommitReceived( + CancellationToken cancellationToken, + CommitReceivedInput commitReceivedInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = @"mutation($myInput:CommitReceivedInput!){ commitReceive(input:$myInput) }", - Variables = new { myInput = commitReceivedInput } - }; + Query = @"mutation($myInput:CommitReceivedInput!){ commitReceive(input:$myInput) }", + Variables = new { myInput = commitReceivedInput } + }; - var res = await ExecuteGraphQLRequest>(request, cancellationToken); + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); - return (bool)res["commitReceive"]; - } + return (bool)res["commitReceive"]; } } diff --git a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObjectOperations.cs b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObjectOperations.cs index c2b13b36b6..070e74e9e2 100644 --- a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObjectOperations.cs +++ 
b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObjectOperations.cs @@ -4,36 +4,38 @@ using System.Threading.Tasks; using GraphQL; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class Client { - public partial class Client + /// + /// Gets a given object from a stream. + /// + /// Id of the stream to get the object from + /// Id of the object to get + /// + public Task ObjectGet(string streamId, string objectId) { - /// - /// Gets a given object from a stream. - /// - /// Id of the stream to get the object from - /// Id of the object to get - /// - public Task ObjectGet(string streamId, string objectId) => - ObjectGet(CancellationToken.None, streamId, objectId); + return ObjectGet(CancellationToken.None, streamId, objectId); + } - /// - /// Gets a given object from a stream. - /// - /// - /// Id of the stream to get the object from - /// Id of the object to get - /// - public async Task ObjectGet( - CancellationToken cancellationToken, - string streamId, - string objectId - ) + /// + /// Gets a given object from a stream. + /// + /// + /// Id of the stream to get the object from + /// Id of the object to get + /// + public async Task ObjectGet( + CancellationToken cancellationToken, + string streamId, + string objectId + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"query Stream($streamId: String!, $objectId: String!) {{ + Query = + $@"query Stream($streamId: String!, $objectId: String!) {{ stream(id: $streamId) {{ object(id: $objectId){{ id @@ -43,50 +45,53 @@ string objectId }} }} }}", - Variables = new { streamId, objectId } - }; + Variables = new { streamId, objectId } + }; - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.stream.@object; - } + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.stream.@object; + } - /// - /// Gets a given object from a stream. 
- /// - /// - /// - /// - public Task ObjectCountGet(string streamId, string objectId) => - ObjectCountGet(CancellationToken.None, streamId, objectId); + /// + /// Gets a given object from a stream. + /// + /// + /// + /// + public Task ObjectCountGet(string streamId, string objectId) + { + return ObjectCountGet(CancellationToken.None, streamId, objectId); + } - /// - /// Gets a given object from a stream. - /// - /// - /// - /// - /// - public async Task ObjectCountGet( - CancellationToken cancellationToken, - string streamId, - string objectId - ) + /// + /// Gets a given object from a stream. + /// + /// + /// + /// + /// + public async Task ObjectCountGet( + CancellationToken cancellationToken, + string streamId, + string objectId + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"query Stream($streamId: String!, $objectId: String!) {{ + Query = + $@"query Stream($streamId: String!, $objectId: String!) {{ stream(id: $streamId) {{ object(id: $objectId){{ totalChildrenCount }} }} }}", - Variables = new { streamId, objectId } - }; + Variables = new { streamId, objectId } + }; - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.stream.@object; - } + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.stream.@object; } } diff --git a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ServerOperations.cs b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ServerOperations.cs index db44eea0dc..79196294a9 100644 --- a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ServerOperations.cs +++ b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ServerOperations.cs @@ -1,4 +1,4 @@ -#nullable enable +#nullable enable using System.Text.RegularExpressions; using System.Threading; @@ -6,35 +6,36 @@ using GraphQL; using Speckle.Core.Logging; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public 
partial class Client { - public partial class Client + /// + /// Gets the version of the current server. Useful for guarding against unsupported api calls on newer or older servers. + /// + /// [Optional] defaults to an empty cancellation token + /// object excluding any strings (eg "2.7.2-alpha.6995" becomes "2.7.2.6995") + /// + /// + public async Task GetServerVersion(CancellationToken cancellationToken = default) { - - /// - /// Gets the version of the current server. Useful for guarding against unsupported api calls on newer or older servers. - /// - /// [Optional] defaults to an empty cancellation token - /// object excluding any strings (eg "2.7.2-alpha.6995" becomes "2.7.2.6995") - /// - /// - public async Task GetServerVersion(CancellationToken cancellationToken = default) + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = @"query Server { + Query = + @"query Server { serverInfo { version } - }", - }; + }" + }; - var res = (await ExecuteGraphQLRequest(request, cancellationToken)); + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); - if (res.serverInfo.version.Contains("dev")) return new System.Version(999, 999, 999); + if (res.serverInfo.version.Contains("dev")) + return new System.Version(999, 999, 999); - ServerVersion = new System.Version(Regex.Replace(res.serverInfo.version, "[-a-zA-Z]+", "")); - return ServerVersion; - } + ServerVersion = new System.Version(Regex.Replace(res.serverInfo.version, "[-a-zA-Z]+", "")); + return ServerVersion; } } diff --git a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.StreamOperations.cs b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.StreamOperations.cs index 8173f07656..c5e33071be 100644 --- a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.StreamOperations.cs +++ b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.StreamOperations.cs @@ -5,37 +5,39 @@ using GraphQL; using 
Speckle.Core.Logging; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class Client { - public partial class Client + /// + /// Gets a stream by id including basic branch info (id, name, description, and total commit count). + /// For detailed commit and branch info, use StreamGetCommits and StreamGetBranches respectively. + /// + /// Id of the stream to get + /// Max number of branches to retrieve + /// + public Task StreamGet(string id, int branchesLimit = 10) { - /// - /// Gets a stream by id including basic branch info (id, name, description, and total commit count). - /// For detailed commit and branch info, use StreamGetCommits and StreamGetBranches respectively. - /// - /// Id of the stream to get - /// Max number of branches to retrieve - /// - public Task StreamGet(string id, int branchesLimit = 10) => - StreamGet(CancellationToken.None, id, branchesLimit); - - /// - /// Gets a stream by id including basic branch info (id, name, description, and total commit count). - /// For detailed commit and branch info, use StreamGetCommits and StreamGetBranches respectively. - /// - /// Id of the stream to get - /// Max number of branches to retrieve - /// - public async Task StreamGet( - CancellationToken cancellationToken, - string id, - int branchesLimit = 10 - ) + return StreamGet(CancellationToken.None, id, branchesLimit); + } + + /// + /// Gets a stream by id including basic branch info (id, name, description, and total commit count). + /// For detailed commit and branch info, use StreamGetCommits and StreamGetBranches respectively. + /// + /// Id of the stream to get + /// Max number of branches to retrieve + /// + public async Task StreamGet( + CancellationToken cancellationToken, + string id, + int branchesLimit = 10 + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"query Stream($id: String!) {{ + Query = + $@"query Stream($id: String!) 
{{ stream(id: $id) {{ id name @@ -67,30 +69,34 @@ public async Task StreamGet( }} }} }}", - Variables = new { id } - }; - return (await ExecuteGraphQLRequest(request, cancellationToken)).stream; - } - - /// - /// Gets all streams for the current user - /// - /// Max number of streams to return - /// - public Task> StreamsGet(int limit = 10) => - StreamsGet(CancellationToken.None, limit); - - /// - /// Gets all streams for the current user - /// - /// Max number of streams to return - /// - public async Task> StreamsGet(CancellationToken cancellationToken, int limit = 10) + Variables = new { id } + }; + return ( + await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false) + ).stream; + } + + /// + /// Gets all streams for the current user + /// + /// Max number of streams to return + /// + public Task> StreamsGet(int limit = 10) + { + return StreamsGet(CancellationToken.None, limit); + } + + /// + /// Gets all streams for the current user + /// + /// Max number of streams to return + /// + public async Task> StreamsGet(CancellationToken cancellationToken, int limit = 10) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"query User {{ + Query = + $@"query User {{ activeUser{{ id, email, @@ -125,36 +131,37 @@ public async Task> StreamsGet(CancellationToken cancellationToken, }} }} }}" - }; + }; - var res = await ExecuteGraphQLRequest(request, cancellationToken); + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); - if (res?.activeUser == null) - { - throw new SpeckleException( - "User is not authenticated, or the credentials were not valid. Check the provided account is still valid, remove it from manager and add it again." 
- ); - } - return res.activeUser.streams.items; - } - - public Task> FavoriteStreamsGet(int limit = 10) => - FavoriteStreamsGet(CancellationToken.None, limit); - - /// - /// Gets all favorite streams for the current user - /// - /// Max number of streams to return - /// - public async Task> FavoriteStreamsGet( - CancellationToken cancellationToken, - int limit = 10 - ) + if (res?.activeUser == null) + throw new SpeckleException( + "User is not authenticated, or the credentials were not valid. Check the provided account is still valid, remove it from manager and add it again." + ); + return res.activeUser.streams.items; + } + + public Task> FavoriteStreamsGet(int limit = 10) + { + return FavoriteStreamsGet(CancellationToken.None, limit); + } + + /// + /// Gets all favorite streams for the current user + /// + /// Max number of streams to return + /// + public async Task> FavoriteStreamsGet( + CancellationToken cancellationToken, + int limit = 10 + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"query User {{ + Query = + $@"query User {{ activeUser{{ id, email, @@ -189,38 +196,42 @@ public async Task> FavoriteStreamsGet( }} }} }}" - }; - return (await ExecuteGraphQLRequest(request, cancellationToken)) - .activeUser - .favoriteStreams - .items; - } - - /// - /// Searches the user's streams by name, description, and ID - /// - /// String query to search for - /// Max number of streams to return - /// - public Task> StreamSearch(string query, int limit = 10) => - StreamSearch(CancellationToken.None, query, limit); - - /// - /// Searches the user's streams by name, description, and ID - /// - /// String query to search for - /// Max number of streams to return - /// - public async Task> StreamSearch( - CancellationToken cancellationToken, - string query, - int limit = 10 + }; + return ( + await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false) ) + .activeUser + .favoriteStreams + .items; + } + + /// + 
/// Searches the user's streams by name, description, and ID + /// + /// String query to search for + /// Max number of streams to return + /// + public Task> StreamSearch(string query, int limit = 10) + { + return StreamSearch(CancellationToken.None, query, limit); + } + + /// + /// Searches the user's streams by name, description, and ID + /// + /// String query to search for + /// Max number of streams to return + /// + public async Task> StreamSearch( + CancellationToken cancellationToken, + string query, + int limit = 10 + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"query Streams ($query: String!, $limit: Int!) { + Query = + @"query Streams ($query: String!, $limit: Int!) { streams(query: $query, limit: $limit) { totalCount, cursor, @@ -242,216 +253,239 @@ public async Task> StreamSearch( } } }", - Variables = new { query, limit } - }; - - var res = await GQLClient - .SendMutationAsync(request, cancellationToken) - .ConfigureAwait(false); - return (await ExecuteGraphQLRequest(request, cancellationToken)).streams.items; - } - - /// - /// Creates a stream. - /// - /// - /// The stream's id. - public Task StreamCreate(StreamCreateInput streamInput) => - StreamCreate(CancellationToken.None, streamInput); - - /// - /// Creates a stream. - /// - /// - /// The stream's id. - public async Task StreamCreate( - CancellationToken cancellationToken, - StreamCreateInput streamInput + Variables = new { query, limit } + }; + + var res = await GQLClient + .SendMutationAsync(request, cancellationToken) + .ConfigureAwait(false); + return ( + await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false) ) + .streams + .items; + } + + /// + /// Creates a stream. + /// + /// + /// The stream's id. + public Task StreamCreate(StreamCreateInput streamInput) + { + return StreamCreate(CancellationToken.None, streamInput); + } + + /// + /// Creates a stream. + /// + /// + /// The stream's id. 
+ public async Task StreamCreate( + CancellationToken cancellationToken, + StreamCreateInput streamInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"mutation streamCreate($myStream: StreamCreateInput!) { streamCreate(stream: $myStream) }", - Variables = new { myStream = streamInput } - }; - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (string)res["streamCreate"]; - } - - /// - /// Updates a stream. - /// - /// Note: the id field needs to be a valid stream id. - /// The stream's id. - public Task StreamUpdate(StreamUpdateInput streamInput) => - StreamUpdate(CancellationToken.None, streamInput); - - /// - /// Updates a stream. - /// - /// - /// Note: the id field needs to be a valid stream id. - /// The stream's id. - public async Task StreamUpdate( - CancellationToken cancellationToken, - StreamUpdateInput streamInput - ) + Query = + @"mutation streamCreate($myStream: StreamCreateInput!) { streamCreate(stream: $myStream) }", + Variables = new { myStream = streamInput } + }; + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (string)res["streamCreate"]; + } + + /// + /// Updates a stream. + /// + /// Note: the id field needs to be a valid stream id. + /// The stream's id. + public Task StreamUpdate(StreamUpdateInput streamInput) + { + return StreamUpdate(CancellationToken.None, streamInput); + } + + /// + /// Updates a stream. + /// + /// + /// Note: the id field needs to be a valid stream id. + /// The stream's id. + public async Task StreamUpdate( + CancellationToken cancellationToken, + StreamUpdateInput streamInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"mutation streamUpdate($myStream: StreamUpdateInput!) 
{ streamUpdate(stream:$myStream) }", - Variables = new { myStream = streamInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - - return (bool)res["streamUpdate"]; - } - - /// - /// Deletes a stream. - /// - /// Id of the stream to be deleted - /// - public Task StreamDelete(string id) => StreamDelete(CancellationToken.None, id); - - /// - /// Deletes a stream. - /// - /// - /// Id of the stream to be deleted - /// - public async Task StreamDelete(CancellationToken cancellationToken, string id) + Query = + @"mutation streamUpdate($myStream: StreamUpdateInput!) { streamUpdate(stream:$myStream) }", + Variables = new { myStream = streamInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + + return (bool)res["streamUpdate"]; + } + + /// + /// Deletes a stream. + /// + /// Id of the stream to be deleted + /// + public Task StreamDelete(string id) + { + return StreamDelete(CancellationToken.None, id); + } + + /// + /// Deletes a stream. + /// + /// + /// Id of the stream to be deleted + /// + public async Task StreamDelete(CancellationToken cancellationToken, string id) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = @"mutation streamDelete($id: String!) { streamDelete(id:$id) }", - Variables = new { id } - }; - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["streamDelete"]; - } - - /// - /// Grants permissions to a user on a given stream. - /// - /// - /// - [Obsolete("Please use the `StreamUpdatePermission` method", true)] - public Task StreamGrantPermission(StreamPermissionInput permissionInput) => - StreamGrantPermission(CancellationToken.None, permissionInput); - - /// - /// Grants permissions to a user on a given stream. 
- /// - /// - /// - /// - [Obsolete("Please use the `StreamUpdatePermission` method", true)] - public async Task StreamGrantPermission( - CancellationToken cancellationToken, - StreamPermissionInput permissionInput - ) + Query = @"mutation streamDelete($id: String!) { streamDelete(id:$id) }", + Variables = new { id } + }; + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["streamDelete"]; + } + + /// + /// Grants permissions to a user on a given stream. + /// + /// + /// + [Obsolete("Please use the `StreamUpdatePermission` method", true)] + public Task StreamGrantPermission(StreamPermissionInput permissionInput) + { + return StreamGrantPermission(CancellationToken.None, permissionInput); + } + + /// + /// Grants permissions to a user on a given stream. + /// + /// + /// + /// + [Obsolete("Please use the `StreamUpdatePermission` method", true)] + public async Task StreamGrantPermission( + CancellationToken cancellationToken, + StreamPermissionInput permissionInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @" + Query = + @" mutation streamGrantPermission($permissionParams: StreamGrantPermissionInput!) { streamGrantPermission(permissionParams:$permissionParams) }", - Variables = new { permissionParams = permissionInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["streamGrantPermission"]; - } - - /// - /// Revokes permissions of a user on a given stream. - /// - /// - /// - public Task StreamRevokePermission(StreamRevokePermissionInput permissionInput) => - StreamRevokePermission(CancellationToken.None, permissionInput); - - /// - /// Revokes permissions of a user on a given stream. 
- /// - /// - /// - /// - public async Task StreamRevokePermission( - CancellationToken cancellationToken, - StreamRevokePermissionInput permissionInput - ) + Variables = new { permissionParams = permissionInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["streamGrantPermission"]; + } + + /// + /// Revokes permissions of a user on a given stream. + /// + /// + /// + public Task StreamRevokePermission(StreamRevokePermissionInput permissionInput) + { + return StreamRevokePermission(CancellationToken.None, permissionInput); + } + + /// + /// Revokes permissions of a user on a given stream. + /// + /// + /// + /// + public async Task StreamRevokePermission( + CancellationToken cancellationToken, + StreamRevokePermissionInput permissionInput + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"mutation streamRevokePermission($permissionParams: StreamRevokePermissionInput!) { + Query = + @"mutation streamRevokePermission($permissionParams: StreamRevokePermissionInput!) { streamRevokePermission(permissionParams: $permissionParams) }", - Variables = new { permissionParams = permissionInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["streamRevokePermission"]; - } - - /// - /// Updates permissions for a user on a given stream. - /// - /// includes the streamId, the userId of the user to update, and the user's new role - /// - /// - /// - public async Task StreamUpdatePermission( - StreamPermissionInput updatePermissionInput, - CancellationToken cancellationToken = default - ) + Variables = new { permissionParams = permissionInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["streamRevokePermission"]; + } + + /// + /// Updates permissions for a user on a given stream. 
+ /// + /// includes the streamId, the userId of the user to update, and the user's new role + /// + /// + /// + public async Task StreamUpdatePermission( + StreamPermissionInput updatePermissionInput, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @" + Query = + @" mutation streamUpdatePermission($permissionParams: StreamUpdatePermissionInput!) { streamUpdatePermission(permissionParams:$permissionParams) }", - Variables = new { permissionParams = updatePermissionInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["streamUpdatePermission"]; - } - - /// - /// Gets the pending collaborators of a stream by id. - /// Requires the user to be an owner of the stream. - /// - /// Id of the stream to get - /// - public Task StreamGetPendingCollaborators(string id) => - StreamGetPendingCollaborators(CancellationToken.None, id); - - /// - /// Gets the pending collaborators of a stream by id. - /// Requires the user to be an owner of the stream. - /// - /// Id of the stream to get - /// Max number of branches to retrieve - /// - public async Task StreamGetPendingCollaborators( - CancellationToken cancellationToken, - string id - ) + Variables = new { permissionParams = updatePermissionInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["streamUpdatePermission"]; + } + + /// + /// Gets the pending collaborators of a stream by id. + /// Requires the user to be an owner of the stream. + /// + /// Id of the stream to get + /// + public Task StreamGetPendingCollaborators(string id) + { + return StreamGetPendingCollaborators(CancellationToken.None, id); + } + + /// + /// Gets the pending collaborators of a stream by id. + /// Requires the user to be an owner of the stream. 
+ /// + /// Id of the stream to get + /// Max number of branches to retrieve + /// + public async Task StreamGetPendingCollaborators( + CancellationToken cancellationToken, + string id + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"query Stream($id: String!) {{ + Query = + $@"query Stream($id: String!) {{ stream(id: $id) {{ id pendingCollaborators {{ @@ -465,140 +499,145 @@ string id }} }} }}", - Variables = new { id } - }; - var res = await GQLClient - .SendMutationAsync(request, cancellationToken) - .ConfigureAwait(false); - return (await ExecuteGraphQLRequest(request, cancellationToken)).stream; - } - - /// - /// Sends an email invite to join a stream and assigns them a collaborator role. - /// - /// - /// - public Task StreamInviteCreate(StreamInviteCreateInput streamCreateInput) => - StreamInviteCreate(CancellationToken.None, streamCreateInput); - - /// - /// Sends an email invite to join a stream and assigns them a collaborator role. - /// - /// - /// - /// - public async Task StreamInviteCreate( - CancellationToken cancellationToken, - StreamInviteCreateInput inviteCreateInput - ) + Variables = new { id } + }; + var res = await GQLClient + .SendMutationAsync(request, cancellationToken) + .ConfigureAwait(false); + return ( + await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false) + ).stream; + } + + /// + /// Sends an email invite to join a stream and assigns them a collaborator role. + /// + /// + /// + public Task StreamInviteCreate(StreamInviteCreateInput streamCreateInput) + { + return StreamInviteCreate(CancellationToken.None, streamCreateInput); + } + + /// + /// Sends an email invite to join a stream and assigns them a collaborator role. 
+ /// + /// + /// + /// + public async Task StreamInviteCreate( + CancellationToken cancellationToken, + StreamInviteCreateInput inviteCreateInput + ) + { + if ((inviteCreateInput.email == null) & (inviteCreateInput.userId == null)) + throw new ArgumentException( + "You must provide either an email or a user id to create a stream invite" + ); + var request = new GraphQLRequest { - if (inviteCreateInput.email == null & inviteCreateInput.userId == null) - throw new ArgumentException( - "You must provide either an email or a user id to create a stream invite" - ); - var request = new GraphQLRequest - { - Query = - @" + Query = + @" mutation streamInviteCreate($input: StreamInviteCreateInput!) { streamInviteCreate(input: $input) }", - Variables = new { input = inviteCreateInput } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["streamInviteCreate"]; - } - - /// - /// Cancels an invite to join a stream. - /// - /// Id of the stream - /// Id of the invite to cancel - /// - /// - public async Task StreamInviteCancel( - string streamId, - string inviteId, - CancellationToken cancellationToken = default - ) + Variables = new { input = inviteCreateInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["streamInviteCreate"]; + } + + /// + /// Cancels an invite to join a stream. + /// + /// Id of the stream + /// Id of the invite to cancel + /// + /// + public async Task StreamInviteCancel( + string streamId, + string inviteId, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @" + Query = + @" mutation streamInviteCancel( $streamId: String!, $inviteId: String! 
) { streamInviteCancel(streamId: $streamId, inviteId: $inviteId) }", - Variables = new { streamId, inviteId } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["streamInviteCancel"]; - } - - /// - /// Checks if Speckle Server version is at least v2.6.4 meaning stream invites are supported. - /// - /// - /// true if invites are supported - /// if Speckle Server version is less than v2.6.4 - [Obsolete("We're not supporting 2.6.4 version any more", true)] - public async Task _CheckStreamInvitesSupported( - CancellationToken cancellationToken = default - ) - { - var version = ServerVersion ?? await GetServerVersion(cancellationToken); - if (version < new System.Version("2.6.4")) - throw new SpeckleException( - "Stream invites are only supported as of Speckle Server v2.6.4." - ); - return true; - } - - /// - /// Accept or decline a stream invite. - /// - /// - /// - /// - /// - /// - /// - public async Task StreamInviteUse( - string streamId, - string token, - bool accept = true, - CancellationToken cancellationToken = default - ) + Variables = new { streamId, inviteId } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["streamInviteCancel"]; + } + + /// + /// Checks if Speckle Server version is at least v2.6.4 meaning stream invites are supported. + /// + /// + /// true if invites are supported + /// if Speckle Server version is less than v2.6.4 + [Obsolete("We're not supporting 2.6.4 version any more", true)] + public async Task _CheckStreamInvitesSupported( + CancellationToken cancellationToken = default + ) + { + var version = ServerVersion ?? await GetServerVersion(cancellationToken).ConfigureAwait(false); + if (version < new System.Version("2.6.4")) + throw new SpeckleException("Stream invites are only supported as of Speckle Server v2.6.4."); + return true; + } + + /// + /// Accept or decline a stream invite. 
+ /// + /// + /// + /// + /// + /// + /// + public async Task StreamInviteUse( + string streamId, + string token, + bool accept = true, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @" + Query = + @" mutation streamInviteUse( $accept: Boolean!, $streamId: String!, $token: String! ) { streamInviteUse(accept: $accept, streamId: $streamId, token: $token) }", - Variables = new - { - streamId, - token, - accept - } - }; - - var res = await ExecuteGraphQLRequest>(request, cancellationToken); - return (bool)res["streamInviteUse"]; - } - - public async Task> GetAllPendingInvites( - CancellationToken cancellationToken = default - ) - { - var request = new GraphQLRequest + Variables = new { - Query = - @" + streamId, + token, + accept + } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken) + .ConfigureAwait(false); + return (bool)res["streamInviteUse"]; + } + + public async Task> GetAllPendingInvites( + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @" query StreamInvites { streamInvites{ id @@ -615,11 +654,11 @@ query StreamInvites { avatar } } - }", - }; + }" + }; - var res = await ExecuteGraphQLRequest(request, cancellationToken); - return res.streamInvites; - } + var res = await ExecuteGraphQLRequest(request, cancellationToken) + .ConfigureAwait(false); + return res.streamInvites; } } diff --git a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.UserOperations.cs b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.UserOperations.cs index 7f652e4de2..0ac05bbc90 100644 --- a/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.UserOperations.cs +++ b/Core/Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.UserOperations.cs @@ -4,28 +4,31 @@ using GraphQL; using Speckle.Core.Logging; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class 
Client { - public partial class Client + /// + /// Gets the currently active user profile. + /// + /// + public Task ActiveUserGet() { - /// - /// Gets the currently active user profile. - /// - /// - public Task ActiveUserGet() => ActiveUserGet(CancellationToken.None); + return ActiveUserGet(CancellationToken.None); + } - /// - /// Gets the currently active user profile. - /// - /// - /// - /// - public async Task ActiveUserGet(CancellationToken cancellationToken) + /// + /// Gets the currently active user profile. + /// + /// + /// + /// + public async Task ActiveUserGet(CancellationToken cancellationToken) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"query User { + Query = + @"query User { activeUser { id, email, @@ -38,30 +41,35 @@ public async Task ActiveUserGet(CancellationToken cancellationToken) role, } }" - }; - return (await ExecuteGraphQLRequest(request, cancellationToken)).activeUser; - } + }; + return ( + await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false) + ).activeUser; + } - /// - /// Get another user's profile by its user id. - /// - /// Id of the user you are looking for - /// - public Task OtherUserGet(string id) => OtherUserGet(CancellationToken.None, id); + /// + /// Get another user's profile by its user id. + /// + /// Id of the user you are looking for + /// + public Task OtherUserGet(string id) + { + return OtherUserGet(CancellationToken.None, id); + } - /// - /// Get another user's profile by its user id. - /// - /// - /// Id of the user you are looking for - /// - /// - public async Task OtherUserGet(CancellationToken cancellationToken, string id) + /// + /// Get another user's profile by its user id. 
+ /// + /// + /// Id of the user you are looking for + /// + /// + public async Task OtherUserGet(CancellationToken cancellationToken, string id) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"query LimitedUser($id: String!) { + Query = + @"query LimitedUser($id: String!) { otherUser(id: $id){ id, name, @@ -72,36 +80,40 @@ public async Task OtherUserGet(CancellationToken cancellationToken, role, } }", - Variables = new { id } - }; - return (await ExecuteGraphQLRequest(request, cancellationToken)).otherUser; - } + Variables = new { id } + }; + return ( + await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false) + ).otherUser; + } - /// - /// Searches for a user on the server. - /// - /// String to search for. Must be at least 3 characters - /// Max number of users to return - /// - public Task> UserSearch(string query, int limit = 10) => - UserSearch(CancellationToken.None, query: query, limit: limit); + /// + /// Searches for a user on the server. + /// + /// String to search for. Must be at least 3 characters + /// Max number of users to return + /// + public Task> UserSearch(string query, int limit = 10) + { + return UserSearch(CancellationToken.None, query, limit); + } - /// - /// Searches for a user on the server. - /// - /// String to search for. Must be at least 3 characters - /// Max number of users to return - /// - public async Task> UserSearch( - CancellationToken cancellationToken, - string query, - int limit = 10 - ) + /// + /// Searches for a user on the server. + /// + /// String to search for. Must be at least 3 characters + /// Max number of users to return + /// + public async Task> UserSearch( + CancellationToken cancellationToken, + string query, + int limit = 10 + ) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - @"query UserSearch($query: String!, $limit: Int!) { + Query = + @"query UserSearch($query: String!, $limit: Int!) 
{ userSearch(query: $query, limit: $limit) { cursor, items { @@ -115,11 +127,12 @@ public async Task> UserSearch( } } }", - Variables = new { query, limit } - }; - return (await ExecuteGraphQLRequest(request, cancellationToken)) - .userSearch - .items; - } + Variables = new { query, limit } + }; + return ( + await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false) + ) + .userSearch + .items; } } diff --git a/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Branch.cs b/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Branch.cs index 4c3ac01a04..7990033c04 100644 --- a/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Branch.cs +++ b/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Branch.cs @@ -1,4 +1,4 @@ -using System; +using System; using GraphQL; using Speckle.Core.Api.SubscriptionModels; @@ -7,93 +7,92 @@ // } -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class Client { - public partial class Client + #region BranchCreated + + public delegate void BranchCreatedHandler(object sender, BranchInfo e); + + public event BranchCreatedHandler OnBranchCreated; + public IDisposable BranchCreatedSubscription; + + /// + /// Subscribe to events of branch created for a stream + /// + /// + public void SubscribeBranchCreated(string streamId) { - #region BranchCreated - public delegate void BranchCreatedHandler(object sender, BranchInfo e); - public event BranchCreatedHandler OnBranchCreated; - public IDisposable BranchCreatedSubscription; - - /// - /// Subscribe to events of branch created for a stream - /// - /// - public void SubscribeBranchCreated(string streamId) + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = $@"subscription {{ branchCreated (streamId: ""{streamId}"") }}" - }; - - BranchCreatedSubscription = SubscribeTo( - request, - (sender, result) => OnBranchCreated?.Invoke(sender, result.branchCreated) - ); - } - - public 
bool HasSubscribedBranchCreated - { - get { return BranchCreatedSubscription != null; } - } - #endregion + Query = $@"subscription {{ branchCreated (streamId: ""{streamId}"") }}" + }; + BranchCreatedSubscription = SubscribeTo( + request, + (sender, result) => OnBranchCreated?.Invoke(sender, result.branchCreated) + ); + } - #region BranchUpdated - public delegate void BranchUpdatedHandler(object sender, BranchInfo e); - public event BranchUpdatedHandler OnBranchUpdated; - public IDisposable BranchUpdatedSubscription; + public bool HasSubscribedBranchCreated => BranchCreatedSubscription != null; - /// - /// Subscribe to events of branch updated for a stream - /// - /// - public void SubscribeBranchUpdated(string streamId, string branchId = null) - { - var request = new GraphQLRequest - { - Query = - $@"subscription {{ branchUpdated (streamId: ""{streamId}"", branchId: ""{branchId}"") }}" - }; - BranchUpdatedSubscription = SubscribeTo( - request, - (sender, result) => OnBranchUpdated?.Invoke(sender, result.branchUpdated) - ); - } - - public bool HasSubscribedBranchUpdated - { - get { return BranchUpdatedSubscription != null; } - } - #endregion - - #region BranchDeleted - public delegate void BranchDeletedHandler(object sender, BranchInfo e); - public event BranchDeletedHandler OnBranchDeleted; - public IDisposable BranchDeletedSubscription; - - /// - /// Subscribe to events of branch deleted for a stream - /// - /// - public void SubscribeBranchDeleted(string streamId) + #endregion + + + #region BranchUpdated + + public delegate void BranchUpdatedHandler(object sender, BranchInfo e); + + public event BranchUpdatedHandler OnBranchUpdated; + public IDisposable BranchUpdatedSubscription; + + /// + /// Subscribe to events of branch updated for a stream + /// + /// + public void SubscribeBranchUpdated(string streamId, string branchId = null) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = $@"subscription {{ branchDeleted (streamId: 
""{streamId}"") }}" - }; - - BranchDeletedSubscription = SubscribeTo( - request, - (sender, result) => OnBranchDeleted?.Invoke(sender, result.branchDeleted) - ); - } - - public bool HasSubscribedBranchDeleted + Query = + $@"subscription {{ branchUpdated (streamId: ""{streamId}"", branchId: ""{branchId}"") }}" + }; + BranchUpdatedSubscription = SubscribeTo( + request, + (sender, result) => OnBranchUpdated?.Invoke(sender, result.branchUpdated) + ); + } + + public bool HasSubscribedBranchUpdated => BranchUpdatedSubscription != null; + + #endregion + + #region BranchDeleted + + public delegate void BranchDeletedHandler(object sender, BranchInfo e); + + public event BranchDeletedHandler OnBranchDeleted; + public IDisposable BranchDeletedSubscription; + + /// + /// Subscribe to events of branch deleted for a stream + /// + /// + public void SubscribeBranchDeleted(string streamId) + { + var request = new GraphQLRequest { - get { return BranchDeletedSubscription != null; } - } - #endregion + Query = $@"subscription {{ branchDeleted (streamId: ""{streamId}"") }}" + }; + + BranchDeletedSubscription = SubscribeTo( + request, + (sender, result) => OnBranchDeleted?.Invoke(sender, result.branchDeleted) + ); } + + public bool HasSubscribedBranchDeleted => BranchDeletedSubscription != null; + + #endregion } diff --git a/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Commit.cs b/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Commit.cs index ed241acaf3..2b044ea6d4 100644 --- a/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Commit.cs +++ b/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Commit.cs @@ -1,94 +1,93 @@ -using System; +using System; using GraphQL; using Speckle.Core.Api.SubscriptionModels; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class Client { - public partial class Client + #region CommitCreated + + public delegate void CommitCreatedHandler(object sender, CommitInfo e); + 
+ public event CommitCreatedHandler OnCommitCreated; + public IDisposable CommitCreatedSubscription; + + /// + /// Subscribe to events of commit created for a stream + /// + /// + public void SubscribeCommitCreated(string streamId) { - #region CommitCreated - public delegate void CommitCreatedHandler(object sender, CommitInfo e); - public event CommitCreatedHandler OnCommitCreated; - public IDisposable CommitCreatedSubscription; - - /// - /// Subscribe to events of commit created for a stream - /// - /// - public void SubscribeCommitCreated(string streamId) - { - var request = new GraphQLRequest - { - Query = $@"subscription {{ commitCreated (streamId: ""{streamId}"") }}" - }; - - CommitCreatedSubscription = SubscribeTo( - request, - (sender, result) => OnCommitCreated?.Invoke(sender, result.commitCreated) - ); - } - - public bool HasSubscribedCommitCreated - { - get { return CommitCreatedSubscription != null; } - } - #endregion - - #region CommitUpdated - public delegate void CommitUpdatedHandler(object sender, CommitInfo e); - public event CommitUpdatedHandler OnCommitUpdated; - public IDisposable CommitUpdatedSubscription; - - /// - /// Subscribe to events of commit updated for a stream - /// - /// - public void SubscribeCommitUpdated(string streamId, string commitId = null) - { - var request = new GraphQLRequest - { - Query = - $@"subscription {{ commitUpdated (streamId: ""{streamId}"", commitId: ""{commitId}"") }}" - }; - - var res = GQLClient.CreateSubscriptionStream(request); - CommitUpdatedSubscription = SubscribeTo( - request, - (sender, result) => OnCommitUpdated?.Invoke(sender, result.commitUpdated) - ); - } - - public bool HasSubscribedCommitUpdated + var request = new GraphQLRequest { - get { return CommitUpdatedSubscription != null; } - } - #endregion - - #region CommitDeleted - public delegate void CommitDeletedHandler(object sender, CommitInfo e); - public event CommitDeletedHandler OnCommitDeleted; - public IDisposable CommitDeletedSubscription; - 
- /// - /// Subscribe to events of commit updated for a stream - /// - /// - public void SubscribeCommitDeleted(string streamId) + Query = $@"subscription {{ commitCreated (streamId: ""{streamId}"") }}" + }; + + CommitCreatedSubscription = SubscribeTo( + request, + (sender, result) => OnCommitCreated?.Invoke(sender, result.commitCreated) + ); + } + + public bool HasSubscribedCommitCreated => CommitCreatedSubscription != null; + + #endregion + + #region CommitUpdated + + public delegate void CommitUpdatedHandler(object sender, CommitInfo e); + + public event CommitUpdatedHandler OnCommitUpdated; + public IDisposable CommitUpdatedSubscription; + + /// + /// Subscribe to events of commit updated for a stream + /// + /// + public void SubscribeCommitUpdated(string streamId, string commitId = null) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = $@"subscription {{ commitDeleted (streamId: ""{streamId}"") }}" - }; - CommitDeletedSubscription = SubscribeTo( - request, - (sender, result) => OnCommitDeleted?.Invoke(sender, result.commitDeleted) - ); - } - - public bool HasSubscribedCommitDeleted + Query = + $@"subscription {{ commitUpdated (streamId: ""{streamId}"", commitId: ""{commitId}"") }}" + }; + + var res = GQLClient.CreateSubscriptionStream(request); + CommitUpdatedSubscription = SubscribeTo( + request, + (sender, result) => OnCommitUpdated?.Invoke(sender, result.commitUpdated) + ); + } + + public bool HasSubscribedCommitUpdated => CommitUpdatedSubscription != null; + + #endregion + + #region CommitDeleted + + public delegate void CommitDeletedHandler(object sender, CommitInfo e); + + public event CommitDeletedHandler OnCommitDeleted; + public IDisposable CommitDeletedSubscription; + + /// + /// Subscribe to events of commit updated for a stream + /// + /// + public void SubscribeCommitDeleted(string streamId) + { + var request = new GraphQLRequest { - get { return CommitDeletedSubscription != null; } - } - #endregion + 
Query = $@"subscription {{ commitDeleted (streamId: ""{streamId}"") }}" + }; + CommitDeletedSubscription = SubscribeTo( + request, + (sender, result) => OnCommitDeleted?.Invoke(sender, result.commitDeleted) + ); } + + public bool HasSubscribedCommitDeleted => CommitDeletedSubscription != null; + + #endregion } diff --git a/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Stream.cs b/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Stream.cs index 5180330014..0b7f76801e 100644 --- a/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Stream.cs +++ b/Core/Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Stream.cs @@ -1,117 +1,113 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; +using System; using GraphQL; -using Sentry; using Speckle.Core.Api.SubscriptionModels; -using Speckle.Core.Logging; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public partial class Client { - public partial class Client + #region UserStreamAdded + + public delegate void UserStreamAddedHandler(object sender, StreamInfo e); + + public event UserStreamAddedHandler OnUserStreamAdded; + public IDisposable UserStreamAddedSubscription; + + /// + /// Subscribe to events of streams added for the current user + /// + /// + public void SubscribeUserStreamAdded() { - #region UserStreamAdded - public delegate void UserStreamAddedHandler(object sender, StreamInfo e); - public event UserStreamAddedHandler OnUserStreamAdded; - public IDisposable UserStreamAddedSubscription; - - /// - /// Subscribe to events of streams added for the current user - /// - /// - public void SubscribeUserStreamAdded() - { - var request = new GraphQLRequest { Query = @"subscription { userStreamAdded }" }; + var request = new GraphQLRequest { Query = @"subscription { userStreamAdded }" }; - UserStreamAddedSubscription = SubscribeTo( - request, - (sender, result) => OnUserStreamAdded?.Invoke(sender, 
result.userStreamAdded) - ); - } + UserStreamAddedSubscription = SubscribeTo( + request, + (sender, result) => OnUserStreamAdded?.Invoke(sender, result.userStreamAdded) + ); + } - public bool HasSubscribedUserStreamAdded - { - get { return UserStreamAddedSubscription != null; } - } - #endregion - #region StreamUpdated - public delegate void StreamUpdatedHandler(object sender, StreamInfo e); - public event StreamUpdatedHandler OnStreamUpdated; - public IDisposable StreamUpdatedSubscription; - - /// - /// Subscribe to events of streams updated for a specific streamId - /// - /// streamId - public void SubscribeStreamUpdated(string id) - { - var request = new GraphQLRequest - { - Query = $@"subscription {{ streamUpdated( streamId: ""{id}"") }}", - }; - StreamUpdatedSubscription = SubscribeTo( - request, - (sender, result) => OnStreamUpdated?.Invoke(sender, result.streamUpdated) - ); - } - - public bool HasSubscribedStreamUpdated - { - get { return StreamUpdatedSubscription != null; } - } - #endregion - #region StreamRemoved - public delegate void UserStreamRemovedHandler(object sender, StreamInfo e); - public event UserStreamRemovedHandler OnUserStreamRemoved; - public IDisposable UserStreamRemovedSubscription; - - /// - /// Subscribe to events of streams removed for the current user - /// - /// - public void SubscribeUserStreamRemoved() - { - var request = new GraphQLRequest { Query = $@"subscription {{ userStreamRemoved }}", }; + public bool HasSubscribedUserStreamAdded => UserStreamAddedSubscription != null; - UserStreamRemovedSubscription = SubscribeTo( - request, - (sender, result) => OnUserStreamRemoved?.Invoke(sender, result.userStreamRemoved) - ); - } + #endregion - public bool HasSubscribedUserStreamRemoved - { - get { return UserStreamRemovedSubscription != null; } - } - #endregion - - #region CommentActivity - public delegate void CommentActivityHandler(object sender, CommentItem e); - public event CommentActivityHandler OnCommentActivity; - public 
IDisposable CommentActivitySubscription; - - /// - /// Subscribe to new comment events - /// - /// - public void SubscribeCommentActivity(string streamId) + #region StreamUpdated + + public delegate void StreamUpdatedHandler(object sender, StreamInfo e); + + public event StreamUpdatedHandler OnStreamUpdated; + public IDisposable StreamUpdatedSubscription; + + /// + /// Subscribe to events of streams updated for a specific streamId + /// + /// streamId + public void SubscribeStreamUpdated(string id) + { + var request = new GraphQLRequest { - var request = new GraphQLRequest - { - Query = - $@"subscription {{ commentActivity( streamId: ""{streamId}"") {{ type comment {{ id authorId archived screenshot rawText }} }} }}", - }; - CommentActivitySubscription = SubscribeTo( - request, - (sender, result) => OnCommentActivity?.Invoke(sender, result.commentActivity.comment) - ); - } - - public bool HasSubscribedCommentActivity + Query = $@"subscription {{ streamUpdated( streamId: ""{id}"") }}" + }; + StreamUpdatedSubscription = SubscribeTo( + request, + (sender, result) => OnStreamUpdated?.Invoke(sender, result.streamUpdated) + ); + } + + public bool HasSubscribedStreamUpdated => StreamUpdatedSubscription != null; + + #endregion + + #region StreamRemoved + + public delegate void UserStreamRemovedHandler(object sender, StreamInfo e); + + public event UserStreamRemovedHandler OnUserStreamRemoved; + public IDisposable UserStreamRemovedSubscription; + + /// + /// Subscribe to events of streams removed for the current user + /// + /// + public void SubscribeUserStreamRemoved() + { + var request = new GraphQLRequest { Query = $@"subscription {{ userStreamRemoved }}" }; + + UserStreamRemovedSubscription = SubscribeTo( + request, + (sender, result) => OnUserStreamRemoved?.Invoke(sender, result.userStreamRemoved) + ); + } + + public bool HasSubscribedUserStreamRemoved => UserStreamRemovedSubscription != null; + + #endregion + + #region CommentActivity + + public delegate void 
CommentActivityHandler(object sender, CommentItem e); + + public event CommentActivityHandler OnCommentActivity; + public IDisposable CommentActivitySubscription; + + /// + /// Subscribe to new comment events + /// + /// + public void SubscribeCommentActivity(string streamId) + { + var request = new GraphQLRequest { - get { return CommentActivitySubscription != null; } - } - #endregion + Query = + $@"subscription {{ commentActivity( streamId: ""{streamId}"") {{ type comment {{ id authorId archived screenshot rawText }} }} }}" + }; + CommentActivitySubscription = SubscribeTo( + request, + (sender, result) => OnCommentActivity?.Invoke(sender, result.commentActivity.comment) + ); } + + public bool HasSubscribedCommentActivity => CommentActivitySubscription != null; + + #endregion } diff --git a/Core/Core/Api/GraphQL/Client.cs b/Core/Core/Api/GraphQL/Client.cs index bc237a5752..0106e5c7ef 100644 --- a/Core/Core/Api/GraphQL/Client.cs +++ b/Core/Core/Api/GraphQL/Client.cs @@ -1,408 +1,392 @@ -# nullable enable +# nullable enable using System; +using System.Collections.Generic; using System.Collections.Specialized; +using System.Diagnostics; +using System.Dynamic; +using System.Linq; using System.Net.Http; using System.Net.WebSockets; using System.Reflection; +using System.Threading; using System.Threading.Tasks; -using GraphQL.Client.Http; -using Speckle.Core.Api.GraphQL.Serializer; -using Speckle.Core.Credentials; -using Speckle.Core.Helpers; -using Speckle.Core.Logging; -using Speckle.Newtonsoft.Json; using GraphQL; +using GraphQL.Client.Http; using Polly; -using Serilog; using Polly.Contrib.WaitAndRetry; -using System.Threading; -using System.Linq; -using System.Collections.Generic; -using System.Diagnostics; using Serilog.Context; using Serilog.Core; using Serilog.Core.Enrichers; -using System.Dynamic; +using Speckle.Core.Api.GraphQL.Serializer; +using Speckle.Core.Credentials; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Newtonsoft.Json; 
-namespace Speckle.Core.Api -{ - public partial class Client : IDisposable - { - public string ServerUrl => Account.serverInfo.url; +namespace Speckle.Core.Api; - public string ApiToken => Account.token; +public partial class Client : IDisposable +{ + public Client() { } - public System.Version ServerVersion { get; set; } + public Client(Account account) + { + if (account == null) + throw new SpeckleException($"Provided account is null."); - [JsonIgnore] - public Account Account { get; set; } + Account = account; - HttpClient HttpClient { get; set; } + HttpClient = Http.GetHttpProxyClient(); - public GraphQLHttpClient GQLClient { get; set; } + if (account.token.ToLowerInvariant().Contains("bearer")) + HttpClient.DefaultRequestHeaders.Add("Authorization", account.token); + else + HttpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {account.token}"); - public object UploadValues(string v1, string v2, NameValueCollection user_1) - { - throw new NotImplementedException(); - } + HttpClient.DefaultRequestHeaders.Add("apollographql-client-name", Setup.HostApplication); + HttpClient.DefaultRequestHeaders.Add( + "apollographql-client-version", + Assembly.GetExecutingAssembly().GetName().Version.ToString() + ); - public Client() { } + GQLClient = new GraphQLHttpClient( + new GraphQLHttpClientOptions + { + EndPoint = new Uri(new Uri(account.serverInfo.url), "/graphql"), + UseWebSocketForQueriesAndMutations = false, + ConfigureWebSocketConnectionInitPayload = (opts) => + { + return new { Authorization = $"Bearer {account.token}" }; + }, + OnWebsocketConnected = OnWebSocketConnect + }, + new NewtonsoftJsonSerializer(), + HttpClient + ); - public Client(Account account) + GQLClient.WebSocketReceiveErrors.Subscribe(e => { - if (account == null) - throw new SpeckleException($"Provided account is null."); + if (e is WebSocketException we) + Console.WriteLine( + $"WebSocketException: {we.Message} (WebSocketError {we.WebSocketErrorCode}, ErrorCode {we.ErrorCode}, 
NativeErrorCode {we.NativeErrorCode}" + ); + else + Console.WriteLine($"Exception in websocket receive stream: {e.ToString()}"); + }); + } - Account = account; + public string ServerUrl => Account.serverInfo.url; - HttpClient = Http.GetHttpProxyClient(); + public string ApiToken => Account.token; - if (account.token.ToLowerInvariant().Contains("bearer")) - { - HttpClient.DefaultRequestHeaders.Add("Authorization", account.token); - } - else - { - HttpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {account.token}"); - } + public System.Version ServerVersion { get; set; } - HttpClient.DefaultRequestHeaders.Add("apollographql-client-name", Setup.HostApplication); - HttpClient.DefaultRequestHeaders.Add( - "apollographql-client-version", - Assembly.GetExecutingAssembly().GetName().Version.ToString() - ); + [JsonIgnore] + public Account Account { get; set; } - GQLClient = new GraphQLHttpClient( - new GraphQLHttpClientOptions - { - EndPoint = new Uri(new Uri(account.serverInfo.url), "/graphql"), - UseWebSocketForQueriesAndMutations = false, - ConfigureWebSocketConnectionInitPayload = (opts) => - { - return new { Authorization = $"Bearer {account.token}" }; - }, - OnWebsocketConnected = OnWebSocketConnect, - }, - new NewtonsoftJsonSerializer(), - HttpClient - ); + private HttpClient HttpClient { get; set; } - GQLClient.WebSocketReceiveErrors.Subscribe(e => - { - if (e is WebSocketException we) - Console.WriteLine( - $"WebSocketException: {we.Message} (WebSocketError {we.WebSocketErrorCode}, ErrorCode {we.ErrorCode}, NativeErrorCode {we.NativeErrorCode}" - ); - else - Console.WriteLine($"Exception in websocket receive stream: {e.ToString()}"); - }); - } + public GraphQLHttpClient GQLClient { get; set; } - public Task OnWebSocketConnect(GraphQLHttpClient client) + public void Dispose() + { + try { - return Task.CompletedTask; + UserStreamAddedSubscription?.Dispose(); + UserStreamRemovedSubscription?.Dispose(); + StreamUpdatedSubscription?.Dispose(); + 
BranchCreatedSubscription?.Dispose(); + BranchUpdatedSubscription?.Dispose(); + BranchDeletedSubscription?.Dispose(); + CommitCreatedSubscription?.Dispose(); + CommitUpdatedSubscription?.Dispose(); + CommitDeletedSubscription?.Dispose(); + CommentActivitySubscription?.Dispose(); + GQLClient?.Dispose(); } + catch { } + } - public void Dispose() - { - try - { - UserStreamAddedSubscription?.Dispose(); - UserStreamRemovedSubscription?.Dispose(); - StreamUpdatedSubscription?.Dispose(); - BranchCreatedSubscription?.Dispose(); - BranchUpdatedSubscription?.Dispose(); - BranchDeletedSubscription?.Dispose(); - CommitCreatedSubscription?.Dispose(); - CommitUpdatedSubscription?.Dispose(); - CommitDeletedSubscription?.Dispose(); - CommentActivitySubscription?.Dispose(); - GQLClient?.Dispose(); - } - catch { } - } + public object UploadValues(string v1, string v2, NameValueCollection user_1) + { + throw new NotImplementedException(); + } - internal async Task ExecuteWithResiliencePolicies(Func> func) - { - // TODO: handle these in the HttpClient factory with a custom RequestHandler class - // 408 Request Timeout - // 425 Too Early - // 429 Too Many Requests - // 500 Internal Server Error - // 502 Bad Gateway - // 503 Service Unavailable - // 504 Gateway Timeout + public Task OnWebSocketConnect(GraphQLHttpClient client) + { + return Task.CompletedTask; + } + + internal async Task ExecuteWithResiliencePolicies(Func> func) + { + // TODO: handle these in the HttpClient factory with a custom RequestHandler class + // 408 Request Timeout + // 425 Too Early + // 429 Too Many Requests + // 500 Internal Server Error + // 502 Bad Gateway + // 503 Service Unavailable + // 504 Gateway Timeout - var delay = Backoff.DecorrelatedJitterBackoffV2( - medianFirstRetryDelay: TimeSpan.FromSeconds(1), - retryCount: 5 + var delay = Backoff.DecorrelatedJitterBackoffV2(TimeSpan.FromSeconds(1), 5); + var graphqlRetry = Policy + .Handle>() + .WaitAndRetryAsync( + delay, + (ex, timeout, context) => + { + 
var graphqlEx = ex as SpeckleGraphQLException; + SpeckleLog.Logger + .ForContext("graphqlExtensions", graphqlEx.Extensions) + .ForContext("graphqlErrorMessages", graphqlEx.ErrorMessages) + .Warning( + ex, + "The previous attempt at executing function to get {resultType} failed with {exceptionMessage}. Retrying after {timeout}.", + typeof(T).Name, + ex.Message, + timeout + ); + } ); - var graphqlRetry = Policy - .Handle>() - .WaitAndRetryAsync( - delay, - onRetry: (ex, timeout, context) => - { - var graphqlEx = ex as SpeckleGraphQLException; - SpeckleLog.Logger.ForContext("graphqlExtensions", graphqlEx.Extensions) - .ForContext("graphqlErrorMessages", graphqlEx.ErrorMessages) - .Warning( - ex, - "The previous attempt at executing function to get {resultType} failed with {exceptionMessage}. Retrying after {timeout}.", - typeof(T).Name, - ex.Message, - timeout - ); - } - ); - return await graphqlRetry.ExecuteAsync(func); - } + return await graphqlRetry.ExecuteAsync(func).ConfigureAwait(false); + } - internal async Task ExecuteGraphQLRequest( - GraphQLRequest request, - CancellationToken? cancellationToken - ) + internal async Task ExecuteGraphQLRequest( + GraphQLRequest request, + CancellationToken? cancellationToken + ) + { + using (LogContext.Push(_createEnrichers(request))) { - using (LogContext.Push(_createEnrichers(request))) + SpeckleLog.Logger.Debug( + "Starting execution of graphql request to get {resultType}", + typeof(T).Name + ); + var timer = new Stopwatch(); + var success = false; + timer.Start(); + try { - SpeckleLog.Logger.Debug("Starting execution of graphql request to get {resultType}", typeof(T).Name); - var timer = new Stopwatch(); - var success = false; - timer.Start(); - try - { - var result = await ExecuteWithResiliencePolicies(async () => + var result = await ExecuteWithResiliencePolicies(async () => { var result = await GQLClient .SendMutationAsync(request, cancellationToken ?? 
CancellationToken.None) .ConfigureAwait(false); MaybeThrowFromGraphQLErrors(request, result); return result.Data; - }); - success = true; - return result; - } - // cancellations are bubbling up with no logging - catch (OperationCanceledException) - { - throw; - } - // we catch forbidden to rethrow, making sure its not logged. - catch (SpeckleGraphQLForbiddenException) - { - throw; - } - // anything else related to graphql gets logged - catch (SpeckleGraphQLException gqlException) - { - SpeckleLog.Logger.ForContext("graphqlResponse", gqlException.Response) - .ForContext("graphqlExtensions", gqlException.Extensions) - .ForContext("graphqlErrorMessages", gqlException.ErrorMessages.ToList()) - .Warning( - gqlException, - "Execution of the graphql request to get {resultType} failed with {graphqlExceptionType} {exceptionMessage}.", - typeof(T).Name, - gqlException.GetType().Name, - gqlException.Message - ); - throw; - } - // we log and wrap anything that is not a graphql exception. - // this makes sure, that any graphql operation only throws SpeckleGraphQLExceptions - catch (Exception ex) - { - SpeckleLog.Logger.Warning( - ex, - "Execution of the graphql request to get {resultType} failed without a graphql response. Cause {exceptionMessage}", - typeof(T).Name, - ex.Message - ); - throw new SpeckleGraphQLException( - "The graphql request failed without a graphql response", - ex, - request, - null - ); - } - finally - { - // this is a performance metric log operation - // this makes sure that both success and failed operations report - // the same performance log - timer.Stop(); - var status = success ? 
"succeeded" : "failed"; - SpeckleLog.Logger.Information( - "Execution of graphql request to get {resultType} {resultStatus} after {elapsed} seconds", + }) + .ConfigureAwait(false); + success = true; + return result; + } + // cancellations are bubbling up with no logging + catch (OperationCanceledException) + { + throw; + } + // we catch forbidden to rethrow, making sure its not logged. + catch (SpeckleGraphQLForbiddenException) + { + throw; + } + // anything else related to graphql gets logged + catch (SpeckleGraphQLException gqlException) + { + SpeckleLog.Logger + .ForContext("graphqlResponse", gqlException.Response) + .ForContext("graphqlExtensions", gqlException.Extensions) + .ForContext("graphqlErrorMessages", gqlException.ErrorMessages.ToList()) + .Warning( + gqlException, + "Execution of the graphql request to get {resultType} failed with {graphqlExceptionType} {exceptionMessage}.", typeof(T).Name, - status, - timer.Elapsed.TotalSeconds + gqlException.GetType().Name, + gqlException.Message ); - } + throw; + } + // we log and wrap anything that is not a graphql exception. + // this makes sure, that any graphql operation only throws SpeckleGraphQLExceptions + catch (Exception ex) + { + SpeckleLog.Logger.Warning( + ex, + "Execution of the graphql request to get {resultType} failed without a graphql response. Cause {exceptionMessage}", + typeof(T).Name, + ex.Message + ); + throw new SpeckleGraphQLException( + "The graphql request failed without a graphql response", + ex, + request, + null + ); + } + finally + { + // this is a performance metric log operation + // this makes sure that both success and failed operations report + // the same performance log + timer.Stop(); + var status = success ? 
"succeeded" : "failed"; + SpeckleLog.Logger.Information( + "Execution of graphql request to get {resultType} {resultStatus} after {elapsed} seconds", + typeof(T).Name, + status, + timer.Elapsed.TotalSeconds + ); } } + } - internal void MaybeThrowFromGraphQLErrors( - GraphQLRequest request, - GraphQLResponse response - ) + internal void MaybeThrowFromGraphQLErrors(GraphQLRequest request, GraphQLResponse response) + { + // The errors reflect the Apollo server v2 API, which is deprecated. It is bound to change, + // once we migrate to a newer version. + var errors = response.Errors; + if (errors != null && errors.Any()) { - // The errors reflect the Apollo server v2 API, which is deprecated. It is bound to change, - // once we migrate to a newer version. - var errors = response.Errors; - if (errors != null && errors.Any()) - { - var errorMessages = errors.Select(e => e.Message); - if ( - errors.Any( - e => - e.Extensions != null - && ( - e.Extensions.Contains(new KeyValuePair("code", "FORBIDDEN")) - || e.Extensions.Contains( - new KeyValuePair("code", "UNAUTHENTICATED") - ) - ) - ) + var errorMessages = errors.Select(e => e.Message); + if ( + errors.Any( + e => + e.Extensions != null + && ( + e.Extensions.Contains(new KeyValuePair("code", "FORBIDDEN")) + || e.Extensions.Contains(new KeyValuePair("code", "UNAUTHENTICATED")) + ) ) - throw new SpeckleGraphQLForbiddenException(request, response); + ) + throw new SpeckleGraphQLForbiddenException(request, response); - if ( - errors.Any( - e => - e.Extensions != null - && e.Extensions.Contains( - new KeyValuePair("code", "INTERNAL_SERVER_ERROR") - ) - ) + if ( + errors.Any( + e => + e.Extensions != null + && e.Extensions.Contains( + new KeyValuePair("code", "INTERNAL_SERVER_ERROR") + ) ) - throw new SpeckleGraphQLInternalErrorException(request, response); + ) + throw new SpeckleGraphQLInternalErrorException(request, response); - throw new SpeckleGraphQLException("Request failed with errors", request, response); - } + throw 
new SpeckleGraphQLException("Request failed with errors", request, response); } + } - private Dictionary _convertExpandoToDict(ExpandoObject expando) + private Dictionary _convertExpandoToDict(ExpandoObject expando) + { + var variables = new Dictionary(); + foreach (KeyValuePair kvp in expando) { - var variables = new Dictionary(); - foreach (KeyValuePair kvp in expando) - { - object value; - if (kvp.Value is ExpandoObject ex) - { - value = _convertExpandoToDict(ex); - } - else - { - value = kvp.Value; - } - variables[kvp.Key] = value; - } - return variables; + object value; + if (kvp.Value is ExpandoObject ex) + value = _convertExpandoToDict(ex); + else + value = kvp.Value; + variables[kvp.Key] = value; } + return variables; + } - private ILogEventEnricher[] _createEnrichers(GraphQLRequest request) + private ILogEventEnricher[] _createEnrichers(GraphQLRequest request) + { + // i know this is double (de)serializing, but we need a recursive convert to + // dict here + var expando = JsonConvert.DeserializeObject( + JsonConvert.SerializeObject(request.Variables) + ); + var variables = + request.Variables != null && expando != null ? _convertExpandoToDict(expando) : null; + return new ILogEventEnricher[] { - // i know this is double (de)serializing, but we need a recursive convert to - // dict here - var expando = JsonConvert.DeserializeObject( - JsonConvert.SerializeObject(request.Variables) - ); - var variables = - request.Variables != null && expando != null ? 
_convertExpandoToDict(expando) : null; - return new ILogEventEnricher[] - { - new PropertyEnricher("serverUrl", ServerUrl), - new PropertyEnricher("graphqlQuery", request.Query), - new PropertyEnricher("graphqlVariables", variables), - new PropertyEnricher("resultType", typeof(T).Name) - }; - } + new PropertyEnricher("serverUrl", ServerUrl), + new PropertyEnricher("graphqlQuery", request.Query), + new PropertyEnricher("graphqlVariables", variables), + new PropertyEnricher("resultType", typeof(T).Name) + }; + } - internal IDisposable SubscribeTo(GraphQLRequest request, Action callback) - { - using (LogContext.Push(_createEnrichers(request))) + internal IDisposable SubscribeTo(GraphQLRequest request, Action callback) + { + using (LogContext.Push(_createEnrichers(request))) + try { - try - { - var res = GQLClient.CreateSubscriptionStream(request); - return res.Subscribe( - response => + var res = GQLClient.CreateSubscriptionStream(request); + return res.Subscribe( + response => + { + try { - try - { - MaybeThrowFromGraphQLErrors(request, response); + MaybeThrowFromGraphQLErrors(request, response); - if (response.Data != null) - { - callback(this, response.Data); - } - else - { - SpeckleLog.Logger.ForContext("graphqlResponse", response) - .Error( - "Cannot execute graphql callback for {resultType}, the response has no data.", - typeof(T).Name - ); - } - } - // we catch forbidden to rethrow, making sure its not logged. 
- catch (SpeckleGraphQLForbiddenException) - { - throw; - } - // anything else related to graphql gets logged - catch (SpeckleGraphQLException gqlException) - { - SpeckleLog.Logger.ForContext("graphqlResponse", gqlException.Response) - .ForContext("graphqlExtensions", gqlException.Extensions) - .ForContext("graphqlErrorMessages", gqlException.ErrorMessages.ToList()) - .Warning( - gqlException, - "Execution of the graphql request to get {resultType} failed with {graphqlExceptionType} {exceptionMessage}.", - typeof(T).Name, - gqlException.GetType().Name, - gqlException.Message + if (response.Data != null) + callback(this, response.Data); + else + SpeckleLog.Logger + .ForContext("graphqlResponse", response) + .Error( + "Cannot execute graphql callback for {resultType}, the response has no data.", + typeof(T).Name ); - throw; - } - // we're not handling the bare Exception type here, - // since we have a response object on the callback, we know the Exceptions - // can only be thrown from the MaybeThrowFromGraphQLErrors which wraps - // every exception into SpeckleGraphQLException - }, - ex => + } + // we catch forbidden to rethrow, making sure its not logged. + catch (SpeckleGraphQLForbiddenException) { - // we're logging this as an error for now, to keep track of failures - // so far we've swallowed these errors - SpeckleLog.Logger.Error( - ex, - "Subscription request for {resultType} failed with {exceptionMessage}", - typeof(T).Name, - ex.Message - ); - // we could be throwing like this: - // throw ex; + throw; } - ); - } - catch (Exception ex) - { - SpeckleLog.Logger.Warning( - ex, - "Subscribing to graphql {resultType} failed without a graphql response. 
Cause {exceptionMessage}", - typeof(T).Name, - ex.Message - ); - throw new SpeckleGraphQLException( - "The graphql request failed without a graphql response", - ex, - request, - null - ); - throw; - } + // anything else related to graphql gets logged + catch (SpeckleGraphQLException gqlException) + { + SpeckleLog.Logger + .ForContext("graphqlResponse", gqlException.Response) + .ForContext("graphqlExtensions", gqlException.Extensions) + .ForContext("graphqlErrorMessages", gqlException.ErrorMessages.ToList()) + .Warning( + gqlException, + "Execution of the graphql request to get {resultType} failed with {graphqlExceptionType} {exceptionMessage}.", + typeof(T).Name, + gqlException.GetType().Name, + gqlException.Message + ); + throw; + } + // we're not handling the bare Exception type here, + // since we have a response object on the callback, we know the Exceptions + // can only be thrown from the MaybeThrowFromGraphQLErrors which wraps + // every exception into SpeckleGraphQLException + }, + ex => + { + // we're logging this as an error for now, to keep track of failures + // so far we've swallowed these errors + SpeckleLog.Logger.Error( + ex, + "Subscription request for {resultType} failed with {exceptionMessage}", + typeof(T).Name, + ex.Message + ); + // we could be throwing like this: + // throw ex; + } + ); + } + catch (Exception ex) + { + SpeckleLog.Logger.Warning( + ex, + "Subscribing to graphql {resultType} failed without a graphql response. 
Cause {exceptionMessage}", + typeof(T).Name, + ex.Message + ); + throw new SpeckleGraphQLException( + "The graphql request failed without a graphql response", + ex, + request, + null + ); + throw; } - } } } diff --git a/Core/Core/Api/GraphQL/Models.cs b/Core/Core/Api/GraphQL/Models.cs index 707c96aede..bfee373b8b 100644 --- a/Core/Core/Api/GraphQL/Models.cs +++ b/Core/Core/Api/GraphQL/Models.cs @@ -1,554 +1,556 @@ -using System; +using System; using System.Collections.Generic; using System.Text.Json.Serialization; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +#region inputs + +public class StreamCreateInput { - #region inputs + public string name { get; set; } + public string description { get; set; } + public bool isPublic { get; set; } = true; +} - public class StreamCreateInput - { - public string name { get; set; } - public string description { get; set; } - public bool isPublic { get; set; } = true; - } +public class StreamUpdateInput +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } + public bool isPublic { get; set; } = true; +} - public class StreamUpdateInput - { - public string id { get; set; } - public string name { get; set; } - public string description { get; set; } - public bool isPublic { get; set; } = true; - } +public class StreamPermissionInput +{ + public string streamId { get; set; } + public string userId { get; set; } + public string role { get; set; } +} - public class StreamPermissionInput - { - public string streamId { get; set; } - public string userId { get; set; } - public string role { get; set; } - } +public class StreamRevokePermissionInput +{ + public string streamId { get; set; } + public string userId { get; set; } +} - public class StreamRevokePermissionInput - { - public string streamId { get; set; } - public string userId { get; set; } - } +public class StreamInviteCreateInput +{ + public string streamId { get; set; } + public string userId { get; set; 
} + public string email { get; set; } + public string message { get; set; } + public string role { get; set; } +} - public class StreamInviteCreateInput - { - public string streamId { get; set; } - public string userId { get; set; } - public string email { get; set; } - public string message { get; set; } - public string role { get; set; } - } +public class BranchCreateInput +{ + public string streamId { get; set; } + public string name { get; set; } + public string description { get; set; } +} - public class BranchCreateInput - { - public string streamId { get; set; } - public string name { get; set; } - public string description { get; set; } - } +public class BranchUpdateInput +{ + public string streamId { get; set; } + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } +} - public class BranchUpdateInput - { - public string streamId { get; set; } - public string id { get; set; } - public string name { get; set; } - public string description { get; set; } - } +public class BranchDeleteInput +{ + public string streamId { get; set; } + public string id { get; set; } +} - public class BranchDeleteInput - { - public string streamId { get; set; } - public string id { get; set; } - } +public class CommitCreateInput +{ + public string streamId { get; set; } + public string branchName { get; set; } + public string objectId { get; set; } + public string message { get; set; } + public string sourceApplication { get; set; } = ".net"; + public int totalChildrenCount { get; set; } + public List parents { get; set; } + + [Obsolete("Please use the parents property. 
This property will be removed in later versions")] + public List previousCommitIds { get; set; } +} - public class CommitCreateInput - { - public string streamId { get; set; } - public string branchName { get; set; } - public string objectId { get; set; } - public string message { get; set; } - public string sourceApplication { get; set; } = ".net"; - public int totalChildrenCount { get; set; } - public List parents { get; set; } - - [Obsolete("Please use the parents property. This property will be removed in later versions")] - public List previousCommitIds { get; set; } - } +public class CommitUpdateInput +{ + public string streamId { get; set; } + public string id { get; set; } + public string message { get; set; } +} - public class CommitUpdateInput - { - public string streamId { get; set; } - public string id { get; set; } - public string message { get; set; } - } +public class CommitDeleteInput +{ + public string streamId { get; set; } + public string id { get; set; } +} - public class CommitDeleteInput - { - public string streamId { get; set; } - public string id { get; set; } - } +public class CommitReceivedInput +{ + public string streamId { get; set; } + public string commitId { get; set; } + public string sourceApplication { get; set; } + public string message { get; set; } +} - public class CommitReceivedInput - { - public string streamId { get; set; } - public string commitId { get; set; } - public string sourceApplication { get; set; } - public string message { get; set; } - } +#endregion - #endregion +public class Stream +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } - public class Stream - { - public string id { get; set; } - public string name { get; set; } - public string description { get; set; } - - public bool isPublic { get; set; } - public string role { get; set; } - public string createdAt { get; set; } - public string updatedAt { get; set; } - public string favoritedDate { 
get; set; } - - public int commentCount { get; set; } - public int favoritesCount { get; set; } - - public List collaborators { get; set; } - public List pendingCollaborators { get; set; } = new List(); - public Branches branches { get; set; } - - /// - /// Set only in the case that you've requested this through . - /// - public Branch branch { get; set; } - - /// - /// Set only in the case that you've requested this through . - /// - public Commit commit { get; set; } - - /// - /// Set only in the case that you've requested this through - /// - public Commits commits { get; set; } - - public Activity activity { get; set; } - - public SpeckleObject @object { get; set; } - - public override string ToString() - { - return $"Stream ({name} | {id})"; - } - } + public bool isPublic { get; set; } + public string role { get; set; } + public string createdAt { get; set; } + public string updatedAt { get; set; } + public string favoritedDate { get; set; } - public class Collaborator - { - public string id { get; set; } - public string name { get; set; } - public string role { get; set; } - public string avatar { get; set; } - - public override string ToString() - { - return $"Collaborator ({name} | {role} | {id})"; - } - } + public int commentCount { get; set; } + public int favoritesCount { get; set; } - public class StreamInvitesResponse - { - public List streamInvites { get; set; } - } - public class PendingStreamCollaborator - { - public string id { get; set; } - public string inviteId { get; set; } - public string streamId { get; set; } - public string streamName { get; set; } - public string title { get; set; } - public string role { get; set; } - public User invitedBy { get; set; } - public User user { get; set; } - public string token { get; set; } - } + public List collaborators { get; set; } + public List pendingCollaborators { get; set; } = new(); + public Branches branches { get; set; } - public class Branches - { - public int totalCount { get; set; } - public 
string cursor { get; set; } - public List items { get; set; } - } + /// + /// Set only in the case that you've requested this through . + /// + public Branch branch { get; set; } - public class Commits - { - public int totalCount { get; set; } - public string cursor { get; set; } - public List items { get; set; } - } + /// + /// Set only in the case that you've requested this through . + /// + public Commit commit { get; set; } - public class Commit - { - public string id { get; set; } - public string message { get; set; } - public string branchName { get; set; } - public string authorName { get; set; } - public string authorId { get; set; } - public string authorAvatar { get; set; } - public string createdAt { get; set; } - public string sourceApplication { get; set; } - - public string referencedObject { get; set; } - public int totalChildrenCount { get; set; } - public List parents { get; set; } - - public override string ToString() - { - return $"Commit ({message} | {id})"; - } - } + /// + /// Set only in the case that you've requested this through + /// + public Commits commits { get; set; } + + public Activity activity { get; set; } + + public SpeckleObject @object { get; set; } - public class Activity + public override string ToString() { - public int totalCount { get; set; } - public DateTime cursor { get; set; } - public List items { get; set; } + return $"Stream ({name} | {id})"; } +} + +public class Collaborator +{ + public string id { get; set; } + public string name { get; set; } + public string role { get; set; } + public string avatar { get; set; } - public class ActivityItem + public override string ToString() { - public string actionType { get; set; } - public string userId { get; set; } - public string streamId { get; set; } - public string resourceId { get; set; } - public string resourceType { get; set; } - public string time { get; set; } - public Info info { get; set; } - public string message { get; set; } + return $"Collaborator ({name} | 
{role} | {id})"; } +} - public class Info - { - public string message { get; set; } - public string sourceApplication { get; set; } +public class StreamInvitesResponse +{ + public List streamInvites { get; set; } +} - public InfoCommit commit { get; set; } - } +public class PendingStreamCollaborator +{ + public string id { get; set; } + public string inviteId { get; set; } + public string streamId { get; set; } + public string streamName { get; set; } + public string title { get; set; } + public string role { get; set; } + public User invitedBy { get; set; } + public User user { get; set; } + public string token { get; set; } +} - public class InfoCommit - { - public string message { get; set; } - public string sourceApplication { get; set; } - public string branchName { get; set; } - } +public class Branches +{ + public int totalCount { get; set; } + public string cursor { get; set; } + public List items { get; set; } +} - public class SpeckleObject - { - public string id { get; set; } - public string speckleType { get; set; } - public string applicationId { get; set; } - public int totalChildrenCount { get; set; } - public string createdAt { get; set; } - } +public class Commits +{ + public int totalCount { get; set; } + public string cursor { get; set; } + public List items { get; set; } +} - public class Branch - { - public string id { get; set; } - public string name { get; set; } - public string description { get; set; } - public Commits commits { get; set; } - - public override string ToString() - { - return $"Branch ({name} | {id})"; - } - } +public class Commit +{ + public string id { get; set; } + public string message { get; set; } + public string branchName { get; set; } + public string authorName { get; set; } + public string authorId { get; set; } + public string authorAvatar { get; set; } + public string createdAt { get; set; } + public string sourceApplication { get; set; } - public class Streams - { - public int totalCount { get; set; } - public 
string cursor { get; set; } - public List items { get; set; } - } + public string referencedObject { get; set; } + public int totalChildrenCount { get; set; } + public List parents { get; set; } - public class UserBase + public override string ToString() { - public string id { get; set; } - public string name { get; set; } - public string bio { get; set; } - public string company { get; set; } - public string avatar { get; set; } - public bool verified { get; set; } - public string role { get; set; } - public Streams streams { get; set; } + return $"Commit ({message} | {id})"; } +} - public class LimitedUser : UserBase - { - public override string ToString() - { - return $"Other user profile: ({name} | {id})"; - } +public class Activity +{ + public int totalCount { get; set; } + public DateTime cursor { get; set; } + public List items { get; set; } +} - } +public class ActivityItem +{ + public string actionType { get; set; } + public string userId { get; set; } + public string streamId { get; set; } + public string resourceId { get; set; } + public string resourceType { get; set; } + public string time { get; set; } + public Info info { get; set; } + public string message { get; set; } +} - public class User : UserBase - { - public string email { get; set; } - public Streams favoriteStreams { get; set; } +public class Info +{ + public string message { get; set; } + public string sourceApplication { get; set; } - public override string ToString() - { - return $"User ({email} | {name} | {id})"; - } - } + public InfoCommit commit { get; set; } +} +public class InfoCommit +{ + public string message { get; set; } + public string sourceApplication { get; set; } + public string branchName { get; set; } +} - public class Resource - { - public string resourceId { get; set; } - public ResourceType resourceType { get; set; } - } +public class SpeckleObject +{ + public string id { get; set; } + public string speckleType { get; set; } + public string applicationId { get; set; } 
+ public int totalChildrenCount { get; set; } + public string createdAt { get; set; } +} - public enum ResourceType +public class Branch +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } + public Commits commits { get; set; } + + public override string ToString() { - commit, - stream, - @object, - comment + return $"Branch ({name} | {id})"; } +} - public class Location +public class Streams +{ + public int totalCount { get; set; } + public string cursor { get; set; } + public List items { get; set; } +} + +public class UserBase +{ + public string id { get; set; } + public string name { get; set; } + public string bio { get; set; } + public string company { get; set; } + public string avatar { get; set; } + public bool verified { get; set; } + public string role { get; set; } + public Streams streams { get; set; } +} + +public class LimitedUser : UserBase +{ + public override string ToString() { - public double x { get; set; } - public double y { get; set; } - public double z { get; set; } + return $"Other user profile: ({name} | {id})"; } +} - public class UserData +public class User : UserBase +{ + public string email { get; set; } + public Streams favoriteStreams { get; set; } + + public override string ToString() { - public User user { get; set; } + return $"User ({email} | {name} | {id})"; } +} + +public class Resource +{ + public string resourceId { get; set; } + public ResourceType resourceType { get; set; } +} + +public enum ResourceType +{ + commit, + stream, + @object, + comment +} + +public class Location +{ + public double x { get; set; } + public double y { get; set; } + public double z { get; set; } +} +public class UserData +{ + public User user { get; set; } +} +/// +/// GraphQL DTO model for active user data +/// +public class ActiveUserData +{ /// - /// GraphQL DTO model for active user data + /// User profile of the active user. 
/// - public class ActiveUserData - { - /// - /// User profile of the active user. - /// - public User activeUser { get; set; } - } - + public User activeUser { get; set; } +} +/// +/// GraphQL DTO model for limited user data. Mostly referring to other user's profile. +/// +public class LimitedUserData +{ /// - /// GraphQL DTO model for limited user data. Mostly referring to other user's profile. + /// The limited user profile of another (non active user) /// - public class LimitedUserData - { - /// - /// The limited user profile of another (non active user) - /// - public LimitedUser otherUser { get; set; } - } + public LimitedUser otherUser { get; set; } +} - public class UserSearchData - { - public UserSearch userSearch { get; set; } - } +public class UserSearchData +{ + public UserSearch userSearch { get; set; } +} - public class UserSearch - { - public string cursor { get; set; } - public List items { get; set; } - } +public class UserSearch +{ + public string cursor { get; set; } + public List items { get; set; } +} - public class ServerInfoResponse - { - // TODO: server and user models are duplicated here and in Core.Credentials.Responses - // a bit weird and unnecessary - shouldn't both Credentials and Api share the same models since they're - // all server models that should be consistent? am creating a new obj here as to not reference Credentials in - // this file but it should prob be refactored in the futrue - public ServerInfo serverInfo { get; set; } - } +public class ServerInfoResponse +{ + // TODO: server and user models are duplicated here and in Core.Credentials.Responses + // a bit weird and unnecessary - shouldn't both Credentials and Api share the same models since they're + // all server models that should be consistent? 
am creating a new obj here as to not reference Credentials in + // this file but it should prob be refactored in the futrue + public ServerInfo serverInfo { get; set; } +} - // TODO: prob remove and bring one level up and shared w Core.Credentials - public class ServerInfo - { - public string name { get; set; } - public string company { get; set; } - public string url { get; set; } - public string version { get; set; } - public string adminContact { get; set; } - public string description { get; set; } - } +// TODO: prob remove and bring one level up and shared w Core.Credentials +public class ServerInfo +{ + public string name { get; set; } + public string company { get; set; } + public string url { get; set; } + public string version { get; set; } + public string adminContact { get; set; } + public string description { get; set; } +} - public class StreamData - { - public Stream stream { get; set; } - } +public class StreamData +{ + public Stream stream { get; set; } +} - public class StreamsData - { - public Streams streams { get; set; } - } +public class StreamsData +{ + public Streams streams { get; set; } +} - #region comments - public class Comments - { - public int totalCount { get; set; } - public DateTime? cursor { get; set; } - public List items { get; set; } - } +#region comments +public class Comments +{ + public int totalCount { get; set; } + public DateTime? 
cursor { get; set; } + public List items { get; set; } +} - public class CommentData - { - public Comments comments { get; set; } - public List camPos { get; set; } - public object filters { get; set; } - public Location location { get; set; } - public object selection { get; set; } - public object sectionBox { get; set; } - } +public class CommentData +{ + public Comments comments { get; set; } + public List camPos { get; set; } + public object filters { get; set; } + public Location location { get; set; } + public object selection { get; set; } + public object sectionBox { get; set; } +} - public class CommentItem - { - public string id { get; set; } - public string authorId { get; set; } - public bool archived { get; set; } - public string screenshot { get; set; } - public string rawText { get; set; } - public CommentData data { get; set; } - public DateTime createdAt { get; set; } - public DateTime updatedAt { get; set; } - public DateTime? viewedAt { get; set; } - public object reactions { get; set; } - public Comments replies { get; set; } - public List resources { get; set; } - } +public class CommentItem +{ + public string id { get; set; } + public string authorId { get; set; } + public bool archived { get; set; } + public string screenshot { get; set; } + public string rawText { get; set; } + public CommentData data { get; set; } + public DateTime createdAt { get; set; } + public DateTime updatedAt { get; set; } + public DateTime? 
viewedAt { get; set; } + public object reactions { get; set; } + public Comments replies { get; set; } + public List resources { get; set; } +} - public partial class ContentContent - { - public string Type { get; set; } - //public Mark[] Marks { get; set; } - public string Text { get; set; } - } +public partial class ContentContent +{ + public string Type { get; set; } - public class CommentsData - { - public Comments comments { get; set; } - } + //public Mark[] Marks { get; set; } + public string Text { get; set; } +} - public class CommentItemData - { - public CommentItem comment { get; set; } - } +public class CommentsData +{ + public Comments comments { get; set; } +} - public class CommentActivityMessage - { - public string type { get; set; } - public CommentItem comment { get; set; } - } +public class CommentItemData +{ + public CommentItem comment { get; set; } +} - public class CommentActivityResponse - { - public CommentActivityMessage commentActivity { get; set; } - } - #endregion +public class CommentActivityMessage +{ + public string type { get; set; } + public CommentItem comment { get; set; } +} - #region manager api +public class CommentActivityResponse +{ + public CommentActivityMessage commentActivity { get; set; } +} +#endregion - public class Connector - { - public List Versions { get; set; } = new List(); - } +#region manager api - public class Version - { - public string Number { get; set; } - public string Url { get; set; } - public Os Os { get; set; } - public Architecture Architecture { get; set; } = Architecture.Any; - public DateTime Date { get; set; } - - [JsonIgnore] - public string DateTimeAgo => Helpers.TimeAgo(Date); - public bool Prerelease { get; set; } = false; - - public Version(string number, string url, Os os = Os.Win, Architecture architecture = Architecture.Any) - { - Number = number; - Url = url; - Date = DateTime.Now; - Prerelease = Number.Contains("-"); - Os = os; - Architecture = architecture; - } - } +public class 
Connector +{ + public List Versions { get; set; } = new(); +} - /// - /// OS - /// NOTE: do not edit order and only append new items as they are serialized to ints - /// - public enum Os - { - Win, //0 - OSX, //1 - Linux, //2 - Any //3 - } +public class Version +{ + public Version( + string number, + string url, + Os os = Os.Win, + Architecture architecture = Architecture.Any + ) + { + Number = number; + Url = url; + Date = DateTime.Now; + Prerelease = Number.Contains("-"); + Os = os; + Architecture = architecture; + } + + public string Number { get; set; } + public string Url { get; set; } + public Os Os { get; set; } + public Architecture Architecture { get; set; } = Architecture.Any; + public DateTime Date { get; set; } + + [JsonIgnore] + public string DateTimeAgo => Helpers.TimeAgo(Date); + + public bool Prerelease { get; set; } = false; +} - /// - /// Architecture - /// NOTE: do not edit order and only append new items as they are serialized to ints - /// - public enum Architecture - { - Any, //0 - Arm, //1 - Intel //2 - } +/// +/// OS +/// NOTE: do not edit order and only append new items as they are serialized to ints +/// +public enum Os +{ + Win, //0 + OSX, //1 + Linux, //2 + Any //3 +} +/// +/// Architecture +/// NOTE: do not edit order and only append new items as they are serialized to ints +/// +public enum Architecture +{ + Any, //0 + Arm, //1 + Intel //2 +} - //GHOST API - public class Meta - { - public Pagination pagination { get; set; } - } +//GHOST API +public class Meta +{ + public Pagination pagination { get; set; } +} - public class Pagination - { - public int page { get; set; } - public string limit { get; set; } - public int pages { get; set; } - public int total { get; set; } - public object next { get; set; } - public object prev { get; set; } - } +public class Pagination +{ + public int page { get; set; } + public string limit { get; set; } + public int pages { get; set; } + public int total { get; set; } + public object next { get; set; } 
+ public object prev { get; set; } +} - public class Tags - { - public List tags { get; set; } - public Meta meta { get; set; } - } +public class Tags +{ + public List tags { get; set; } + public Meta meta { get; set; } +} - public class Tag - { - public string id { get; set; } - public string name { get; set; } - public string slug { get; set; } - public string description { get; set; } - public string feature_image { get; set; } - public string visibility { get; set; } - public string codeinjection_head { get; set; } - public object codeinjection_foot { get; set; } - public object canonical_url { get; set; } - public string accent_color { get; set; } - public string url { get; set; } - } - #endregion +public class Tag +{ + public string id { get; set; } + public string name { get; set; } + public string slug { get; set; } + public string description { get; set; } + public string feature_image { get; set; } + public string visibility { get; set; } + public string codeinjection_head { get; set; } + public object codeinjection_foot { get; set; } + public object canonical_url { get; set; } + public string accent_color { get; set; } + public string url { get; set; } } +#endregion diff --git a/Core/Core/Api/GraphQL/Serializer/ConstantCaseEnumConverter.cs b/Core/Core/Api/GraphQL/Serializer/ConstantCaseEnumConverter.cs index 7a010ef1ca..76cac71149 100644 --- a/Core/Core/Api/GraphQL/Serializer/ConstantCaseEnumConverter.cs +++ b/Core/Core/Api/GraphQL/Serializer/ConstantCaseEnumConverter.cs @@ -1,36 +1,40 @@ -using System; +using System; using System.Linq; using System.Reflection; using GraphQL.Client.Abstractions.Utilities; using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Converters; -namespace Speckle.Core.Api.GraphQL.Serializer +namespace Speckle.Core.Api.GraphQL.Serializer; + +public class ConstantCaseEnumConverter : StringEnumConverter { - public class ConstantCaseEnumConverter : StringEnumConverter + public override void WriteJson(JsonWriter writer, object 
value, JsonSerializer serializer) { - public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + if (value == null) + { + writer.WriteNull(); + } + else { - if (value == null) + var enumString = ((Enum)value).ToString("G"); + var memberName = value + .GetType() + .GetMember( + enumString, + BindingFlags.DeclaredOnly | BindingFlags.Static | BindingFlags.Public + ) + .FirstOrDefault() + ?.Name; + if (string.IsNullOrEmpty(memberName)) { - writer.WriteNull(); + if (!AllowIntegerValues) + throw new JsonSerializationException($"Integer value {value} is not allowed."); + writer.WriteValue(value); } else { - var enumString = ((Enum)value).ToString("G"); - var memberName = value.GetType() - .GetMember(enumString, BindingFlags.DeclaredOnly | BindingFlags.Static | BindingFlags.Public) - .FirstOrDefault()?.Name; - if (string.IsNullOrEmpty(memberName)) - { - if (!AllowIntegerValues) - throw new JsonSerializationException($"Integer value {value} is not allowed."); - writer.WriteValue(value); - } - else - { - writer.WriteValue(memberName.ToConstantCase()); - } + writer.WriteValue(memberName.ToConstantCase()); } } } diff --git a/Core/Core/Api/GraphQL/Serializer/MapConverter.cs b/Core/Core/Api/GraphQL/Serializer/MapConverter.cs index 634bb21df9..a1f1e2d721 100644 --- a/Core/Core/Api/GraphQL/Serializer/MapConverter.cs +++ b/Core/Core/Api/GraphQL/Serializer/MapConverter.cs @@ -1,65 +1,89 @@ -using System; +using System; using System.Collections.Generic; using System.Linq; using GraphQL; using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Linq; -namespace Speckle.Core.Api.GraphQL.Serializer +namespace Speckle.Core.Api.GraphQL.Serializer; + +public class MapConverter : JsonConverter { - public class MapConverter : JsonConverter + public override void WriteJson(JsonWriter writer, Map value, JsonSerializer serializer) { - public override void WriteJson(JsonWriter writer, Map value, JsonSerializer serializer) => - throw new 
NotImplementedException( - "This converter currently is only intended to be used to read a JSON object into a strongly-typed representation."); + throw new NotImplementedException( + "This converter currently is only intended to be used to read a JSON object into a strongly-typed representation." + ); + } - public override Map ReadJson(JsonReader reader, Type objectType, Map existingValue, bool hasExistingValue, JsonSerializer serializer) - { - var rootToken = JToken.ReadFrom(reader); - if (rootToken is JObject) - { - return (Map)ReadDictionary(rootToken, new Map()); - } - else - throw new ArgumentException("This converter can only parse when the root element is a JSON Object."); - } + public override Map ReadJson( + JsonReader reader, + Type objectType, + Map existingValue, + bool hasExistingValue, + JsonSerializer serializer + ) + { + var rootToken = JToken.ReadFrom(reader); + if (rootToken is JObject) + return (Map)ReadDictionary(rootToken, new Map()); + else + throw new ArgumentException( + "This converter can only parse when the root element is a JSON Object." + ); + } - private object? ReadToken(JToken? token) => - token switch - { - JObject jObject => ReadDictionary(jObject, new Dictionary()), - JArray jArray => ReadArray(jArray).ToList(), - JValue jValue => jValue.Value, - JConstructor _ => throw new ArgumentOutOfRangeException(nameof(token.Type), - "cannot deserialize a JSON constructor"), - JProperty _ => throw new ArgumentOutOfRangeException(nameof(token.Type), - "cannot deserialize a JSON property"), - JContainer _ => throw new ArgumentOutOfRangeException(nameof(token.Type), - "cannot deserialize a JSON comment"), - _ => throw new ArgumentOutOfRangeException(nameof(token.Type)) - }; + private object? ReadToken(JToken? 
token) + { + return token switch + { + JObject jObject => ReadDictionary(jObject, new Dictionary()), + JArray jArray => ReadArray(jArray).ToList(), + JValue jValue => jValue.Value, + JConstructor _ + => throw new ArgumentOutOfRangeException( + nameof(token.Type), + "cannot deserialize a JSON constructor" + ), + JProperty _ + => throw new ArgumentOutOfRangeException( + nameof(token.Type), + "cannot deserialize a JSON property" + ), + JContainer _ + => throw new ArgumentOutOfRangeException( + nameof(token.Type), + "cannot deserialize a JSON comment" + ), + _ => throw new ArgumentOutOfRangeException(nameof(token.Type)) + }; + } - private Dictionary ReadDictionary(JToken element, Dictionary to) + private Dictionary ReadDictionary(JToken element, Dictionary to) + { + foreach (var property in ((JObject)element).Properties()) { - foreach (var property in ((JObject)element).Properties()) - { - if (IsUnsupportedJTokenType(property.Value.Type)) - continue; - to[property.Name] = ReadToken(property.Value); - } - return to; + if (IsUnsupportedJTokenType(property.Value.Type)) + continue; + to[property.Name] = ReadToken(property.Value); } + return to; + } - private IEnumerable ReadArray(JArray element) + private IEnumerable ReadArray(JArray element) + { + foreach (var item in element) { - foreach (var item in element) - { - if (IsUnsupportedJTokenType(item.Type)) - continue; - yield return ReadToken(item); - } + if (IsUnsupportedJTokenType(item.Type)) + continue; + yield return ReadToken(item); } + } - private bool IsUnsupportedJTokenType(JTokenType type) => type == JTokenType.Constructor || type == JTokenType.Property || type == JTokenType.Comment; + private bool IsUnsupportedJTokenType(JTokenType type) + { + return type == JTokenType.Constructor + || type == JTokenType.Property + || type == JTokenType.Comment; } } diff --git a/Core/Core/Api/GraphQL/Serializer/NewtonsoftJsonSerializer.cs b/Core/Core/Api/GraphQL/Serializer/NewtonsoftJsonSerializer.cs index df6b332909..799da36694 
100644 --- a/Core/Core/Api/GraphQL/Serializer/NewtonsoftJsonSerializer.cs +++ b/Core/Core/Api/GraphQL/Serializer/NewtonsoftJsonSerializer.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.IO; using System.Text; using System.Threading; @@ -9,54 +9,82 @@ using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Serialization; -namespace Speckle.Core.Api.GraphQL.Serializer +namespace Speckle.Core.Api.GraphQL.Serializer; + +public class NewtonsoftJsonSerializer : IGraphQLWebsocketJsonSerializer { - public class NewtonsoftJsonSerializer : IGraphQLWebsocketJsonSerializer + public NewtonsoftJsonSerializer() + : this(DefaultJsonSerializerSettings) { } + + public NewtonsoftJsonSerializer(Action configure) + : this(configure.AndReturn(DefaultJsonSerializerSettings)) { } + + public NewtonsoftJsonSerializer(JsonSerializerSettings jsonSerializerSettings) { - public static JsonSerializerSettings DefaultJsonSerializerSettings => new JsonSerializerSettings + JsonSerializerSettings = jsonSerializerSettings; + ConfigureMandatorySerializerOptions(); + } + + public static JsonSerializerSettings DefaultJsonSerializerSettings => + new() { - ContractResolver = new CamelCasePropertyNamesContractResolver { IgnoreIsSpecifiedMembers = true }, + ContractResolver = new CamelCasePropertyNamesContractResolver + { + IgnoreIsSpecifiedMembers = true + }, MissingMemberHandling = MissingMemberHandling.Ignore, Converters = { new ConstantCaseEnumConverter() } }; - public JsonSerializerSettings JsonSerializerSettings { get; } - - public NewtonsoftJsonSerializer() : this(DefaultJsonSerializerSettings) { } + public JsonSerializerSettings JsonSerializerSettings { get; } - public NewtonsoftJsonSerializer(Action configure) : this(configure.AndReturn(DefaultJsonSerializerSettings)) { } - - public NewtonsoftJsonSerializer(JsonSerializerSettings jsonSerializerSettings) - { - JsonSerializerSettings = jsonSerializerSettings; - ConfigureMandatorySerializerOptions(); - } - - // deserialize extensions 
to Dictionary - private void ConfigureMandatorySerializerOptions() => JsonSerializerSettings.Converters.Insert(0, new MapConverter()); + public string SerializeToString(GraphQLRequest request) + { + return JsonConvert.SerializeObject(request, JsonSerializerSettings); + } - public string SerializeToString(GraphQLRequest request) => JsonConvert.SerializeObject(request, JsonSerializerSettings); + public byte[] SerializeToBytes(GraphQLWebSocketRequest request) + { + var json = JsonConvert.SerializeObject(request, JsonSerializerSettings); + return Encoding.UTF8.GetBytes(json); + } - public byte[] SerializeToBytes(GraphQLWebSocketRequest request) - { - var json = JsonConvert.SerializeObject(request, JsonSerializerSettings); - return Encoding.UTF8.GetBytes(json); - } + public Task DeserializeToWebsocketResponseWrapperAsync( + System.IO.Stream stream + ) + { + return DeserializeFromUtf8Stream(stream); + } - public Task DeserializeToWebsocketResponseWrapperAsync(System.IO.Stream stream) => DeserializeFromUtf8Stream(stream); + public GraphQLWebSocketResponse< + GraphQLResponse + > DeserializeToWebsocketResponse(byte[] bytes) + { + return JsonConvert.DeserializeObject>>( + Encoding.UTF8.GetString(bytes), + JsonSerializerSettings + ); + } - public GraphQLWebSocketResponse> DeserializeToWebsocketResponse(byte[] bytes) => - JsonConvert.DeserializeObject>>(Encoding.UTF8.GetString(bytes), - JsonSerializerSettings); + public Task> DeserializeFromUtf8StreamAsync( + System.IO.Stream stream, + CancellationToken cancellationToken + ) + { + return DeserializeFromUtf8Stream>(stream); + } - public Task> DeserializeFromUtf8StreamAsync(System.IO.Stream stream, CancellationToken cancellationToken) => DeserializeFromUtf8Stream>(stream); + // deserialize extensions to Dictionary + private void ConfigureMandatorySerializerOptions() + { + JsonSerializerSettings.Converters.Insert(0, new MapConverter()); + } - private Task DeserializeFromUtf8Stream(System.IO.Stream stream) - { - using var sr = new 
StreamReader(stream); - using JsonReader reader = new JsonTextReader(sr); - var serializer = JsonSerializer.Create(JsonSerializerSettings); - return Task.FromResult(serializer.Deserialize(reader)); - } + private Task DeserializeFromUtf8Stream(System.IO.Stream stream) + { + using var sr = new StreamReader(stream); + using JsonReader reader = new JsonTextReader(sr); + var serializer = JsonSerializer.Create(JsonSerializerSettings); + return Task.FromResult(serializer.Deserialize(reader)); } } diff --git a/Core/Core/Api/GraphQL/SubscriptionModels.cs b/Core/Core/Api/GraphQL/SubscriptionModels.cs index 64c91722d1..3385d1adbd 100644 --- a/Core/Core/Api/GraphQL/SubscriptionModels.cs +++ b/Core/Core/Api/GraphQL/SubscriptionModels.cs @@ -1,92 +1,89 @@ -using System; +using System; -namespace Speckle.Core.Api.SubscriptionModels +namespace Speckle.Core.Api.SubscriptionModels; + +#region streams +public class StreamInfo +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } + public string sharedBy { get; set; } +} + +public class UserStreamAddedResult +{ + public StreamInfo userStreamAdded { get; set; } +} + +public class StreamUpdatedResult +{ + public StreamInfo streamUpdated { get; set; } +} + +public class UserStreamRemovedResult +{ + public StreamInfo userStreamRemoved { get; set; } +} +#endregion + +#region branches + +public class BranchInfo +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } + public string streamId { get; set; } + public string authorId { get; set; } +} + +public class BranchCreatedResult +{ + public BranchInfo branchCreated { get; set; } +} + +public class BranchUpdatedResult +{ + public BranchInfo branchUpdated { get; set; } +} + +public class BranchDeletedResult +{ + public BranchInfo branchDeleted { get; set; } +} +#endregion + +#region commits + +public class CommitInfo +{ + public string id { get; set; } + public string streamId 
{ get; set; } + public string branchName { get; set; } + public string objectId { get; set; } + public string authorId { get; set; } + public string message { get; set; } + public string sourceApplication { get; set; } + public int? totalChildrenCount { get; set; } + public string[] parents { get; set; } + + [Obsolete("Please use the parents property. This property will be removed in later versions")] + public string[] previousCommitIds { get; set; } +} + +public class CommitCreatedResult +{ + public CommitInfo commitCreated { get; set; } +} + +public class CommitUpdatedResult +{ + public CommitInfo commitUpdated { get; set; } +} + +public class CommitDeletedResult { - #region streams - public class StreamInfo - { - public string id { get; set; } - public string name { get; set; } - public string description { get; set; } - public string sharedBy { get; set; } - - } - - public class UserStreamAddedResult - { - public StreamInfo userStreamAdded { get; set; } - } - - public class StreamUpdatedResult - { - public StreamInfo streamUpdated { get; set; } - } - - public class UserStreamRemovedResult - { - public StreamInfo userStreamRemoved { get; set; } - } - #endregion - - #region branches - - public class BranchInfo - { - public string id { get; set; } - public string name { get; set; } - public string description { get; set; } - public string streamId { get; set; } - public string authorId { get; set; } - } - - public class BranchCreatedResult - { - public BranchInfo branchCreated { get; set; } - } - - public class BranchUpdatedResult - { - public BranchInfo branchUpdated { get; set; } - } - - public class BranchDeletedResult - { - public BranchInfo branchDeleted { get; set; } - } - #endregion - - #region commits - - public class CommitInfo - { - public string id { get; set; } - public string streamId { get; set; } - public string branchName { get; set; } - public string objectId { get; set; } - public string authorId { get; set; } - public string message { get; set; 
} - public string sourceApplication { get; set; } - public int? totalChildrenCount { get; set; } - public string[] parents { get; set; } - - [Obsolete("Please use the parents property. This property will be removed in later versions")] - public string[] previousCommitIds { get; set; } - - } - - public class CommitCreatedResult - { - public CommitInfo commitCreated { get; set; } - } - - public class CommitUpdatedResult - { - public CommitInfo commitUpdated { get; set; } - } - - public class CommitDeletedResult - { - public CommitInfo commitDeleted { get; set; } - } - #endregion + public CommitInfo commitDeleted { get; set; } } +#endregion diff --git a/Core/Core/Api/Helpers.cs b/Core/Core/Api/Helpers.cs index 655ee143ed..cf934e94c3 100644 --- a/Core/Core/Api/Helpers.cs +++ b/Core/Core/Api/Helpers.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.IO; @@ -6,10 +6,8 @@ using System.Net.Http; using System.Reflection; using System.Runtime.InteropServices; -using System.Text; using System.Text.Json; using System.Threading.Tasks; -using Sentry; using Speckle.Core.Credentials; using Speckle.Core.Helpers; using Speckle.Core.Kits; @@ -17,105 +15,153 @@ using Speckle.Core.Models; using Speckle.Core.Transports; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public static class Helpers { - public static class Helpers + public const string ReleasesUrl = "https://releases.speckle.dev"; + private static string _feedsEndpoint = ReleasesUrl + "/manager2/feeds"; + + /// + /// Envirenment Variable that allows to overwrite the + /// /// + private static string _speckleUserDataEnvVar = "SPECKLE_USERDATA_PATH"; + + /// + /// Returns the correct location of the Speckle installation folder. Usually this would be the user's %appdata%/Speckle folder, unless the install was made for all users. 
+ /// + /// The location of the Speckle installation folder + [Obsolete("Please use Helpers/SpecklePathProvider.InstallSpeckleFolderPath", true)] + public static string InstallSpeckleFolderPath => + Path.Combine(InstallApplicationDataPath, "Speckle"); + + /// + /// Returns the correct location of the Speckle folder for the current user. Usually this would be the user's %appdata%/Speckle folder. + /// + /// The location of the Speckle installation folder + [Obsolete("Please use Helpers/SpecklePathProvider.UserSpeckleFolderPath()", true)] + public static string UserSpeckleFolderPath => Path.Combine(UserApplicationDataPath, "Speckle"); + + /// + /// Returns the correct location of the AppData folder where Speckle is installed. Usually this would be the user's %appdata% folder, unless the install was made for all users. + /// This folder contains Kits and othe data that can be shared among users of the same machine. + /// + /// The location of the AppData folder where Speckle is installed + [Obsolete("Please use Helpers/SpecklePathProvider.InstallApplicationDataPath ", true)] + public static string InstallApplicationDataPath => + Assembly.GetAssembly(typeof(Helpers)).Location.Contains("ProgramData") + ? Environment.GetFolderPath( + Environment.SpecialFolder.CommonApplicationData, + Environment.SpecialFolderOption.Create + ) + : UserApplicationDataPath; + + /// + /// Returns the location of the User Application Data folder for the current roaming user, which contains user specific data such as accounts and cache. + /// + /// The location of the user's `%appdata%` folder. + [Obsolete("Please use Helpers/SpecklePathProvider.UserApplicationDataPath", true)] + public static string UserApplicationDataPath => + !string.IsNullOrEmpty(Environment.GetEnvironmentVariable(_speckleUserDataEnvVar)) + ? 
Environment.GetEnvironmentVariable(_speckleUserDataEnvVar) + : Environment.GetFolderPath( + Environment.SpecialFolder.ApplicationData, + Environment.SpecialFolderOption.Create + ); + + /// + /// Helper method to Receive from a Speckle Server. + /// + /// Stream URL or Id to receive from. If the URL contains branchName, commitId or objectId those will be used, otherwise the latest commit from main will be received. + /// Account to use. If not provided the default account will be used. + /// Action invoked on progress iterations. + /// Action invoked on internal errors. + /// Action invoked once the total count of objects is known. + /// + public static async Task Receive( + string stream, + Account account = null, + Action> onProgressAction = null, + Action onErrorAction = null, + Action onTotalChildrenCountKnown = null + ) { - public const string ReleasesUrl = "https://releases.speckle.dev"; - private static string _feedsEndpoint = ReleasesUrl + "/manager2/feeds"; - - /// - /// Helper method to Receive from a Speckle Server. - /// - /// Stream URL or Id to receive from. If the URL contains branchName, commitId or objectId those will be used, otherwise the latest commit from main will be received. - /// Account to use. If not provided the default account will be used. - /// Action invoked on progress iterations. - /// Action invoked on internal errors. - /// Action invoked once the total count of objects is known. 
- /// - public static async Task Receive( - string stream, - Account account = null, - Action> onProgressAction = null, - Action onErrorAction = null, - Action onTotalChildrenCountKnown = null - ) + var sw = new StreamWrapper(stream); + + try + { + account ??= await sw.GetAccount().ConfigureAwait(false); + } + catch (SpeckleException e) { - var sw = new StreamWrapper(stream); + if (string.IsNullOrEmpty(sw.StreamId)) + throw e; - try - { - account ??= await sw.GetAccount(); - } - catch (SpeckleException e) + //Fallback to a non authed account + account = new Account() { - if (string.IsNullOrEmpty(sw.StreamId)) - throw e; + token = "", + serverInfo = new ServerInfo() { url = sw.ServerUrl }, + userInfo = new UserInfo() + }; + } - //Fallback to a non authed account - account = new Account() - { - token = "", - serverInfo = new ServerInfo() { url = sw.ServerUrl }, - userInfo = new UserInfo() - }; - } + var client = new Client(account); - var client = new Client(account); + var transport = new ServerTransport(client.Account, sw.StreamId); - var transport = new ServerTransport(client.Account, sw.StreamId); + string objectId = ""; + Commit commit = null; - string objectId = ""; - Commit commit = null; + //OBJECT URL + if (!string.IsNullOrEmpty(sw.ObjectId)) + { + objectId = sw.ObjectId; + } + //COMMIT URL + else if (!string.IsNullOrEmpty(sw.CommitId)) + { + commit = await client.CommitGet(sw.StreamId, sw.CommitId).ConfigureAwait(false); + objectId = commit.referencedObject; + } + //BRANCH URL OR STREAM URL + else + { + var branchName = string.IsNullOrEmpty(sw.BranchName) ? "main" : sw.BranchName; - //OBJECT URL - if (!string.IsNullOrEmpty(sw.ObjectId)) - { - objectId = sw.ObjectId; - } - //COMMIT URL - else if (!string.IsNullOrEmpty(sw.CommitId)) - { - commit = await client.CommitGet(sw.StreamId, sw.CommitId); - objectId = commit.referencedObject; - } - //BRANCH URL OR STREAM URL - else - { - var branchName = string.IsNullOrEmpty(sw.BranchName) ? 
"main" : sw.BranchName; + var branch = await client.BranchGet(sw.StreamId, branchName, 1).ConfigureAwait(false); + if (!branch.commits.items.Any()) + throw new SpeckleException($"The selected branch has no commits."); - var branch = await client.BranchGet(sw.StreamId, branchName, 1); - if (!branch.commits.items.Any()) - throw new SpeckleException( - $"The selected branch has no commits." - ); + commit = branch.commits.items[0]; + objectId = branch.commits.items[0].referencedObject; + } - commit = branch.commits.items[0]; - objectId = branch.commits.items[0].referencedObject; + Analytics.TrackEvent( + client.Account, + Analytics.Events.Receive, + new Dictionary() + { + { "sourceHostApp", HostApplications.GetHostAppFromString(commit.sourceApplication).Slug }, + { "sourceHostAppVersion", commit.sourceApplication } } + ); - Analytics.TrackEvent( - client.Account, - Analytics.Events.Receive, - new Dictionary() - { - { "sourceHostApp", HostApplications.GetHostAppFromString(commit.sourceApplication).Slug }, - { "sourceHostAppVersion", commit.sourceApplication } - } - ); - - var receiveRes = await Operations.Receive( + var receiveRes = await Operations + .Receive( objectId, - remoteTransport: transport, + transport, onErrorAction: onErrorAction, onProgressAction: onProgressAction, onTotalChildrenCountKnown: onTotalChildrenCountKnown, disposeTransports: true - ); + ) + .ConfigureAwait(false); - try - { - await client.CommitReceived( + try + { + await client + .CommitReceived( new CommitReceivedInput { streamId = sw.StreamId, @@ -123,56 +169,60 @@ await client.CommitReceived( message = commit?.message, sourceApplication = "Other" } - ); - } - catch - { - // Do nothing! - } - return receiveRes; + ) + .ConfigureAwait(false); } - - /// - /// Helper method to Send to a Speckle Server. - /// - /// Stream URL or Id to send to. If the URL contains branchName, commitId or objectId those will be used, otherwise the latest commit from main will be received. 
- /// Data to send - /// Account to use. If not provided the default account will be used. - /// Toggle for the default cache. If set to false, it will only send to the provided transports. - /// Action invoked on progress iterations. - /// Action invoked on internal errors. - /// - public static async Task Send( - string stream, - Base data, - string message = "No message", - string sourceApplication = ".net", - int totalChildrenCount = 0, - Account account = null, - bool useDefaultCache = true, - Action> onProgressAction = null, - Action onErrorAction = null - ) + catch { - var sw = new StreamWrapper(stream); + // Do nothing! + } + return receiveRes; + } - var client = new Client(account ?? await sw.GetAccount()); + /// + /// Helper method to Send to a Speckle Server. + /// + /// Stream URL or Id to send to. If the URL contains branchName, commitId or objectId those will be used, otherwise the latest commit from main will be received. + /// Data to send + /// Account to use. If not provided the default account will be used. + /// Toggle for the default cache. If set to false, it will only send to the provided transports. + /// Action invoked on progress iterations. + /// Action invoked on internal errors. + /// + public static async Task Send( + string stream, + Base data, + string message = "No message", + string sourceApplication = ".net", + int totalChildrenCount = 0, + Account account = null, + bool useDefaultCache = true, + Action> onProgressAction = null, + Action onErrorAction = null + ) + { + var sw = new StreamWrapper(stream); - var transport = new ServerTransport(client.Account, sw.StreamId); - var branchName = string.IsNullOrEmpty(sw.BranchName) ? "main" : sw.BranchName; + var client = new Client(account ?? await sw.GetAccount().ConfigureAwait(false)); + + var transport = new ServerTransport(client.Account, sw.StreamId); + var branchName = string.IsNullOrEmpty(sw.BranchName) ? 
"main" : sw.BranchName; - var objectId = await Operations.Send( + var objectId = await Operations + .Send( data, new List { transport }, useDefaultCache, onProgressAction, onErrorAction, - disposeTransports: true - ); + true + ) + .ConfigureAwait(false); - Analytics.TrackEvent(client.Account, Analytics.Events.Send); + Analytics.TrackEvent(client.Account, Analytics.Events.Send); - return await client.CommitCreate( + return await client + .CommitCreate( new CommitCreateInput { streamId = sw.StreamId, @@ -180,141 +230,96 @@ public static async Task Send( objectId = objectId, message = message, sourceApplication = sourceApplication, - totalChildrenCount = totalChildrenCount, + totalChildrenCount = totalChildrenCount } - ); - } + ) + .ConfigureAwait(false); + } - /// - /// - /// - /// The connector slug eg. revit, rhino, etc - /// - public static async Task IsConnectorUpdateAvailable(string slug) - { + /// + /// + /// + /// The connector slug eg. revit, rhino, etc + /// + public static async Task IsConnectorUpdateAvailable(string slug) + { #if DEBUG - if (slug == "dui2") - slug = "revit"; - //when debugging the version is not correct, so don't bother - return false; + if (slug == "dui2") + slug = "revit"; + //when debugging the version is not correct, so don't bother + return false; #endif - try - { - HttpClient client = Http.GetHttpProxyClient(); - var response = await client.GetStringAsync($"{_feedsEndpoint}/{slug}.json"); - var connector = JsonSerializer.Deserialize(response); - - var os = Os.Win; - if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) - os = Os.OSX; - - var versions = connector.Versions - .Where(x => x.Os == os) - .OrderByDescending(x => x.Date) - .ToList(); - var stables = versions.Where(x => !x.Prerelease); - if (!stables.Any()) - return false; - - var latestVersion = new System.Version(stables.First().Number); - - var currentVersion = Assembly.GetAssembly(typeof(Helpers)).GetName().Version; - - if (latestVersion > currentVersion) - return true; - 
} - catch (Exception ex) - { - //new SpeckleException($"Could not check for connector updates: {slug}", ex, true, SentryLevel.Warning); - } - - return false; + try + { + HttpClient client = Http.GetHttpProxyClient(); + var response = await client + .GetStringAsync($"{_feedsEndpoint}/{slug}.json") + .ConfigureAwait(false); + var connector = JsonSerializer.Deserialize(response); + + var os = Os.Win; + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + os = Os.OSX; + + var versions = connector.Versions + .Where(x => x.Os == os) + .OrderByDescending(x => x.Date) + .ToList(); + var stables = versions.Where(x => !x.Prerelease); + if (!stables.Any()) + return false; + + var latestVersion = new System.Version(stables.First().Number); + + var currentVersion = Assembly.GetAssembly(typeof(Helpers)).GetName().Version; + + if (latestVersion > currentVersion) + return true; } - - public static string TimeAgo(string timestamp) + catch (Exception ex) { - return TimeAgo(DateTime.Parse(timestamp)); + //new SpeckleException($"Could not check for connector updates: {slug}", ex, true, SentryLevel.Warning); } - public static string TimeAgo(DateTime timestamp) - { - TimeSpan timeAgo; - try - { - timeAgo = DateTime.Now.Subtract(timestamp); - } - catch (FormatException e) - { - return "never"; - } + return false; + } - if (timeAgo.TotalSeconds < 60) - return "just now"; - if (timeAgo.TotalMinutes < 60) - return $"{timeAgo.Minutes} minute{PluralS(timeAgo.Minutes)} ago"; - if (timeAgo.TotalHours < 24) - return $"{timeAgo.Hours} hour{PluralS(timeAgo.Hours)} ago"; - if (timeAgo.TotalDays < 7) - return $"{timeAgo.Days} day{PluralS(timeAgo.Days)} ago"; - if (timeAgo.TotalDays < 30) - return $"{timeAgo.Days / 7} week{PluralS(timeAgo.Days / 7)} ago"; - if (timeAgo.TotalDays < 365) - return $"{timeAgo.Days / 30} month{PluralS(timeAgo.Days / 30)} ago"; - - return $"{timeAgo.Days / 356} year{PluralS(timeAgo.Days / 356)} ago"; - } + public static string TimeAgo(string timestamp) + { + return 
TimeAgo(DateTime.Parse(timestamp)); + } - public static string PluralS(int num) + public static string TimeAgo(DateTime timestamp) + { + TimeSpan timeAgo; + try + { + timeAgo = DateTime.Now.Subtract(timestamp); + } + catch (FormatException e) { - return num != 1 ? "s" : ""; + return "never"; } - /// - /// Returns the correct location of the Speckle installation folder. Usually this would be the user's %appdata%/Speckle folder, unless the install was made for all users. - /// - /// The location of the Speckle installation folder - [Obsolete("Please use Helpers/SpecklePathProvider.InstallSpeckleFolderPath", true)] - public static string InstallSpeckleFolderPath => - Path.Combine(InstallApplicationDataPath, "Speckle"); - - /// - /// Returns the correct location of the Speckle folder for the current user. Usually this would be the user's %appdata%/Speckle folder. - /// - /// The location of the Speckle installation folder - [Obsolete("Please use Helpers/SpecklePathProvider.UserSpeckleFolderPath()", true)] - public static string UserSpeckleFolderPath => Path.Combine(UserApplicationDataPath, "Speckle"); - - /// - /// Returns the correct location of the AppData folder where Speckle is installed. Usually this would be the user's %appdata% folder, unless the install was made for all users. - /// This folder contains Kits and othe data that can be shared among users of the same machine. - /// - /// The location of the AppData folder where Speckle is installed - [Obsolete("Please use Helpers/SpecklePathProvider.InstallApplicationDataPath ", true)] - public static string InstallApplicationDataPath => - Assembly.GetAssembly(typeof(Helpers)).Location.Contains("ProgramData") - ? 
Environment.GetFolderPath( - Environment.SpecialFolder.CommonApplicationData, - Environment.SpecialFolderOption.Create - ) - : UserApplicationDataPath; - - /// - /// Envirenment Variable that allows to overwrite the - /// /// - private static string _speckleUserDataEnvVar = "SPECKLE_USERDATA_PATH"; - - /// - /// Returns the location of the User Application Data folder for the current roaming user, which contains user specific data such as accounts and cache. - /// - /// The location of the user's `%appdata%` folder. - [Obsolete("Please use Helpers/SpecklePathProvider.UserApplicationDataPath", true)] - public static string UserApplicationDataPath => - !string.IsNullOrEmpty(Environment.GetEnvironmentVariable(_speckleUserDataEnvVar)) - ? Environment.GetEnvironmentVariable(_speckleUserDataEnvVar) - : Environment.GetFolderPath( - Environment.SpecialFolder.ApplicationData, - Environment.SpecialFolderOption.Create - ); + if (timeAgo.TotalSeconds < 60) + return "just now"; + if (timeAgo.TotalMinutes < 60) + return $"{timeAgo.Minutes} minute{PluralS(timeAgo.Minutes)} ago"; + if (timeAgo.TotalHours < 24) + return $"{timeAgo.Hours} hour{PluralS(timeAgo.Hours)} ago"; + if (timeAgo.TotalDays < 7) + return $"{timeAgo.Days} day{PluralS(timeAgo.Days)} ago"; + if (timeAgo.TotalDays < 30) + return $"{timeAgo.Days / 7} week{PluralS(timeAgo.Days / 7)} ago"; + if (timeAgo.TotalDays < 365) + return $"{timeAgo.Days / 30} month{PluralS(timeAgo.Days / 30)} ago"; + + return $"{timeAgo.Days / 356} year{PluralS(timeAgo.Days / 356)} ago"; + } + + public static string PluralS(int num) + { + return num != 1 ? 
"s" : ""; } } diff --git a/Core/Core/Api/Operations/Operations.Receive.cs b/Core/Core/Api/Operations/Operations.Receive.cs index a6095c36dc..d78e8d8c06 100644 --- a/Core/Core/Api/Operations/Operations.Receive.cs +++ b/Core/Core/Api/Operations/Operations.Receive.cs @@ -7,7 +7,6 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; -using Serilog; using Serilog.Context; using Speckle.Core.Logging; using Speckle.Core.Models; @@ -15,274 +14,282 @@ using Speckle.Core.Transports; using Speckle.Newtonsoft.Json; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public enum SerializerVersion { - public enum SerializerVersion + V1, + V2 +} + +public static partial class Operations +{ + /// + /// Receives an object from a transport. + /// + /// + /// The transport to receive from. + /// Leave null to use the default cache. + /// Action invoked on progress iterations. + /// Action invoked on internal errors. + /// Action invoked once the total count of objects is known. + /// + public static Task Receive( + string objectId, + ITransport? remoteTransport = null, + ITransport? localTransport = null, + Action>? onProgressAction = null, + Action? onErrorAction = null, + Action? onTotalChildrenCountKnown = null, + bool disposeTransports = false, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) { - V1, - V2 + return Receive( + objectId, + CancellationToken.None, + remoteTransport, + localTransport, + onProgressAction, + onErrorAction, + onTotalChildrenCountKnown, + disposeTransports, + serializerVersion + ); } - public static partial class Operations + /// + /// Receives an object from a transport. + /// + /// + /// A cancellation token that can be used by other objects or threads to send notice of cancellation. + /// The transport to receive from. + /// Leave null to use the default cache. + /// Action invoked on progress iterations. + /// Action invoked on internal errors. 
+ /// Action invoked once the total count of objects is known. + /// + public static async Task Receive( + string objectId, + CancellationToken cancellationToken, + ITransport? remoteTransport = null, + ITransport? localTransport = null, + Action>? onProgressAction = null, + Action? onErrorAction = null, + Action? onTotalChildrenCountKnown = null, + bool disposeTransports = false, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) { - /// - /// Receives an object from a transport. - /// - /// - /// The transport to receive from. - /// Leave null to use the default cache. - /// Action invoked on progress iterations. - /// Action invoked on internal errors. - /// Action invoked once the total count of objects is known. - /// - public static Task Receive( - string objectId, - ITransport? remoteTransport = null, - ITransport? localTransport = null, - Action>? onProgressAction = null, - Action? onErrorAction = null, - Action? onTotalChildrenCountKnown = null, - bool disposeTransports = false, - SerializerVersion serializerVersion = SerializerVersion.V2 - ) + var hasUserProvidedLocalTransport = localTransport != null; + localTransport ??= new SQLiteTransport(); + using (LogContext.PushProperty("remoteTransportContext", remoteTransport?.TransportContext)) + using (LogContext.PushProperty("localTransportContext", localTransport.TransportContext)) + using (LogContext.PushProperty("objectId", objectId)) { - return Receive( + var timer = Stopwatch.StartNew(); + SpeckleLog.Logger.Information( + "Starting receive {objectId} from transports {localTransport} / {remoteTransport}", objectId, - CancellationToken.None, - remoteTransport, - localTransport, - onProgressAction, - onErrorAction, - onTotalChildrenCountKnown, - disposeTransports, - serializerVersion + localTransport.TransportName, + remoteTransport?.TransportName ); - } - - /// - /// Receives an object from a transport. 
- /// - /// - /// A cancellation token that can be used by other objects or threads to send notice of cancellation. - /// The transport to receive from. - /// Leave null to use the default cache. - /// Action invoked on progress iterations. - /// Action invoked on internal errors. - /// Action invoked once the total count of objects is known. - /// - public static async Task Receive( - string objectId, - CancellationToken cancellationToken, - ITransport? remoteTransport = null, - ITransport? localTransport = null, - Action>? onProgressAction = null, - Action? onErrorAction = null, - Action? onTotalChildrenCountKnown = null, - bool disposeTransports = false, - SerializerVersion serializerVersion = SerializerVersion.V2 - ) - { - var hasUserProvidedLocalTransport = localTransport != null; - localTransport ??= new SQLiteTransport(); - using (LogContext.PushProperty("remoteTransportContext", remoteTransport?.TransportContext)) - using (LogContext.PushProperty("localTransportContext", localTransport.TransportContext)) - using (LogContext.PushProperty("objectId", objectId)) - { - var timer = Stopwatch.StartNew(); - SpeckleLog.Logger.Information( - "Starting receive {objectId} from transports {localTransport} / {remoteTransport}", - objectId, - localTransport.TransportName, - remoteTransport?.TransportName - ); - - BaseObjectSerializer? serializer = null; - JsonSerializerSettings? settings = null; - BaseObjectDeserializerV2? 
serializerV2 = null; - if (serializerVersion == SerializerVersion.V1) - (serializer, settings) = GetSerializerInstance(); - else - serializerV2 = new BaseObjectDeserializerV2(); - - var localProgressDict = new ConcurrentDictionary(); - var internalProgressAction = GetInternalProgressAction(localProgressDict, onProgressAction); - - - localTransport.OnErrorAction = onErrorAction; - localTransport.OnProgressAction = internalProgressAction; - localTransport.CancellationToken = cancellationToken; - if (serializerVersion == SerializerVersion.V1) - { - serializer!.ReadTransport = localTransport; - serializer.OnProgressAction = internalProgressAction; - serializer.OnErrorAction = onErrorAction; - serializer.CancellationToken = cancellationToken; - } - else - { - serializerV2!.ReadTransport = localTransport; - serializerV2.OnProgressAction = internalProgressAction; - serializerV2.OnErrorAction = onErrorAction; - serializerV2.CancellationToken = cancellationToken; - if (remoteTransport is IBlobCapableTransport t) - { - serializerV2.BlobStorageFolder = t.BlobStorageFolder; - } - } - - // First we try and get the object from the local transport. If it's there, we assume all its children are there, and proceed with deserialisation. - // This assumption is hard-wired into the SDK. Read below. - var objString = localTransport.GetObject(objectId); + BaseObjectSerializer? serializer = null; + JsonSerializerSettings? settings = null; + BaseObjectDeserializerV2? 
serializerV2 = null; + if (serializerVersion == SerializerVersion.V1) + (serializer, settings) = GetSerializerInstance(); + else + serializerV2 = new BaseObjectDeserializerV2(); - if (objString != null) - { - // Shoot out the total children count - var partial = JsonConvert.DeserializeObject(objString); - if (partial == null) throw new SpeckleDeserializeException($"Failed to deserialize {nameof(objString)} into {nameof(Placeholder)}"); - if (partial.__closure != null) - onTotalChildrenCountKnown?.Invoke(partial.__closure.Count); + var localProgressDict = new ConcurrentDictionary(); + var internalProgressAction = GetInternalProgressAction(localProgressDict, onProgressAction); - Base? localRes = DeserializeStringToBase(serializerVersion, objString, settings, serializerV2); + localTransport.OnErrorAction = onErrorAction; + localTransport.OnProgressAction = internalProgressAction; + localTransport.CancellationToken = cancellationToken; - if ( - (disposeTransports || !hasUserProvidedLocalTransport) - && localTransport is IDisposable dispLocal - ) - dispLocal.Dispose(); - if ( - disposeTransports - && remoteTransport != null - && remoteTransport is IDisposable dispRemote - ) - dispRemote.Dispose(); + if (serializerVersion == SerializerVersion.V1) + { + serializer!.ReadTransport = localTransport; + serializer.OnProgressAction = internalProgressAction; + serializer.OnErrorAction = onErrorAction; + serializer.CancellationToken = cancellationToken; + } + else + { + serializerV2!.ReadTransport = localTransport; + serializerV2.OnProgressAction = internalProgressAction; + serializerV2.OnErrorAction = onErrorAction; + serializerV2.CancellationToken = cancellationToken; + if (remoteTransport is IBlobCapableTransport t) + serializerV2.BlobStorageFolder = t.BlobStorageFolder; + } - timer.Stop(); - SpeckleLog.Logger.ForContext("deserializerElapsed", serializerV2?.Elapsed) - .ForContext( - "transportElapsedBreakdown", - new ITransport?[] { localTransport, remoteTransport } - .Where(t 
=> t != null) - .ToDictionary(t => t!.TransportName, t => t!.Elapsed) - ) - .Information( - "Finished receiving {objectId} from {source} in {elapsed} seconds", - objectId, - localTransport.TransportName, - timer.Elapsed.TotalSeconds - ); - return localRes; - } - else if (remoteTransport == null) - { - var ex = new SpeckleException( - $"Could not find specified object using the local transport {localTransport.TransportName}, and you didn't provide a fallback remote from which to pull it." - ); + // First we try and get the object from the local transport. If it's there, we assume all its children are there, and proceed with deserialisation. + // This assumption is hard-wired into the SDK. Read below. + var objString = localTransport.GetObject(objectId); - SpeckleLog.Logger.Error( - ex, - "Cannot receive object from the given transports {exceptionMessage}", - ex.Message + if (objString != null) + { + // Shoot out the total children count + var partial = JsonConvert.DeserializeObject(objString); + if (partial == null) + throw new SpeckleDeserializeException( + $"Failed to deserialize {nameof(objString)} into {nameof(Placeholder)}" ); - throw ex; - } + if (partial.__closure != null) + onTotalChildrenCountKnown?.Invoke(partial.__closure.Count); - // If we've reached this stage, it means that we didn't get a local transport hit on our object, so we will proceed to get it from the provided remote transport. - // This is done by copying itself and all its children from the remote transport into the local one. - remoteTransport.OnErrorAction = onErrorAction; - remoteTransport.OnProgressAction = internalProgressAction; - remoteTransport.CancellationToken = cancellationToken; - - SpeckleLog.Logger.Debug( - "Cannot find object {objectId} in the local transport, hitting remote {transportName}.", - remoteTransport.TransportName - ); - objString = await remoteTransport.CopyObjectAndChildren( - objectId, - localTransport, - onTotalChildrenCountKnown + Base? 
localRes = DeserializeStringToBase( + serializerVersion, + objString, + settings, + serializerV2 ); - // Wait for the local transport to finish "writing" - in this case, it signifies that the remote transport has done pushing copying objects into it. (TODO: I can see some scenarios where latency can screw things up, and we should rather wait on the remote transport). - await localTransport.WriteComplete(); - - // Proceed to deserialise the object, now safely knowing that all its children are present in the local (fast) transport. - - Base? res = DeserializeStringToBase(serializerVersion, objString, settings, serializerV2); if ( - (disposeTransports || !hasUserProvidedLocalTransport) && localTransport is IDisposable dl + (disposeTransports || !hasUserProvidedLocalTransport) + && localTransport is IDisposable dispLocal + ) + dispLocal.Dispose(); + if ( + disposeTransports && remoteTransport != null && remoteTransport is IDisposable dispRemote ) - dl.Dispose(); - if (disposeTransports && remoteTransport is IDisposable dr) - dr.Dispose(); + dispRemote.Dispose(); - SpeckleLog.Logger.ForContext("deserializerElapsed", serializerV2.Elapsed) + timer.Stop(); + SpeckleLog.Logger + .ForContext("deserializerElapsed", serializerV2?.Elapsed) .ForContext( "transportElapsedBreakdown", new ITransport?[] { localTransport, remoteTransport } .Where(t => t != null) - .ToDictionary(t => t.TransportName, t => t.Elapsed) + .ToDictionary(t => t!.TransportName, t => t!.Elapsed) ) .Information( "Finished receiving {objectId} from {source} in {elapsed} seconds", objectId, - remoteTransport.TransportName, + localTransport.TransportName, timer.Elapsed.TotalSeconds ); - return res; + return localRes; + } + else if (remoteTransport == null) + { + var ex = new SpeckleException( + $"Could not find specified object using the local transport {localTransport.TransportName}, and you didn't provide a fallback remote from which to pull it." 
+ ); - // Summary: - // Basically, receiving an object (and all its subchildren) operates with two transports, one that is potentially slow, and one that is fast. - // The fast transport ("localTransport") is used syncronously inside the deserialisation routine to get the value of nested references and set them. The slow transport ("remoteTransport") is used to get the raw data and populate the local transport with all necessary data for a successful deserialisation of the object. - // Note: if properly implemented, there is no hard distinction between what is a local or remote transport; it's still just a transport. So, for example, if you want to receive an object without actually writing it first to a local transport, you can just pass a Server/S3 transport as a local transport. - // This is not reccommended, but shows what you can do. Another tidbit: the local transport does not need to be disk-bound; it can easily be an in memory transport. In memory transports are the fastest ones, but they're of limited use for more + SpeckleLog.Logger.Error( + ex, + "Cannot receive object from the given transports {exceptionMessage}", + ex.Message + ); + throw ex; } + // If we've reached this stage, it means that we didn't get a local transport hit on our object, so we will proceed to get it from the provided remote transport. + // This is done by copying itself and all its children from the remote transport into the local one. 
+ remoteTransport.OnErrorAction = onErrorAction; + remoteTransport.OnProgressAction = internalProgressAction; + remoteTransport.CancellationToken = cancellationToken; + + SpeckleLog.Logger.Debug( + "Cannot find object {objectId} in the local transport, hitting remote {transportName}.", + remoteTransport.TransportName + ); + objString = await remoteTransport + .CopyObjectAndChildren(objectId, localTransport, onTotalChildrenCountKnown) + .ConfigureAwait(false); + // Wait for the local transport to finish "writing" - in this case, it signifies that the remote transport has done pushing copying objects into it. (TODO: I can see some scenarios where latency can screw things up, and we should rather wait on the remote transport). + await localTransport.WriteComplete().ConfigureAwait(false); + + // Proceed to deserialise the object, now safely knowing that all its children are present in the local (fast) transport. + + Base? res = DeserializeStringToBase(serializerVersion, objString, settings, serializerV2); + if ((disposeTransports || !hasUserProvidedLocalTransport) && localTransport is IDisposable dl) + dl.Dispose(); + if (disposeTransports && remoteTransport is IDisposable dr) + dr.Dispose(); + + SpeckleLog.Logger + .ForContext("deserializerElapsed", serializerV2.Elapsed) + .ForContext( + "transportElapsedBreakdown", + new ITransport?[] { localTransport, remoteTransport } + .Where(t => t != null) + .ToDictionary(t => t.TransportName, t => t.Elapsed) + ) + .Information( + "Finished receiving {objectId} from {source} in {elapsed} seconds", + objectId, + remoteTransport.TransportName, + timer.Elapsed.TotalSeconds + ); + return res; + + // Summary: + // Basically, receiving an object (and all its subchildren) operates with two transports, one that is potentially slow, and one that is fast. + // The fast transport ("localTransport") is used syncronously inside the deserialisation routine to get the value of nested references and set them. 
The slow transport ("remoteTransport") is used to get the raw data and populate the local transport with all necessary data for a successful deserialisation of the object. + // Note: if properly implemented, there is no hard distinction between what is a local or remote transport; it's still just a transport. So, for example, if you want to receive an object without actually writing it first to a local transport, you can just pass a Server/S3 transport as a local transport. + // This is not reccommended, but shows what you can do. Another tidbit: the local transport does not need to be disk-bound; it can easily be an in memory transport. In memory transports are the fastest ones, but they're of limited use for more } + } - private static Base? DeserializeStringToBase(SerializerVersion serializerVersion, string objString, JsonSerializerSettings? settings, - BaseObjectDeserializerV2? serializerV2) - { - Base? localRes; - if (serializerVersion == SerializerVersion.V1) - localRes = JsonConvert.DeserializeObject(objString, settings); - else + private static Base? DeserializeStringToBase( + SerializerVersion serializerVersion, + string objString, + JsonSerializerSettings? settings, + BaseObjectDeserializerV2? serializerV2 + ) + { + Base? 
localRes; + if (serializerVersion == SerializerVersion.V1) + localRes = JsonConvert.DeserializeObject(objString, settings); + else + try { - try - { - localRes = serializerV2!.Deserialize(objString); - } - catch (OperationCanceledException e) - { + localRes = serializerV2!.Deserialize(objString); + } + catch (OperationCanceledException e) + { + throw; + } + catch (Exception ex) + { + SpeckleLog.Logger.Error( + ex, + "A deserialization error has occurred {exceptionMessage}", + ex.Message + ); + if (serializerV2.OnErrorAction == null) throw; - } - catch (Exception ex) - { - SpeckleLog.Logger.Error(ex, "A deserialization error has occurred {exceptionMessage}", ex.Message); - if (serializerV2.OnErrorAction == null) - throw; - serializerV2.OnErrorAction.Invoke( - $"A deserialization error has occurred: {ex.Message}", - new SpeckleDeserializeException($"A deserialization error has occurred", ex) - ); - localRes = null; - } + serializerV2.OnErrorAction.Invoke( + $"A deserialization error has occurred: {ex.Message}", + new SpeckleDeserializeException($"A deserialization error has occurred", ex) + ); + localRes = null; } - return localRes; - } + return localRes; + } - internal class Placeholder - { - public Dictionary? __closure { get; set; } = new Dictionary(); - } + internal class Placeholder + { + public Dictionary? __closure { get; set; } = new(); + } - public class SpeckleDeserializeException : SpeckleException - { - public SpeckleDeserializeException() { } + public class SpeckleDeserializeException : SpeckleException + { + public SpeckleDeserializeException() { } - public SpeckleDeserializeException(string message, Exception? inner = null) : base(message, inner) { } - } + public SpeckleDeserializeException(string message, Exception? 
inner = null) + : base(message, inner) { } + + public SpeckleDeserializeException(string message) + : base(message) { } } } diff --git a/Core/Core/Api/Operations/Operations.Send.cs b/Core/Core/Api/Operations/Operations.Send.cs index 77b98ddd67..f68def6ef4 100644 --- a/Core/Core/Api/Operations/Operations.Send.cs +++ b/Core/Core/Api/Operations/Operations.Send.cs @@ -6,7 +6,6 @@ using System.Linq; using System.Threading; using System.Threading.Tasks; -using Serilog; using Serilog.Context; using Speckle.Core.Logging; using Speckle.Core.Models; @@ -15,193 +14,191 @@ using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Linq; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +public static partial class Operations { - public static partial class Operations + #region Pushing objects + + /// + /// Sends an object via the provided transports. Defaults to the local cache. + /// + /// The object you want to send. + /// Where you want to send them. + /// Toggle for the default cache. If set to false, it will only send to the provided transports. + /// Action that gets triggered on every progress tick (keeps track of all transports). + /// Use this to capture and handle any errors from within the transports. + /// The id (hash) of the object. + public static Task Send( + Base @object, + List? transports = null, + bool useDefaultCache = true, + Action>? onProgressAction = null, + Action? onErrorAction = null, + bool disposeTransports = false, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) { - #region Pushing objects - - /// - /// Sends an object via the provided transports. Defaults to the local cache. - /// - /// The object you want to send. - /// Where you want to send them. - /// Toggle for the default cache. If set to false, it will only send to the provided transports. - /// Action that gets triggered on every progress tick (keeps track of all transports). - /// Use this to capture and handle any errors from within the transports. 
- /// The id (hash) of the object. - public static Task Send( - Base @object, - List? transports = null, - bool useDefaultCache = true, - Action>? onProgressAction = null, - Action? onErrorAction = null, - bool disposeTransports = false, - SerializerVersion serializerVersion = SerializerVersion.V2 - ) - { - return Send( - @object, - CancellationToken.None, - transports, - useDefaultCache, - onProgressAction, - onErrorAction, - disposeTransports, - serializerVersion - ); - } + return Send( + @object, + CancellationToken.None, + transports, + useDefaultCache, + onProgressAction, + onErrorAction, + disposeTransports, + serializerVersion + ); + } - /// - /// Sends an object via the provided transports. Defaults to the local cache. - /// - /// The object you want to send. - /// A cancellation token that can be used by other objects or threads to send notice of cancellation. - /// Where you want to send them. - /// Toggle for the default cache. If set to false, it will only send to the provided transports. - /// Action that gets triggered on every progress tick (keeps track of all transports). - /// Use this to capture and handle any errors from within the transports. - /// The id (hash) of the object. - public static async Task Send( - Base @object, - CancellationToken cancellationToken, - List? transports = null, - bool useDefaultCache = true, - Action>? onProgressAction = null, - Action? onErrorAction = null, - bool disposeTransports = false, - SerializerVersion serializerVersion = SerializerVersion.V2 - ) - { - transports ??= new List(); + /// + /// Sends an object via the provided transports. Defaults to the local cache. + /// + /// The object you want to send. + /// A cancellation token that can be used by other objects or threads to send notice of cancellation. + /// Where you want to send them. + /// Toggle for the default cache. If set to false, it will only send to the provided transports. 
+ /// Action that gets triggered on every progress tick (keeps track of all transports). + /// Use this to capture and handle any errors from within the transports. + /// The id (hash) of the object. + public static async Task Send( + Base @object, + CancellationToken cancellationToken, + List? transports = null, + bool useDefaultCache = true, + Action>? onProgressAction = null, + Action? onErrorAction = null, + bool disposeTransports = false, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) + { + transports ??= new List(); - if (transports.Count == 0 && useDefaultCache == false) - throw new ArgumentException( - "You need to provide at least one transport: cannot send with an empty transport list and no default cache.", - nameof(transports) - ); + if (transports.Count == 0 && useDefaultCache == false) + throw new ArgumentException( + "You need to provide at least one transport: cannot send with an empty transport list and no default cache.", + nameof(transports) + ); - if (useDefaultCache) - transports.Insert(0, new SQLiteTransport() { TransportName = "LC" }); + if (useDefaultCache) + transports.Insert(0, new SQLiteTransport() { TransportName = "LC" }); - var transportContext = transports.ToDictionary(t => t.TransportName, t => t.TransportContext); + var transportContext = transports.ToDictionary(t => t.TransportName, t => t.TransportContext); - // make sure all logs in the operation have the proper context - using (LogContext.PushProperty("transportContext", transportContext)) - using (LogContext.PushProperty("correlationId", Guid.NewGuid().ToString())) - { - var sendTimer = Stopwatch.StartNew(); - SpeckleLog.Logger.Information("Starting send operation"); + // make sure all logs in the operation have the proper context + using (LogContext.PushProperty("transportContext", transportContext)) + using (LogContext.PushProperty("correlationId", Guid.NewGuid().ToString())) + { + var sendTimer = Stopwatch.StartNew(); + 
SpeckleLog.Logger.Information("Starting send operation"); - BaseObjectSerializer? serializer = null; - JsonSerializerSettings? settings = null; - BaseObjectSerializerV2? serializerV2 = null; - if (serializerVersion == SerializerVersion.V1) - (serializer, settings) = GetSerializerInstance(); - else - serializerV2 = new BaseObjectSerializerV2(); + BaseObjectSerializer? serializer = null; + JsonSerializerSettings? settings = null; + BaseObjectSerializerV2? serializerV2 = null; + if (serializerVersion == SerializerVersion.V1) + (serializer, settings) = GetSerializerInstance(); + else + serializerV2 = new BaseObjectSerializerV2(); - var localProgressDict = new ConcurrentDictionary(); - var internalProgressAction = Operations.GetInternalProgressAction( - localProgressDict, - onProgressAction - ); + var localProgressDict = new ConcurrentDictionary(); + var internalProgressAction = GetInternalProgressAction(localProgressDict, onProgressAction); - if (serializerVersion == SerializerVersion.V1) - { - serializer!.OnProgressAction = internalProgressAction; - serializer.CancellationToken = cancellationToken; - serializer.OnErrorAction = onErrorAction; - } - else - { - serializerV2!.OnProgressAction = internalProgressAction; - serializerV2.CancellationToken = cancellationToken; - serializerV2.OnErrorAction = onErrorAction; - } + if (serializerVersion == SerializerVersion.V1) + { + serializer!.OnProgressAction = internalProgressAction; + serializer.CancellationToken = cancellationToken; + serializer.OnErrorAction = onErrorAction; + } + else + { + serializerV2!.OnProgressAction = internalProgressAction; + serializerV2.CancellationToken = cancellationToken; + serializerV2.OnErrorAction = onErrorAction; + } - foreach (var t in transports) - { - t.OnProgressAction = internalProgressAction; - t.CancellationToken = cancellationToken; - t.OnErrorAction = onErrorAction; - t.BeginWrite(); - - if (serializerVersion == SerializerVersion.V1) - serializer!.WriteTransports.Add(t); - else - 
serializerV2!.WriteTransports.Add(t); - } + foreach (var t in transports) + { + t.OnProgressAction = internalProgressAction; + t.CancellationToken = cancellationToken; + t.OnErrorAction = onErrorAction; + t.BeginWrite(); - string obj; - List transportAwaits; if (serializerVersion == SerializerVersion.V1) - { - obj = JsonConvert.SerializeObject(@object, settings); - transportAwaits = serializer!.WriteTransports.Select(t => t.WriteComplete()).ToList(); - } + serializer!.WriteTransports.Add(t); else - { - obj = serializerV2!.Serialize(@object); - transportAwaits = serializerV2.WriteTransports.Select(t => t.WriteComplete()).ToList(); - } + serializerV2!.WriteTransports.Add(t); + } - if (cancellationToken.IsCancellationRequested) - { - SpeckleLog.Logger.Information( - "Send operation cancelled after {elapsed} seconds", - sendTimer.Elapsed.TotalSeconds - ); - cancellationToken.ThrowIfCancellationRequested(); - } + string obj; + List transportAwaits; + if (serializerVersion == SerializerVersion.V1) + { + obj = JsonConvert.SerializeObject(@object, settings); + transportAwaits = serializer!.WriteTransports.Select(t => t.WriteComplete()).ToList(); + } + else + { + obj = serializerV2!.Serialize(@object); + transportAwaits = serializerV2.WriteTransports.Select(t => t.WriteComplete()).ToList(); + } - await Task.WhenAll(transportAwaits).ConfigureAwait(false); + if (cancellationToken.IsCancellationRequested) + { + SpeckleLog.Logger.Information( + "Send operation cancelled after {elapsed} seconds", + sendTimer.Elapsed.TotalSeconds + ); + cancellationToken.ThrowIfCancellationRequested(); + } - foreach (var t in transports) - { - t.EndWrite(); - if (useDefaultCache && t is SQLiteTransport lc && lc.TransportName == "LC") - { - lc.Dispose(); - continue; - } - if (disposeTransports && t is IDisposable disp) - disp.Dispose(); - } + await Task.WhenAll(transportAwaits).ConfigureAwait(false); - if (cancellationToken.IsCancellationRequested) + foreach (var t in transports) + { + 
t.EndWrite(); + if (useDefaultCache && t is SQLiteTransport lc && lc.TransportName == "LC") { - SpeckleLog.Logger.Information( - "Send operation cancelled after {elapsed}", - sendTimer.Elapsed.TotalSeconds - ); - cancellationToken.ThrowIfCancellationRequested(); + lc.Dispose(); + continue; } + if (disposeTransports && t is IDisposable disp) + disp.Dispose(); + } - var idToken = JObject.Parse(obj).GetValue("id"); - if (idToken == null) throw new SpeckleException("Failed to get id of serialized object"); - var hash = idToken.ToString(); - - sendTimer.Stop(); - SpeckleLog.Logger.ForContext( - "transportElapsedBreakdown", - transports.ToDictionary(t => t.TransportName, t => t.Elapsed) - ) - .ForContext( - "note", - "the elapsed summary doesn't need to add up to the total elapsed... Threading magic..." - ) - .ForContext("serializerElapsed", serializerV2?.Elapsed) - .Information( - "Finished sending {objectCount} objects after {elapsed}, result {objectId}", - transports.Max(t => t.SavedObjectCount), - sendTimer.Elapsed.TotalSeconds, - hash - ); - return hash; + if (cancellationToken.IsCancellationRequested) + { + SpeckleLog.Logger.Information( + "Send operation cancelled after {elapsed}", + sendTimer.Elapsed.TotalSeconds + ); + cancellationToken.ThrowIfCancellationRequested(); } - } - #endregion + var idToken = JObject.Parse(obj).GetValue("id"); + if (idToken == null) + throw new SpeckleException("Failed to get id of serialized object"); + var hash = idToken.ToString(); + + sendTimer.Stop(); + SpeckleLog.Logger + .ForContext( + "transportElapsedBreakdown", + transports.ToDictionary(t => t.TransportName, t => t.Elapsed) + ) + .ForContext( + "note", + "the elapsed summary doesn't need to add up to the total elapsed... Threading magic..." 
+ ) + .ForContext("serializerElapsed", serializerV2?.Elapsed) + .Information( + "Finished sending {objectCount} objects after {elapsed}, result {objectId}", + transports.Max(t => t.SavedObjectCount), + sendTimer.Elapsed.TotalSeconds, + hash + ); + return hash; + } } + + #endregion } diff --git a/Core/Core/Api/Operations/Operations.Serialize.cs b/Core/Core/Api/Operations/Operations.Serialize.cs index e932ab4795..30a6722af0 100644 --- a/Core/Core/Api/Operations/Operations.Serialize.cs +++ b/Core/Core/Api/Operations/Operations.Serialize.cs @@ -1,140 +1,157 @@ -using System; +using System; using System.Collections.Generic; using System.Threading; using Speckle.Core.Models; using Speckle.Core.Serialisation; using Speckle.Newtonsoft.Json; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +// TODO: cleanup a bit +public static partial class Operations { - // TODO: cleanup a bit - public static partial class Operations + /// + /// Serializes a given object. Note: if you want to save and persist an object to a Speckle Transport or Server, please use any of the "Send" methods. See . + /// + /// + /// A json string representation of the object. + public static string Serialize(Base @object) { + return Serialize(@object, CancellationToken.None); + } - /// - /// Serializes a given object. Note: if you want to save and persist an object to a Speckle Transport or Server, please use any of the "Send" methods. See . - /// - /// - /// A json string representation of the object. - public static string Serialize(Base @object) + /// + /// Serializes a given object. Note: if you want to save and persist an object to Speckle Transport or Server, please use any of the "Send" methods. See . + /// + /// + /// Propagates notification that operations should be canceled. + /// A json string representation of the object. 
+ public static string Serialize( + Base @object, + CancellationToken cancellationToken, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) + { + if (serializerVersion == SerializerVersion.V1) { - return Serialize(@object, CancellationToken.None); - } + var (serializer, settings) = GetSerializerInstance(); + serializer.CancellationToken = cancellationToken; - /// - /// Serializes a given object. Note: if you want to save and persist an object to Speckle Transport or Server, please use any of the "Send" methods. See . - /// - /// - /// Propagates notification that operations should be canceled. - /// A json string representation of the object. - public static string Serialize(Base @object, CancellationToken cancellationToken, SerializerVersion serializerVersion = SerializerVersion.V2) + return JsonConvert.SerializeObject(@object, settings); + } + else { - if (serializerVersion == SerializerVersion.V1) - { - var (serializer, settings) = GetSerializerInstance(); - serializer.CancellationToken = cancellationToken; - - return JsonConvert.SerializeObject(@object, settings); - } - else - { - var serializer = new BaseObjectSerializerV2(); - serializer.CancellationToken = cancellationToken; - return serializer.Serialize(@object); - } + var serializer = new BaseObjectSerializerV2(); + serializer.CancellationToken = cancellationToken; + return serializer.Serialize(@object); } + } + /// + /// Serializes a list of objects. Note: if you want to save and persist objects to speckle, please use any of the "Send" methods. + /// + /// + /// + [Obsolete( + "Please use the Serialize(Base @object) function. This function will be removed in later versions." + )] + public static string Serialize(List objects) + { + var (_, settings) = GetSerializerInstance(); + return JsonConvert.SerializeObject(objects, settings); + } - /// - /// Serializes a list of objects. Note: if you want to save and persist objects to speckle, please use any of the "Send" methods. 
- /// - /// - /// - [Obsolete("Please use the Serialize(Base @object) function. This function will be removed in later versions.")] - public static string Serialize(List objects) - { - var (_, settings) = GetSerializerInstance(); - return JsonConvert.SerializeObject(objects, settings); - } + /// + /// Serializes a list of objects. Note: if you want to save and persist objects to speckle, please use any of the "Send" methods. + /// + /// + /// + [Obsolete( + "Please use the Serialize(Base @object) function. This function will be removed in later versions." + )] + public static string Serialize(Dictionary objects) + { + var (_, settings) = GetSerializerInstance(); + return JsonConvert.SerializeObject(objects, settings); + } - /// - /// Serializes a list of objects. Note: if you want to save and persist objects to speckle, please use any of the "Send" methods. - /// - /// - /// - [Obsolete("Please use the Serialize(Base @object) function. This function will be removed in later versions.")] - public static string Serialize(Dictionary objects) - { - var (_, settings) = GetSerializerInstance(); - return JsonConvert.SerializeObject(objects, settings); - } + /// + /// Deserializes a given object. Note: if you want to pull an object from a Speckle Transport or Server, please use any of the . + /// + /// The json string representation of a speckle object that you want to deserialise. + /// + public static Base Deserialize(string @object) + { + return Deserialize(@object, CancellationToken.None); + } - /// - /// Deserializes a given object. Note: if you want to pull an object from a Speckle Transport or Server, please use any of the . - /// - /// The json string representation of a speckle object that you want to deserialise. - /// - public static Base Deserialize(string @object) + /// + /// Deserializes a given object. Note: if you want to pull an object from a Speckle Transport or Server, please use any of the . 
+ /// + /// The json string representation of a speckle object that you want to deserialise. + /// Propagates notification that operations should be canceled. + /// + public static Base Deserialize( + string @object, + CancellationToken cancellationToken, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) + { + if (serializerVersion == SerializerVersion.V1) { - return Deserialize(@object, CancellationToken.None); + var (serializer, settings) = GetSerializerInstance(); + serializer.CancellationToken = cancellationToken; + return JsonConvert.DeserializeObject(@object, settings); } - - /// - /// Deserializes a given object. Note: if you want to pull an object from a Speckle Transport or Server, please use any of the . - /// - /// The json string representation of a speckle object that you want to deserialise. - /// Propagates notification that operations should be canceled. - /// - public static Base Deserialize(string @object, CancellationToken cancellationToken, SerializerVersion serializerVersion = SerializerVersion.V2) + else { - if (serializerVersion == SerializerVersion.V1) - { - var (serializer, settings) = GetSerializerInstance(); - serializer.CancellationToken = cancellationToken; - return JsonConvert.DeserializeObject(@object, settings); - } - else - { - var deserializer = new BaseObjectDeserializerV2(); - deserializer.CancellationToken = cancellationToken; - return deserializer.Deserialize(@object); - } + var deserializer = new BaseObjectDeserializerV2(); + deserializer.CancellationToken = cancellationToken; + return deserializer.Deserialize(@object); } + } - /// - /// Deserializes a list of objects into an array. Note: if you want to pull an object from speckle (either local or remote), please use any of the "Receive" methods. - /// - /// - /// - [Obsolete("Please use the Deserialize(Base @object) function. 
This function will be removed in later versions.")] - public static List DeserializeArray(string objectArr, SerializerVersion serializerVersion = SerializerVersion.V2) + /// + /// Deserializes a list of objects into an array. Note: if you want to pull an object from speckle (either local or remote), please use any of the "Receive" methods. + /// + /// + /// + [Obsolete( + "Please use the Deserialize(Base @object) function. This function will be removed in later versions." + )] + public static List DeserializeArray( + string objectArr, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) + { + if (serializerVersion == SerializerVersion.V1) { - if (serializerVersion == SerializerVersion.V1) - { - var (_, settings) = GetSerializerInstance(); - return JsonConvert.DeserializeObject>(objectArr, settings); - } - else - { - var deserializer = new BaseObjectDeserializerV2(); - List deserialized = deserializer.DeserializeTransportObject(objectArr) as List; - List ret = new List(); - foreach (object obj in deserialized) - ret.Add((Base)obj); - return ret; - } + var (_, settings) = GetSerializerInstance(); + return JsonConvert.DeserializeObject>(objectArr, settings); } - - /// - /// Deserializes a dictionary object. Note: if you want to pull an object from speckle (either local or remote), please use any of the "Receive" methods. - /// - /// - /// - [Obsolete("Please use the Deserialize(Base @object) function. This function will be removed in later versions.")] - public static Dictionary DeserializeDictionary(string dictionary) + else { - var (_, settings) = GetSerializerInstance(); - return JsonConvert.DeserializeObject>(dictionary, settings); + var deserializer = new BaseObjectDeserializerV2(); + List deserialized = + deserializer.DeserializeTransportObject(objectArr) as List; + List ret = new(); + foreach (object obj in deserialized) + ret.Add((Base)obj); + return ret; } } + + /// + /// Deserializes a dictionary object. 
Note: if you want to pull an object from speckle (either local or remote), please use any of the "Receive" methods. + /// + /// + /// + [Obsolete( + "Please use the Deserialize(Base @object) function. This function will be removed in later versions." + )] + public static Dictionary DeserializeDictionary(string dictionary) + { + var (_, settings) = GetSerializerInstance(); + return JsonConvert.DeserializeObject>(dictionary, settings); + } } diff --git a/Core/Core/Api/Operations/Operations.cs b/Core/Core/Api/Operations/Operations.cs index 4f65fdc193..ffa6aa5bd0 100644 --- a/Core/Core/Api/Operations/Operations.cs +++ b/Core/Core/Api/Operations/Operations.cs @@ -1,53 +1,57 @@ -using System; +using System; using System.Collections.Concurrent; using System.Collections.Generic; using Speckle.Core.Serialisation; using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Serialization; -namespace Speckle.Core.Api +namespace Speckle.Core.Api; + +/// +/// Exposes several key methods for interacting with Speckle.Core. +/// Serialize/Deserialize +/// Push/Pull (methods to serialize & send data to one or more servers) +/// +public static partial class Operations { /// - /// Exposes several key methods for interacting with Speckle.Core. - /// Serialize/Deserialize - /// Push/Pull (methods to serialize & send data to one or more servers) + /// Convenience method to instantiate an instance of the default object serializer and settings pre-populated with it. /// - public static partial class Operations + public static (BaseObjectSerializer, JsonSerializerSettings) GetSerializerInstance() { - /// - /// Convenience method to instantiate an instance of the default object serializer and settings pre-populated with it. 
- /// - public static (BaseObjectSerializer, JsonSerializerSettings) GetSerializerInstance() + var serializer = new BaseObjectSerializer(); + var settings = new JsonSerializerSettings() { - var serializer = new BaseObjectSerializer(); - var settings = new JsonSerializerSettings() - { - NullValueHandling = NullValueHandling.Ignore, - ContractResolver = new CamelCasePropertyNamesContractResolver(), - Formatting = Formatting.None, - ReferenceLoopHandling = ReferenceLoopHandling.Ignore, - Converters = new List { serializer } - }; + NullValueHandling = NullValueHandling.Ignore, + ContractResolver = new CamelCasePropertyNamesContractResolver(), + Formatting = Formatting.None, + ReferenceLoopHandling = ReferenceLoopHandling.Ignore, + Converters = new List { serializer } + }; - return (serializer, settings); - } + return (serializer, settings); + } - /// - /// Factory for progress actions used internally inside send & receive methods. - /// - /// - /// - /// - private static Action GetInternalProgressAction(ConcurrentDictionary localProgressDict, Action> onProgressAction = null) - { - return new Action((name, processed) => + /// + /// Factory for progress actions used internally inside send & receive methods. 
+ /// + /// + /// + /// + private static Action GetInternalProgressAction( + ConcurrentDictionary localProgressDict, + Action> onProgressAction = null + ) + { + return new Action( + (name, processed) => { if (localProgressDict.ContainsKey(name)) localProgressDict[name] += processed; else localProgressDict[name] = processed; onProgressAction?.Invoke(localProgressDict); - }); - } + } + ); } } diff --git a/Core/Core/Credentials/Account.cs b/Core/Core/Credentials/Account.cs index 5428bbb1a2..73bb58a089 100644 --- a/Core/Core/Credentials/Account.cs +++ b/Core/Core/Credentials/Account.cs @@ -6,103 +6,111 @@ using Speckle.Core.Api.GraphQL.Serializer; using Speckle.Core.Helpers; using Speckle.Core.Logging; +using Speckle.Core.Models; -namespace Speckle.Core.Credentials +namespace Speckle.Core.Credentials; + +public class Account : IEquatable { - public class Account : IEquatable + public Account() { } + + private string _id { get; set; } = null; + + public string id { - private string _id { get; set; } = null; - public string id + get { - get + if (_id == null) { - if (_id == null) - { - if (serverInfo == null || userInfo == null) - throw new SpeckleException( - "Incomplete account info: cannot generate id." 
- ); - _id = Speckle.Core.Models.Utilities - .hashString(userInfo.email + serverInfo.url, Models.Utilities.HashingFuctions.MD5) - .ToUpper(); - } - return _id; + if (serverInfo == null || userInfo == null) + throw new SpeckleException("Incomplete account info: cannot generate id."); + _id = Utilities + .hashString(userInfo.email + serverInfo.url, Utilities.HashingFuctions.MD5) + .ToUpper(); } - set { _id = value; } + return _id; } - public string token { get; set; } + set => _id = value; + } - public string refreshToken { get; set; } + public string token { get; set; } - public bool isDefault { get; set; } = false; - public bool isOnline { get; set; } = true; + public string refreshToken { get; set; } - public ServerInfo serverInfo { get; set; } + public bool isDefault { get; set; } = false; + public bool isOnline { get; set; } = true; - public UserInfo userInfo { get; set; } + public ServerInfo serverInfo { get; set; } - public Account() { } + public UserInfo userInfo { get; set; } - #region public methods + #region private methods - public string GetHashedEmail() - { - string email = userInfo?.email ?? "unknown"; - return "@" + Crypt.Hash(email); - } + private static string CleanURL(string server) + { + Uri NewUri; - public string GetHashedServer() - { - string url = serverInfo?.url ?? "https://speckle.xyz/"; - return Crypt.Hash(CleanURL(url)); - } + if (Uri.TryCreate(server, UriKind.Absolute, out NewUri)) + server = NewUri.Authority; + return server; + } - public async Task Validate() - { - using var httpClient = Http.GetHttpProxyClient(); + #endregion - httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {token}"); + #region public methods - using var gqlClient = new GraphQLHttpClient( - new GraphQLHttpClientOptions() { EndPoint = new Uri(new Uri(serverInfo.url), "/graphql") }, - new NewtonsoftJsonSerializer(), - httpClient - ); + public string GetHashedEmail() + { + string email = userInfo?.email ?? 
"unknown"; + return "@" + Crypt.Hash(email); + } - var request = new GraphQLRequest { Query = @" query { user { name email id company } }" }; + public string GetHashedServer() + { + string url = serverInfo?.url ?? "https://speckle.xyz/"; + return Crypt.Hash(CleanURL(url)); + } - var response = await gqlClient.SendQueryAsync(request); + public async Task Validate() + { + using var httpClient = Http.GetHttpProxyClient(); - if (response.Errors != null) - return null; + httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {token}"); - return response.Data.user; - } + using var gqlClient = new GraphQLHttpClient( + new GraphQLHttpClientOptions() { EndPoint = new Uri(new Uri(serverInfo.url), "/graphql") }, + new NewtonsoftJsonSerializer(), + httpClient + ); - public bool Equals(Account other) - { - return other.userInfo.email == userInfo.email && other.serverInfo.url == serverInfo.url; - } + var request = new GraphQLRequest { Query = @" query { user { name email id company } }" }; - public override string ToString() - { - return $"Account ({userInfo.email} | {serverInfo.url})"; - } + var response = await gqlClient.SendQueryAsync(request).ConfigureAwait(false); - #endregion + if (response.Errors != null) + return null; - #region private methods - private static string CleanURL(string server) - { - Uri NewUri; + return response.Data.user; + } - if (Uri.TryCreate(server, UriKind.Absolute, out NewUri)) - { - server = NewUri.Authority; - } - return server; - } + public bool Equals(Account other) + { + return other is not null + && other.userInfo.email == userInfo.email + && other.serverInfo.url == serverInfo.url; + } + + public override string ToString() + { + return $"Account ({userInfo.email} | {serverInfo.url})"; + } - #endregion +#pragma warning disable CS0659 //TODO: Disabled to prevent GetHashCode from being added by the cleanup. 
+ public override bool Equals(object obj) + { + return obj is Account acc && Equals(acc); } +#pragma warning restore CS0659 + + #endregion } diff --git a/Core/Core/Credentials/AccountManager.cs b/Core/Core/Credentials/AccountManager.cs index 749a092638..c31958ad09 100644 --- a/Core/Core/Credentials/AccountManager.cs +++ b/Core/Core/Credentials/AccountManager.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; @@ -7,645 +7,638 @@ using System.Net.Http; using System.Net.Http.Headers; using System.Security.Cryptography; +using System.Text; using System.Text.RegularExpressions; using System.Threading.Tasks; using GraphQL; using GraphQL.Client.Http; -using Serilog; using Speckle.Core.Api; using Speckle.Core.Api.GraphQL.Serializer; using Speckle.Core.Helpers; using Speckle.Core.Logging; using Speckle.Core.Transports; using Speckle.Newtonsoft.Json; +using Stream = System.IO.Stream; -namespace Speckle.Core.Credentials +namespace Speckle.Core.Credentials; + +/// +/// Manage accounts locally for desktop applications. +/// +public static class AccountManager { + private static SQLiteTransport AccountStorage = new(scope: "Accounts"); + private static bool _isAddingAccount = false; + private static SQLiteTransport AccountAddLockStorage = new(scope: "AccountAddFlow"); + /// - /// Manage accounts locally for desktop applications. + /// Gets the basic information about a server. 
/// - public static class AccountManager + /// Server URL + /// + public static async Task GetServerInfo(string server) { - private static SQLiteTransport AccountStorage = new SQLiteTransport(scope: "Accounts"); - private static bool _isAddingAccount = false; - private static SQLiteTransport AccountAddLockStorage = new SQLiteTransport( - scope: "AccountAddFlow" + using var httpClient = Http.GetHttpProxyClient(); + + using var gqlClient = new GraphQLHttpClient( + new GraphQLHttpClientOptions() { EndPoint = new Uri(new Uri(server), "/graphql") }, + new NewtonsoftJsonSerializer(), + httpClient ); - /// - /// Gets the basic information about a server. - /// - /// Server URL - /// - public static async Task GetServerInfo(string server) - { - using var httpClient = Http.GetHttpProxyClient(); + var request = new GraphQLRequest { Query = @" query { serverInfo { name company } }" }; - using var gqlClient = new GraphQLHttpClient( - new GraphQLHttpClientOptions() { EndPoint = new Uri(new Uri(server), "/graphql") }, - new NewtonsoftJsonSerializer(), - httpClient - ); + var response = await gqlClient + .SendQueryAsync(request) + .ConfigureAwait(false); - var request = new GraphQLRequest { Query = @" query { serverInfo { name company } }" }; + if (response.Errors != null) + return null; - var response = await gqlClient.SendQueryAsync(request); + response.Data.serverInfo.url = server; - if (response.Errors != null) - return null; + return response.Data.serverInfo; + } - response.Data.serverInfo.url = server; + /// + /// Gets basic user information given a token and a server. 
+ /// + /// + /// Server URL + /// + public static async Task GetUserInfo(string token, string server) + { + using var httpClient = Http.GetHttpProxyClient(); + httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {token}"); - return response.Data.serverInfo; - } + using var gqlClient = new GraphQLHttpClient( + new GraphQLHttpClientOptions() { EndPoint = new Uri(new Uri(server), "/graphql") }, + new NewtonsoftJsonSerializer(), + httpClient + ); + + var request = new GraphQLRequest { Query = @" query { user { name email id company } }" }; + + var response = await gqlClient.SendQueryAsync(request).ConfigureAwait(false); - /// - /// Gets basic user information given a token and a server. - /// - /// - /// Server URL - /// - public static async Task GetUserInfo(string token, string server) + if (response.Errors != null) + return null; + + return response.Data.user; + } + + /// + /// Gets basic user and server information given a token and a server. + /// + /// + /// Server URL + /// + private static async Task GetUserServerInfo(string token, string server) + { + try { - using var httpClient = Http.GetHttpProxyClient(); + var httpClient = Http.GetHttpProxyClient(); httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {token}"); - using var gqlClient = new GraphQLHttpClient( - new GraphQLHttpClientOptions() { EndPoint = new Uri(new Uri(server), "/graphql") }, + var client = new GraphQLHttpClient( + new GraphQLHttpClientOptions { EndPoint = new Uri(new Uri(server), "/graphql") }, new NewtonsoftJsonSerializer(), httpClient ); - var request = new GraphQLRequest { Query = @" query { user { name email id company } }" }; + var request = new GraphQLRequest + { + Query = + @"query { user { id name email company avatar streams { totalCount } commits { totalCount } } serverInfo { name company adminContact description version} }" + }; - var response = await gqlClient.SendQueryAsync(request); + var res = await client.SendQueryAsync(request).ConfigureAwait(false); 
- if (response.Errors != null) - return null; + if (res.Errors != null && res.Errors.Any()) + throw new SpeckleException(res.Errors[0].Message, res.Errors); - return response.Data.user; + return res.Data; } - - /// - /// Gets basic user and server information given a token and a server. - /// - /// - /// Server URL - /// - private static async Task GetUserServerInfo(string token, string server) + catch (Exception e) { - try - { - var httpClient = Http.GetHttpProxyClient(); - httpClient.DefaultRequestHeaders.Add("Authorization", $"Bearer {token}"); - - var client = new GraphQLHttpClient( - new GraphQLHttpClientOptions { EndPoint = new Uri(new Uri(server), "/graphql"), }, - new NewtonsoftJsonSerializer(), - httpClient - ); - - var request = new GraphQLRequest - { - Query = - @"query { user { id name email company avatar streams { totalCount } commits { totalCount } } serverInfo { name company adminContact description version} }" - }; + throw new SpeckleException(e.Message, e); + } + } - var res = await client - .SendQueryAsync(request) - .ConfigureAwait(false); + /// + /// The Default Server URL for authentication, can be overridden by placing a file with the alternatrive url in the Speckle folder or with an ENV_VAR + /// + public static string GetDefaultServerUrl() + { + var defaultServerUrl = "https://speckle.xyz"; + var customServerUrl = ""; - if (res.Errors != null && res.Errors.Any()) - throw new SpeckleException(res.Errors[0].Message, res.Errors); + // first mechanism, check for local file + var customServerFile = Path.Combine(SpecklePathProvider.UserSpeckleFolderPath, "server"); + if (File.Exists(customServerFile)) + customServerUrl = File.ReadAllText(customServerFile); - return res.Data; - } - catch (Exception e) - { - throw new SpeckleException(e.Message, e); - } - } + // second mechanism, check ENV VAR + var customServerEnvVar = Environment.GetEnvironmentVariable("SPECKLE_SERVER"); + if (!string.IsNullOrEmpty(customServerEnvVar)) + customServerUrl = 
customServerEnvVar; - /// - /// The Default Server URL for authentication, can be overridden by placing a file with the alternatrive url in the Speckle folder or with an ENV_VAR - /// - public static string GetDefaultServerUrl() + if (!string.IsNullOrEmpty(customServerUrl)) { - var defaultServerUrl = "https://speckle.xyz"; - var customServerUrl = ""; - - // first mechanism, check for local file - var customServerFile = Path.Combine(SpecklePathProvider.UserSpeckleFolderPath, "server"); - if (File.Exists(customServerFile)) - customServerUrl = File.ReadAllText(customServerFile); - - // second mechanism, check ENV VAR - var customServerEnvVar = Environment.GetEnvironmentVariable("SPECKLE_SERVER"); - if (!string.IsNullOrEmpty(customServerEnvVar)) - customServerUrl = customServerEnvVar; + Uri url = null; + Uri.TryCreate(customServerUrl, UriKind.Absolute, out url); + if (url != null) + defaultServerUrl = customServerUrl.TrimEnd(new[] { '/' }); + } - if (!string.IsNullOrEmpty(customServerUrl)) - { - Uri url = null; - Uri.TryCreate(customServerUrl, UriKind.Absolute, out url); - if (url != null) - defaultServerUrl = customServerUrl.TrimEnd(new[] { '/' }); - } + return defaultServerUrl; + } - return defaultServerUrl; - } + /// + /// Gets all the accounts for a given server. + /// + /// + /// + public static IEnumerable GetAccounts(string serverUrl) + { + return GetAccounts().Where(acc => acc.serverInfo.url == serverUrl); + } - /// - /// Gets all the accounts for a given server. - /// - /// - /// - public static IEnumerable GetAccounts(string serverUrl) + /// + /// Gets this environment's default account if any. If there is no default, the first found will be returned and set as default. + /// + /// The default account or null. 
+ public static Account GetDefaultAccount() + { + var defaultAccount = GetAccounts().Where(acc => acc.isDefault).FirstOrDefault(); + if (defaultAccount == null) { - return GetAccounts().Where(acc => acc.serverInfo.url == serverUrl); + var firstAccount = GetAccounts().FirstOrDefault(); + if (firstAccount == null) + SpeckleLog.Logger.Information( + "No Speckle accounts found. Visit the Speckle web app to create one." + ); + return firstAccount; } + return defaultAccount; + } - /// - /// Gets this environment's default account if any. If there is no default, the first found will be returned and set as default. - /// - /// The default account or null. - public static Account GetDefaultAccount() - { - var defaultAccount = GetAccounts().Where(acc => acc.isDefault).FirstOrDefault(); - if (defaultAccount == null) - { - var firstAccount = GetAccounts().FirstOrDefault(); - if (firstAccount == null) - SpeckleLog.Logger.Information("No Speckle accounts found. Visit the Speckle web app to create one."); - return firstAccount; - } - return defaultAccount; - } + /// + /// Gets all the accounts present in this environment. + /// + /// + public static IEnumerable GetAccounts() + { + var sqlAccounts = AccountStorage + .GetAllObjects() + .Select(x => JsonConvert.DeserializeObject(x)); + var localAccounts = GetLocalAccounts(); - /// - /// Gets all the accounts present in this environment. 
- /// - /// - public static IEnumerable GetAccounts() - { - var sqlAccounts = AccountStorage - .GetAllObjects() - .Select(x => JsonConvert.DeserializeObject(x)); - var localAccounts = GetLocalAccounts(); - - //prevent invalid account from slipping out - var invalidAccounts = sqlAccounts.Where(x => x.userInfo == null || x.serverInfo == null); - foreach (var acc in invalidAccounts) - { - RemoveAccount(acc.id); - } + //prevent invalid account from slipping out + var invalidAccounts = sqlAccounts.Where(x => x.userInfo == null || x.serverInfo == null); + foreach (var acc in invalidAccounts) + RemoveAccount(acc.id); - var allAccounts = sqlAccounts.Concat(localAccounts); + var allAccounts = sqlAccounts.Concat(localAccounts); - return allAccounts; - } + return allAccounts; + } - /// - /// Gets the local accounts - /// These are accounts not handled by Manager and are stored in json format in a local directory - /// - /// - private static IEnumerable GetLocalAccounts() - { - var accounts = new List(); - var accountsDir = SpecklePathProvider.AccountsFolderPath; - if (!Directory.Exists(accountsDir)) + /// + /// Gets the local accounts + /// These are accounts not handled by Manager and are stored in json format in a local directory + /// + /// + private static IEnumerable GetLocalAccounts() + { + var accounts = new List(); + var accountsDir = SpecklePathProvider.AccountsFolderPath; + if (!Directory.Exists(accountsDir)) + return accounts; + var files = Directory.GetFiles(accountsDir, "*.json", SearchOption.AllDirectories); + foreach (var file in files) + try { - return accounts; + var json = File.ReadAllText(file); + var account = JsonConvert.DeserializeObject(json); + + if ( + !string.IsNullOrEmpty(account.token) + && !string.IsNullOrEmpty(account.userInfo.id) + && !string.IsNullOrEmpty(account.userInfo.email) + && !string.IsNullOrEmpty(account.userInfo.name) + && !string.IsNullOrEmpty(account.serverInfo.url) + && !string.IsNullOrEmpty(account.serverInfo.name) + ) + 
accounts.Add(account); } - var files = Directory.GetFiles(accountsDir, "*.json", SearchOption.AllDirectories); - foreach (var file in files) + catch { - try - { - var json = File.ReadAllText(file); - var account = JsonConvert.DeserializeObject(json); - - if ( - !string.IsNullOrEmpty(account.token) - && !string.IsNullOrEmpty(account.userInfo.id) - && !string.IsNullOrEmpty(account.userInfo.email) - && !string.IsNullOrEmpty(account.userInfo.name) - && !string.IsNullOrEmpty(account.serverInfo.url) - && !string.IsNullOrEmpty(account.serverInfo.name) - ) - accounts.Add(account); - } - catch - { //ignore it - } + //ignore it } - return accounts; - } - /// - /// Refetches user and server info for each account - /// - /// - public static async Task UpdateAccounts() + return accounts; + } + + /// + /// Refetches user and server info for each account + /// + /// + public static async Task UpdateAccounts() + { + foreach (var account in GetAccounts()) { - foreach (var account in GetAccounts()) + var url = account.serverInfo.url; + + try { - var url = account.serverInfo.url; + var userServerInfo = await GetUserServerInfo(account.token, url).ConfigureAwait(false); - try + //the token has expired + //TODO: once we get a token expired exception from the server use that instead + if ( + userServerInfo == null || userServerInfo.user == null || userServerInfo.serverInfo == null + ) { - var userServerInfo = await GetUserServerInfo(account.token, url); + var tokenResponse = await GetRefreshedToken(account.refreshToken, url) + .ConfigureAwait(false); + userServerInfo = await GetUserServerInfo(tokenResponse.token, url).ConfigureAwait(false); - //the token has expired - //TODO: once we get a token expired exception from the server use that instead if ( userServerInfo == null || userServerInfo.user == null || userServerInfo.serverInfo == null ) - { - var tokenResponse = await GetRefreshedToken(account.refreshToken, url); - userServerInfo = await GetUserServerInfo(tokenResponse.token, url); 
- - if ( - userServerInfo == null - || userServerInfo.user == null - || userServerInfo.serverInfo == null - ) - throw new SpeckleException("Could not refresh token"); - - account.token = tokenResponse.token; - account.refreshToken = tokenResponse.refreshToken; - } - - account.isOnline = true; - account.userInfo = userServerInfo.user; - account.serverInfo = userServerInfo.serverInfo; - account.serverInfo.url = url; - } - catch (Exception ex) - { - account.isOnline = false; + throw new SpeckleException("Could not refresh token"); + + account.token = tokenResponse.token; + account.refreshToken = tokenResponse.refreshToken; } - AccountStorage.UpdateObject(account.id, JsonConvert.SerializeObject(account)); + account.isOnline = true; + account.userInfo = userServerInfo.user; + account.serverInfo = userServerInfo.serverInfo; + account.serverInfo.url = url; + } + catch (Exception ex) + { + account.isOnline = false; } + + AccountStorage.UpdateObject(account.id, JsonConvert.SerializeObject(account)); } + } - /// - /// Removes an account - /// - /// ID of the account to remove - public static void RemoveAccount(string id) - { - //TODO: reset default account - AccountStorage.DeleteObject(id); + /// + /// Removes an account + /// + /// ID of the account to remove + public static void RemoveAccount(string id) + { + //TODO: reset default account + AccountStorage.DeleteObject(id); - var accounts = GetAccounts(); + var accounts = GetAccounts(); - if (accounts.Any() && !accounts.Any(x => x.isDefault)) - { - ChangeDefaultAccount(accounts.First().id); - } - } + if (accounts.Any() && !accounts.Any(x => x.isDefault)) + ChangeDefaultAccount(accounts.First().id); + } - /// - /// Changes the default account - /// - /// - public static void ChangeDefaultAccount(string id) + /// + /// Changes the default account + /// + /// + public static void ChangeDefaultAccount(string id) + { + foreach (var account in GetAccounts()) { - foreach (var account in GetAccounts()) - { - if (account.id != id) - 
{ - account.isDefault = false; - } - else - { - account.isDefault = true; - } + if (account.id != id) + account.isDefault = false; + else + account.isDefault = true; - AccountStorage.UpdateObject(account.id, JsonConvert.SerializeObject(account)); - } + AccountStorage.UpdateObject(account.id, JsonConvert.SerializeObject(account)); } + } - private static string _ensureCorrectServerUrl(string server) + private static string _ensureCorrectServerUrl(string server) + { + var localUrl = server; + if (string.IsNullOrEmpty(localUrl)) { - var localUrl = server; - if (string.IsNullOrEmpty(localUrl)) - { - localUrl = GetDefaultServerUrl(); - SpeckleLog.Logger.Debug( - "The provided server url was null or empty. Changed to the default url {serverUrl}", - localUrl - ); - } - return localUrl.TrimEnd(new[] { '/' }); + localUrl = GetDefaultServerUrl(); + SpeckleLog.Logger.Debug( + "The provided server url was null or empty. Changed to the default url {serverUrl}", + localUrl + ); } + return localUrl.TrimEnd(new[] { '/' }); + } - private static void _ensureGetAccessCodeFlowIsSupported() + private static void _ensureGetAccessCodeFlowIsSupported() + { + if (!HttpListener.IsSupported) { - if (!HttpListener.IsSupported) - { - SpeckleLog.Logger.Error("HttpListener not supported"); - throw new Exception("Your operating system is not supported"); - } + SpeckleLog.Logger.Error("HttpListener not supported"); + throw new Exception("Your operating system is not supported"); } + } - private static async Task _getAccessCode( - string server, - string challenge, - TimeSpan timeout - ) - { - _ensureGetAccessCodeFlowIsSupported(); - - SpeckleLog.Logger.Debug( - "Starting auth process for {server}/authn/verify/sca/{challenge}", - server, - challenge - ); - - var accessCode = ""; + private static async Task _getAccessCode( + string server, + string challenge, + TimeSpan timeout + ) + { + _ensureGetAccessCodeFlowIsSupported(); - Process.Start( - new 
ProcessStartInfo($"{server}/authn/verify/sca/{challenge}") { UseShellExecute = true } - ); + SpeckleLog.Logger.Debug( + "Starting auth process for {server}/authn/verify/sca/{challenge}", + server, + challenge + ); - var listener = new HttpListener(); + var accessCode = ""; - var task = Task.Run(() => - { - var localUrl = "http://localhost:29363/"; - listener.Prefixes.Add(localUrl); - listener.Start(); - SpeckleLog.Logger.Debug("Listening for auth redirects on {localUrl}", localUrl); - // Note: The GetContext method blocks while waiting for a request. - HttpListenerContext context = listener.GetContext(); - HttpListenerRequest request = context.Request; - HttpListenerResponse response = context.Response; - - accessCode = request.QueryString["access_code"]; - SpeckleLog.Logger.Debug("Got access code {accessCode}", accessCode); - var message = ""; - if (accessCode != null) - message = - "Success!

You can close this window now."; - else - message = "Oups, something went wrong...!"; - - var responseString = - $"
{message}"; - byte[] buffer = System.Text.Encoding.UTF8.GetBytes(responseString); - response.ContentLength64 = buffer.Length; - System.IO.Stream output = response.OutputStream; - output.Write(buffer, 0, buffer.Length); - output.Close(); - SpeckleLog.Logger.Debug("Processed finished processing the access code."); - listener.Stop(); - listener.Close(); - }); - - var completedTask = await Task.WhenAny(task, Task.Delay(timeout)); - - //ensure the listener is closed even if the task has timed out or failed - if (listener.IsListening) - listener.Abort(); - - // this is means the task timed out - if (completedTask != task) - { - SpeckleLog.Logger.Warning( - "Local auth flow failed to complete within the timeout window. Access code is {accessCode}", - accessCode - ); - throw new Exception("Local auth flow failed to complete within the timeout window"); - } + Process.Start( + new ProcessStartInfo($"{server}/authn/verify/sca/{challenge}") { UseShellExecute = true } + ); - if (task.IsFaulted) - { - SpeckleLog.Logger.Error( - task.Exception, - "Getting access code flow failed with {exceptionMessage}", - task.Exception.Message - ); - throw new Exception($"Auth flow failed: {task.Exception.Message}", task.Exception); - } + var listener = new HttpListener(); - // task completed within timeout - SpeckleLog.Logger.Information( - "Local auth flow completed successfully within the timeout window. Access code is {accessCode}", + var task = Task.Run(() => + { + var localUrl = "http://localhost:29363/"; + listener.Prefixes.Add(localUrl); + listener.Start(); + SpeckleLog.Logger.Debug("Listening for auth redirects on {localUrl}", localUrl); + // Note: The GetContext method blocks while waiting for a request. 
+ HttpListenerContext context = listener.GetContext(); + HttpListenerRequest request = context.Request; + HttpListenerResponse response = context.Response; + + accessCode = request.QueryString["access_code"]; + SpeckleLog.Logger.Debug("Got access code {accessCode}", accessCode); + var message = ""; + if (accessCode != null) + message = + "Success!

You can close this window now."; + else + message = "Oups, something went wrong...!"; + + var responseString = + $"
{message}"; + byte[] buffer = Encoding.UTF8.GetBytes(responseString); + response.ContentLength64 = buffer.Length; + Stream output = response.OutputStream; + output.Write(buffer, 0, buffer.Length); + output.Close(); + SpeckleLog.Logger.Debug("Processed finished processing the access code."); + listener.Stop(); + listener.Close(); + }); + + var completedTask = await Task.WhenAny(task, Task.Delay(timeout)).ConfigureAwait(false); + + //ensure the listener is closed even if the task has timed out or failed + if (listener.IsListening) + listener.Abort(); + + // this is means the task timed out + if (completedTask != task) + { + SpeckleLog.Logger.Warning( + "Local auth flow failed to complete within the timeout window. Access code is {accessCode}", accessCode ); - return accessCode; + throw new Exception("Local auth flow failed to complete within the timeout window"); } - private static async Task _createAccount( - string accessCode, - string challenge, - string server - ) + if (task.IsFaulted) { - try - { - var tokenResponse = await GetToken(accessCode, challenge, server); - var userResponse = await GetUserServerInfo(tokenResponse.token, server); - - var account = new Account() - { - token = tokenResponse.token, - refreshToken = tokenResponse.refreshToken, - isDefault = GetAccounts().Count() == 0, - serverInfo = userResponse.serverInfo, - userInfo = userResponse.user - }; - SpeckleLog.Logger.Information("Successfully created account for {serverUrl}", server); - account.serverInfo.url = server; - - return account; - } - catch (Exception ex) - { - throw new SpeckleAccountManagerException( - "Failed to create account from access code and challenge", - ex - ); - } + SpeckleLog.Logger.Error( + task.Exception, + "Getting access code flow failed with {exceptionMessage}", + task.Exception.Message + ); + throw new Exception($"Auth flow failed: {task.Exception.Message}", task.Exception); } - private static void _tryLockAccountAddFlow(TimeSpan timespan) - { - // use a static 
variable to quickly - // prevent launching this flow multiple times - if (_isAddingAccount) - // this should probably throw with an error message - throw new SpeckleAccountFlowLockedException("The account add flow is already launched."); - - // this uses the SQLite transport to store locks - var lockIds = AccountAddLockStorage.GetAllObjects().OrderByDescending(d => d).ToList(); - var now = DateTime.Now; - foreach (var l in lockIds) - { - var lockArray = l.Split('@'); - var lockName = lockArray.Length == 2 ? lockArray[0] : "the other app"; - var lockTime = lockArray.Length == 2 ? DateTime.ParseExact(lockArray[1], "o", null) : DateTime.ParseExact(lockArray[0], "o", null); - - if (lockTime > now) - { - var lockString = String.Format("{0:mm} minutes {0:ss} seconds", lockTime - now); - throw new SpeckleAccountFlowLockedException( - $"The account add flow was already started in {lockName}, retry in {lockString}" - ); - } - } - + // task completed within timeout + SpeckleLog.Logger.Information( + "Local auth flow completed successfully within the timeout window. 
Access code is {accessCode}", + accessCode + ); + return accessCode; + } - var lockId = Setup.HostApplication + "@" + DateTime.Now.Add(timespan).ToString("o"); + private static async Task _createAccount( + string accessCode, + string challenge, + string server + ) + { + try + { + var tokenResponse = await GetToken(accessCode, challenge, server).ConfigureAwait(false); + var userResponse = await GetUserServerInfo(tokenResponse.token, server).ConfigureAwait(false); - // using the lock release time as an id and value - // for ease of deletion and retrieval - AccountAddLockStorage.SaveObjectSync(lockId, lockId); - _isAddingAccount = true; - return; + var account = new Account() + { + token = tokenResponse.token, + refreshToken = tokenResponse.refreshToken, + isDefault = GetAccounts().Count() == 0, + serverInfo = userResponse.serverInfo, + userInfo = userResponse.user + }; + SpeckleLog.Logger.Information("Successfully created account for {serverUrl}", server); + account.serverInfo.url = server; + + return account; } + catch (Exception ex) + { + throw new SpeckleAccountManagerException( + "Failed to create account from access code and challenge", + ex + ); + } + } - private static void _unlockAccountAddFlow() + private static void _tryLockAccountAddFlow(TimeSpan timespan) + { + // use a static variable to quickly + // prevent launching this flow multiple times + if (_isAddingAccount) + // this should probably throw with an error message + throw new SpeckleAccountFlowLockedException("The account add flow is already launched."); + + // this uses the SQLite transport to store locks + var lockIds = AccountAddLockStorage.GetAllObjects().OrderByDescending(d => d).ToList(); + var now = DateTime.Now; + foreach (var l in lockIds) { - _isAddingAccount = false; - // make sure all old locks are removed - foreach (var id in AccountAddLockStorage.GetAllObjects()) + var lockArray = l.Split('@'); + var lockName = lockArray.Length == 2 ? 
lockArray[0] : "the other app"; + var lockTime = + lockArray.Length == 2 + ? DateTime.ParseExact(lockArray[1], "o", null) + : DateTime.ParseExact(lockArray[0], "o", null); + + if (lockTime > now) { - AccountAddLockStorage.DeleteObject(id); + var lockString = string.Format("{0:mm} minutes {0:ss} seconds", lockTime - now); + throw new SpeckleAccountFlowLockedException( + $"The account add flow was already started in {lockName}, retry in {lockString}" + ); } } - /// - /// Adds an account by propting the user to log in via a web flow - /// - /// Server to use to add the account, if not provied the default Server will be used - /// - public static async Task AddAccount(string server = "") - { - SpeckleLog.Logger.Debug("Starting to add account for {serverUrl}", server); + var lockId = Setup.HostApplication + "@" + DateTime.Now.Add(timespan).ToString("o"); - server = _ensureCorrectServerUrl(server); + // using the lock release time as an id and value + // for ease of deletion and retrieval + AccountAddLockStorage.SaveObjectSync(lockId, lockId); + _isAddingAccount = true; + return; + } - // locking for 1 minute - var timeout = TimeSpan.FromMinutes(1); - // this is not part of the try finally block - // we do not want to clean up the existing locks - _tryLockAccountAddFlow(timeout); - var challenge = GenerateChallenge(); + private static void _unlockAccountAddFlow() + { + _isAddingAccount = false; + // make sure all old locks are removed + foreach (var id in AccountAddLockStorage.GetAllObjects()) + AccountAddLockStorage.DeleteObject(id); + } - var accessCode = ""; + /// + /// Adds an account by propting the user to log in via a web flow + /// + /// Server to use to add the account, if not provied the default Server will be used + /// + public static async Task AddAccount(string server = "") + { + SpeckleLog.Logger.Debug("Starting to add account for {serverUrl}", server); - try - { - accessCode = await _getAccessCode(server, challenge, timeout); - if 
(string.IsNullOrEmpty(accessCode)) - throw new SpeckleAccountManagerException("Access code is invalid"); + server = _ensureCorrectServerUrl(server); - var account = await _createAccount(accessCode, challenge, server); + // locking for 1 minute + var timeout = TimeSpan.FromMinutes(1); + // this is not part of the try finally block + // we do not want to clean up the existing locks + _tryLockAccountAddFlow(timeout); + var challenge = GenerateChallenge(); - //if the account already exists it will not be added again - AccountStorage.SaveObject(account.id, JsonConvert.SerializeObject(account)); - SpeckleLog.Logger.Debug("Finished adding account {accountId} for {serverUrl}", account.id, server); - } - catch (SpeckleAccountManagerException ex) - { - SpeckleLog.Logger.Fatal(ex, "Failed to add account: {exceptionMessage}", ex.Message); - // rethrowing any known errors - throw; - } - catch (Exception ex) - { - SpeckleLog.Logger.Fatal(ex, "Failed to add account: {exceptionMessage}", ex.Message); - throw new SpeckleAccountManagerException($"Failed to add account: {ex.Message}", ex); - } - finally - { - _unlockAccountAddFlow(); - } + var accessCode = ""; + + try + { + accessCode = await _getAccessCode(server, challenge, timeout).ConfigureAwait(false); + if (string.IsNullOrEmpty(accessCode)) + throw new SpeckleAccountManagerException("Access code is invalid"); + + var account = await _createAccount(accessCode, challenge, server).ConfigureAwait(false); + + //if the account already exists it will not be added again + AccountStorage.SaveObject(account.id, JsonConvert.SerializeObject(account)); + SpeckleLog.Logger.Debug( + "Finished adding account {accountId} for {serverUrl}", + account.id, + server + ); + } + catch (SpeckleAccountManagerException ex) + { + SpeckleLog.Logger.Fatal(ex, "Failed to add account: {exceptionMessage}", ex.Message); + // rethrowing any known errors + throw; + } + catch (Exception ex) + { + SpeckleLog.Logger.Fatal(ex, "Failed to add account: 
{exceptionMessage}", ex.Message); + throw new SpeckleAccountManagerException($"Failed to add account: {ex.Message}", ex); + } + finally + { + _unlockAccountAddFlow(); } + } - private static async Task GetToken( - string accessCode, - string challenge, - string server - ) + private static async Task GetToken( + string accessCode, + string challenge, + string server + ) + { + try { - try - { - ServicePointManager.SecurityProtocol = - SecurityProtocolType.Tls12 | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls; - var client = Http.GetHttpProxyClient(); + ServicePointManager.SecurityProtocol = + SecurityProtocolType.Tls12 | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls; + var client = Http.GetHttpProxyClient(); - var body = new - { - appId = "sca", - appSecret = "sca", - accessCode = accessCode, - challenge = challenge, - }; - - var content = new StringContent(JsonConvert.SerializeObject(body)); - content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); - var response = await client.PostAsync($"{server}/auth/token", content); - - return JsonConvert.DeserializeObject( - await response.Content.ReadAsStringAsync() - ); - } - catch (Exception e) + var body = new { - throw new SpeckleException(e.Message, e); - } + appId = "sca", + appSecret = "sca", + accessCode = accessCode, + challenge = challenge + }; + + var content = new StringContent(JsonConvert.SerializeObject(body)); + content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + var response = await client.PostAsync($"{server}/auth/token", content).ConfigureAwait(false); + + return JsonConvert.DeserializeObject( + await response.Content.ReadAsStringAsync().ConfigureAwait(false) + ); + } + catch (Exception e) + { + throw new SpeckleException(e.Message, e); } + } - private static async Task GetRefreshedToken( - string refreshToken, - string server - ) + private static async Task GetRefreshedToken( + string refreshToken, + string server + ) + { + try { - try - { - 
ServicePointManager.SecurityProtocol = - SecurityProtocolType.Tls12 | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls; - var client = Http.GetHttpProxyClient(); + ServicePointManager.SecurityProtocol = + SecurityProtocolType.Tls12 | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls; + var client = Http.GetHttpProxyClient(); - var body = new - { - appId = "sca", - appSecret = "sca", - refreshToken = refreshToken - }; + var body = new + { + appId = "sca", + appSecret = "sca", + refreshToken = refreshToken + }; - var content = new StringContent(JsonConvert.SerializeObject(body)); - content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); - var response = await client.PostAsync($"{server}/auth/token", content); + var content = new StringContent(JsonConvert.SerializeObject(body)); + content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + var response = await client.PostAsync($"{server}/auth/token", content).ConfigureAwait(false); - return JsonConvert.DeserializeObject( - await response.Content.ReadAsStringAsync() - ); - } - catch (Exception e) - { - throw new SpeckleException(e.Message, e); - } + return JsonConvert.DeserializeObject( + await response.Content.ReadAsStringAsync().ConfigureAwait(false) + ); + } + catch (Exception e) + { + throw new SpeckleException(e.Message, e); } + } - private static string GenerateChallenge() + private static string GenerateChallenge() + { + using (RandomNumberGenerator rng = new RNGCryptoServiceProvider()) { - using (RandomNumberGenerator rng = new RNGCryptoServiceProvider()) - { - byte[] challengeData = new byte[32]; - rng.GetBytes(challengeData); + byte[] challengeData = new byte[32]; + rng.GetBytes(challengeData); - //escaped chars like % do not play nice with the server - return Regex.Replace(Convert.ToBase64String(challengeData), @"[^\w\.@-]", ""); - } + //escaped chars like % do not play nice with the server + return Regex.Replace(Convert.ToBase64String(challengeData), 
@"[^\w\.@-]", ""); } } } diff --git a/Core/Core/Credentials/Exceptions.cs b/Core/Core/Credentials/Exceptions.cs index 5a7f2b7b51..abd2158233 100644 --- a/Core/Core/Credentials/Exceptions.cs +++ b/Core/Core/Credentials/Exceptions.cs @@ -1,18 +1,26 @@ using System; using Speckle.Core.Logging; -namespace Speckle.Core.Credentials +namespace Speckle.Core.Credentials; + +public class SpeckleAccountManagerException : SpeckleException +{ + public SpeckleAccountManagerException(string message) + : base(message) { } + + public SpeckleAccountManagerException(string message, Exception inner) + : base(message, inner) { } + + public SpeckleAccountManagerException() { } +} + +public class SpeckleAccountFlowLockedException : SpeckleAccountManagerException { - public class SpeckleAccountManagerException : SpeckleException - { - public SpeckleAccountManagerException(string message) : base(message) { } + public SpeckleAccountFlowLockedException(string message) + : base(message) { } - public SpeckleAccountManagerException(string message, Exception inner) : base(message, inner) - { } - } + public SpeckleAccountFlowLockedException() { } - public class SpeckleAccountFlowLockedException : SpeckleAccountManagerException - { - public SpeckleAccountFlowLockedException(string message) : base(message) { } - } + public SpeckleAccountFlowLockedException(string message, Exception innerException) + : base(message, innerException) { } } diff --git a/Core/Core/Credentials/Responses.cs b/Core/Core/Credentials/Responses.cs index 7b18973873..9f296a6cd2 100644 --- a/Core/Core/Credentials/Responses.cs +++ b/Core/Core/Credentials/Responses.cs @@ -1,44 +1,42 @@ -using Speckle.Core.Api; +using Speckle.Core.Api; -namespace Speckle.Core.Credentials +namespace Speckle.Core.Credentials; + +public class UserServerInfoResponse +{ + public UserInfo user { get; set; } + public ServerInfo serverInfo { get; set; } +} + +public class UserInfoResponse +{ + public UserInfo user { get; set; } +} + +public class UserInfo 
{ + public string id { get; set; } + public string name { get; set; } + public string email { get; set; } + public string company { get; set; } + public string avatar { get; set; } + + public Streams streams { get; set; } + public Commits commits { get; set; } +} - public class UserServerInfoResponse - { - public UserInfo user { get; set; } - public ServerInfo serverInfo { get; set; } - } - public class UserInfoResponse - { - public UserInfo user { get; set; } - } - - public class UserInfo - { - public string id { get; set; } - public string name { get; set; } - public string email { get; set; } - public string company { get; set; } - public string avatar { get; set; } - - public Streams streams { get; set; } - public Commits commits { get; set; } - } - - public class TokenExchangeResponse - { - public string token { get; set; } - public string refreshToken { get; set; } - } - - public class Streams - { - public int totalCount { get; set; } - } - - public class Commits - { - public int totalCount { get; set; } - } +public class TokenExchangeResponse +{ + public string token { get; set; } + public string refreshToken { get; set; } +} +public class Streams +{ + public int totalCount { get; set; } +} + +public class Commits +{ + public int totalCount { get; set; } } diff --git a/Core/Core/Credentials/StreamWrapper.cs b/Core/Core/Credentials/StreamWrapper.cs index 3356f9ff84..83702ff0ca 100644 --- a/Core/Core/Credentials/StreamWrapper.cs +++ b/Core/Core/Credentials/StreamWrapper.cs @@ -1,345 +1,320 @@ -using System; +using System; using System.Linq; using System.Threading.Tasks; +using System.Web; using Speckle.Core.Api; using Speckle.Core.Helpers; using Speckle.Core.Logging; -namespace Speckle.Core.Credentials +namespace Speckle.Core.Credentials; + +public class StreamWrapper { - public class StreamWrapper + private Account _Account; + + public StreamWrapper() { } + + /// + /// Creates a StreamWrapper from a stream url or a stream id + /// + /// Stream Url eg: 
http://speckle.server/streams/8fecc9aa6d/commits/76a23d7179 or stream ID eg: 8fecc9aa6d + /// + public StreamWrapper(string streamUrlOrId) { - //this needs to be public so it's serialized and stored in Dynamo - public string OriginalInput { get; set; } - - public string UserId { get; set; } - public string ServerUrl { get; set; } - public string StreamId { get; set; } - public string CommitId { get; set; } - public string BranchName { get; set; } - public string ObjectId { get; set; } - - /// - /// Determines if the current stream wrapper contains a valid stream. - /// - public bool IsValid => Type != StreamWrapperType.Undefined; - - public StreamWrapperType Type - { - // Quick solution to determine whether a wrapper points to a branch, commit or stream. - get - { - if (!string.IsNullOrEmpty(ObjectId)) - { - return StreamWrapperType.Object; - } + OriginalInput = streamUrlOrId; + + if (!Uri.TryCreate(streamUrlOrId, UriKind.Absolute, out _)) + StreamWrapperFromId(streamUrlOrId); + else + StreamWrapperFromUrl(streamUrlOrId); + } - if (!string.IsNullOrEmpty(CommitId)) - { - return StreamWrapperType.Commit; - } + /// + /// Creates a StreamWrapper by streamId, userId and serverUrl + /// + /// + /// + /// + public StreamWrapper(string streamId, string userId, string serverUrl) + { + UserId = userId; + ServerUrl = serverUrl; + StreamId = streamId; - if (!string.IsNullOrEmpty(BranchName)) - { - return StreamWrapperType.Branch; - } + OriginalInput = $"{ServerUrl}/streams/{StreamId}{(UserId != null ? "?u=" + UserId : "")}"; + } - // If we reach here and there is no stream id, it means that the stream is invalid for some reason. - return !string.IsNullOrEmpty(StreamId) ? 
StreamWrapperType.Stream : StreamWrapperType.Undefined; + //this needs to be public so it's serialized and stored in Dynamo + public string OriginalInput { get; set; } - } - } + public string UserId { get; set; } + public string ServerUrl { get; set; } + public string StreamId { get; set; } + public string CommitId { get; set; } + public string BranchName { get; set; } + public string ObjectId { get; set; } - public StreamWrapper() - { - } + /// + /// Determines if the current stream wrapper contains a valid stream. + /// + public bool IsValid => Type != StreamWrapperType.Undefined; - /// - /// Creates a StreamWrapper from a stream url or a stream id - /// - /// Stream Url eg: http://speckle.server/streams/8fecc9aa6d/commits/76a23d7179 or stream ID eg: 8fecc9aa6d - /// - public StreamWrapper(string streamUrlOrId) + public StreamWrapperType Type + { + // Quick solution to determine whether a wrapper points to a branch, commit or stream. + get { - OriginalInput = streamUrlOrId; + if (!string.IsNullOrEmpty(ObjectId)) + return StreamWrapperType.Object; - Uri uri; - try - { - if (!Uri.TryCreate(streamUrlOrId, UriKind.Absolute, out uri)) - { - StreamWrapperFromId(streamUrlOrId); - } - else - { - StreamWrapperFromUrl(streamUrlOrId); - } - } - catch (Exception e) - { - throw; - } - } + if (!string.IsNullOrEmpty(CommitId)) + return StreamWrapperType.Commit; - /// - /// Creates a StreamWrapper by streamId, userId and serverUrl - /// - /// - /// - /// - public StreamWrapper(string streamId, string userId, string serverUrl) - { - UserId = userId; - ServerUrl = serverUrl; - StreamId = streamId; + if (!string.IsNullOrEmpty(BranchName)) + return StreamWrapperType.Branch; - OriginalInput = $"{ServerUrl}/streams/{StreamId}{(UserId != null ? "?u=" + UserId : "")}"; + // If we reach here and there is no stream id, it means that the stream is invalid for some reason. + return !string.IsNullOrEmpty(StreamId) + ? 
StreamWrapperType.Stream + : StreamWrapperType.Undefined; } + } - private void StreamWrapperFromId(string streamId) - { - Account account = AccountManager.GetDefaultAccount(); + private void StreamWrapperFromId(string streamId) + { + Account account = AccountManager.GetDefaultAccount(); - if (account == null) - { - throw new SpeckleException( - $"You do not have any account. Please create one or add it to the Speckle Manager."); - } + if (account == null) + throw new SpeckleException( + "You do not have any account. Please create one or add it to the Speckle Manager." + ); - ServerUrl = account.serverInfo.url; - UserId = account.userInfo.id; - StreamId = streamId; - } + ServerUrl = account.serverInfo.url; + UserId = account.userInfo.id; + StreamId = streamId; + } - private void StreamWrapperFromUrl(string streamUrl) - { - Uri uri = new Uri(streamUrl, true); + private void StreamWrapperFromUrl(string streamUrl) + { + Uri uri = new(streamUrl, true); - ServerUrl = uri.GetLeftPart(UriPartial.Authority); - // Note: this is a hack. It's because new Uri() is parsed escaped in .net framework; wheareas in .netstandard it's not. - // Tests pass in Core without this hack. - if (uri.Segments.Length >= 4 && uri.Segments[3]?.ToLowerInvariant() == "branches/") + ServerUrl = uri.GetLeftPart(UriPartial.Authority); + // Note: this is a hack. It's because new Uri() is parsed escaped in .net framework; wheareas in .netstandard it's not. + // Tests pass in Core without this hack. 
+ if (uri.Segments.Length >= 4 && uri.Segments[3]?.ToLowerInvariant() == "branches/") + { + StreamId = uri.Segments[2].Replace("/", ""); + if (uri.Segments.Length > 5) { - StreamId = uri.Segments[2].Replace("/", ""); - if (uri.Segments.Length > 5) - { - var branchSegs = uri.Segments.ToList().GetRange(4, uri.Segments.Length - 4); - BranchName = Uri.UnescapeDataString(string.Concat(branchSegs)); - } - else - { - BranchName = Uri.UnescapeDataString(uri.Segments[4]); - } + var branchSegs = uri.Segments.ToList().GetRange(4, uri.Segments.Length - 4); + BranchName = Uri.UnescapeDataString(string.Concat(branchSegs)); } else - switch (uri.Segments.Length) - { - case 3: // ie http://speckle.server/streams/8fecc9aa6d - if (uri.Segments[1].ToLowerInvariant() == "streams/") - StreamId = uri.Segments[2].Replace("/", ""); - else - throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class."); + { + BranchName = Uri.UnescapeDataString(uri.Segments[4]); + } + } + else + { + switch (uri.Segments.Length) + { + case 3: // ie http://speckle.server/streams/8fecc9aa6d + if (uri.Segments[1].ToLowerInvariant() == "streams/") + StreamId = uri.Segments[2].Replace("/", ""); + else + throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class."); + + break; + case 4: // ie https://speckle.server/streams/0c6ad366c4/globals/ + if (uri.Segments[3].ToLowerInvariant().StartsWith("globals")) + { + StreamId = uri.Segments[2].Replace("/", ""); + BranchName = Uri.UnescapeDataString(uri.Segments[3].Replace("/", "")); + } + else + { + throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class"); + } - break; - case 4: // ie https://speckle.server/streams/0c6ad366c4/globals/ - if (uri.Segments[3].ToLowerInvariant().StartsWith("globals")) - { + break; + case 5: // ie http://speckle.server/streams/8fecc9aa6d/commits/76a23d7179 + switch (uri.Segments[3].ToLowerInvariant()) + { + // NOTE: this is a good practice reminder on how it should work + case 
"commits/": + StreamId = uri.Segments[2].Replace("/", ""); + CommitId = uri.Segments[4].Replace("/", ""); + break; + case "globals/": StreamId = uri.Segments[2].Replace("/", ""); BranchName = Uri.UnescapeDataString(uri.Segments[3].Replace("/", "")); - } - else - throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class"); - - break; - case 5: // ie http://speckle.server/streams/8fecc9aa6d/commits/76a23d7179 - switch (uri.Segments[3].ToLowerInvariant()) - { - // NOTE: this is a good practice reminder on how it should work - case "commits/": - StreamId = uri.Segments[2].Replace("/", ""); - CommitId = uri.Segments[4].Replace("/", ""); - break; - case "globals/": - StreamId = uri.Segments[2].Replace("/", ""); - BranchName = Uri.UnescapeDataString(uri.Segments[3].Replace("/", "")); - CommitId = uri.Segments[4].Replace("/", ""); - break; - case "branches/": - StreamId = uri.Segments[2].Replace("/", ""); - BranchName = Uri.UnescapeDataString(uri.Segments[4].Replace("/", "")); - break; - case "objects/": - StreamId = uri.Segments[2].Replace("/", ""); - ObjectId = uri.Segments[4].Replace("/", ""); - break; - default: - throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class."); - } - - break; - - default: - throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class."); - } + CommitId = uri.Segments[4].Replace("/", ""); + break; + case "branches/": + StreamId = uri.Segments[2].Replace("/", ""); + BranchName = Uri.UnescapeDataString(uri.Segments[4].Replace("/", "")); + break; + case "objects/": + StreamId = uri.Segments[2].Replace("/", ""); + ObjectId = uri.Segments[4].Replace("/", ""); + break; + default: + throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class."); + } - var queryDictionary = System.Web.HttpUtility.ParseQueryString(uri.Query); - UserId = queryDictionary["u"]; + break; + default: + throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class."); + } } - private Account 
_Account; + var queryDictionary = HttpUtility.ParseQueryString(uri.Query); + UserId = queryDictionary["u"]; + } - /// - /// Gets a valid account for this stream wrapper. - /// Note: this method ensures that the stream exists and/or that the user has an account which has access to that stream. If used in a sync manner, make sure it's not blocking. - /// - /// Throws exception if account fetching failed. This could be due to non-existent account or stream. - /// The valid account object for this stream. - public async Task GetAccount() - { - Exception err = null; + /// + /// Gets a valid account for this stream wrapper. + /// Note: this method ensures that the stream exists and/or that the user has an account which has access to that stream. If used in a sync manner, make sure it's not blocking. + /// + /// Throws exception if account fetching failed. This could be due to non-existent account or stream. + /// The valid account object for this stream. + public async Task GetAccount() + { + Exception err = null; - if (_Account != null) - { - return _Account; - } + if (_Account != null) + return _Account; - // Step 1: check if direct account id (?u=) - if (OriginalInput != null && OriginalInput.Contains("?u=")) + // Step 1: check if direct account id (?u=) + if (OriginalInput != null && OriginalInput.Contains("?u=")) + { + var userId = OriginalInput.Split(new[] { "?u=" }, StringSplitOptions.None)[1]; + var acc = AccountManager.GetAccounts().FirstOrDefault(acc => acc.userInfo.id == userId); + if (acc != null) { - var userId = OriginalInput.Split(new string[] { "?u=" }, StringSplitOptions.None)[1]; - var acc = AccountManager.GetAccounts().FirstOrDefault(acc => acc.userInfo.id == userId); - if (acc != null) - { - try - { - await ValidateWithAccount(acc); - _Account = acc; - return acc; - } - catch (Exception e) - { - // If user specified account and fails, we should stop trying. 
- throw e; - } - } + await ValidateWithAccount(acc).ConfigureAwait(false); + _Account = acc; + return acc; } + } + + // Step 2: check the default + var defAcc = AccountManager.GetDefaultAccount(); + try + { + await ValidateWithAccount(defAcc).ConfigureAwait(false); + _Account = defAcc; + return defAcc; + } + catch (Exception e) + { + err = e; + } + + // Step 3: all the rest + var accs = AccountManager.GetAccounts(ServerUrl); + if (accs.Count() == 0) + throw new SpeckleException($"You don't have any accounts for {ServerUrl}."); - // Step 2: check the default - var defAcc = AccountManager.GetDefaultAccount(); + foreach (var acc in accs) try { - await ValidateWithAccount(defAcc); - _Account = defAcc; - return defAcc; + await ValidateWithAccount(acc).ConfigureAwait(false); + _Account = acc; + return acc; } catch (Exception e) { err = e; } - // Step 3: all the rest - var accs = AccountManager.GetAccounts(ServerUrl); - if (accs.Count() == 0) - { - throw new SpeckleException($"You don't have any accounts for {ServerUrl}."); - } + throw err; + } - foreach (var acc in accs) - { - try - { - await ValidateWithAccount(acc); - _Account = acc; - return acc; - } - catch (Exception e) - { - err = e; - } - } + public void SetAccount(Account acc) + { + _Account = acc; + UserId = _Account.userInfo.id; + } - throw err; - } + public bool Equals(StreamWrapper wrapper) + { + if (wrapper == null) + return false; + if (Type != wrapper.Type) + return false; + return Type == wrapper.Type + && ServerUrl == wrapper.ServerUrl + && UserId == wrapper.UserId + && StreamId == wrapper.StreamId + && Type == StreamWrapperType.Branch + && BranchName == wrapper.BranchName + || Type == StreamWrapperType.Object && ObjectId == wrapper.ObjectId + || Type == StreamWrapperType.Commit && CommitId == wrapper.CommitId; + } - public void SetAccount(Account acc) + public async Task ValidateWithAccount(Account acc) + { + if (ServerUrl != acc.serverInfo.url) + throw new SpeckleException($"Account is not from server 
{ServerUrl}", false); + + var hasInternet = await Http.UserHasInternet().ConfigureAwait(false); + if (!hasInternet) + throw new Exception("You are not connected to the internet."); + + var client = new Client(acc); + // First check if the stream exists + try { - _Account = acc; - UserId = _Account.userInfo.id; + await client.StreamGet(StreamId).ConfigureAwait(false); } - public bool Equals(StreamWrapper wrapper) + catch { - if (wrapper == null) return false; - if (Type != wrapper.Type) return false; - return Type == wrapper.Type && - ServerUrl == wrapper.ServerUrl && - UserId == wrapper.UserId && - StreamId == wrapper.StreamId && - (Type == StreamWrapperType.Branch && BranchName == wrapper.BranchName) || - (Type == StreamWrapperType.Object && ObjectId == wrapper.ObjectId) || - (Type == StreamWrapperType.Commit && CommitId == wrapper.CommitId); + throw new SpeckleException( + $"You don't have access to stream {StreamId} on server {ServerUrl}, or the stream does not exist.", + false + ); } - public async Task ValidateWithAccount(Account acc) + // Check if the branch exists + if (Type == StreamWrapperType.Branch) { - if (ServerUrl != acc.serverInfo.url) - throw new SpeckleException($"Account is not from server {ServerUrl}", false); - - var hasInternet = await Http.UserHasInternet(); - if (!hasInternet) - { - throw new Exception("You are not connected to the internet."); - } - - var client = new Client(acc); - // First check if the stream exists - try - { - await client.StreamGet(StreamId).ConfigureAwait(false); - } - catch - { + var branch = await client.BranchGet(StreamId, BranchName, 1).ConfigureAwait(false); + if (branch == null) throw new SpeckleException( - $"You don't have access to stream {StreamId} on server {ServerUrl}, or the stream does not exist.", false); - } - - // Check if the branch exists - if (Type == StreamWrapperType.Branch) - { - var branch = await client.BranchGet(StreamId, BranchName, 1).ConfigureAwait(false); - if (branch == null) - throw new 
SpeckleException( - $"The branch with name '{BranchName}' doesn't exist in stream {StreamId} on server {ServerUrl}", false); - } + $"The branch with name '{BranchName}' doesn't exist in stream {StreamId} on server {ServerUrl}", + false + ); } + } - public override string ToString() + public override string ToString() + { + var url = $"{ServerUrl}/streams/{StreamId}"; + switch (Type) { - var url = $"{ServerUrl}/streams/{StreamId}"; - switch (Type) - { - case StreamWrapperType.Commit: - url += $"/commits/{CommitId}"; - break; - case StreamWrapperType.Branch: - url += $"/branches/{BranchName}"; - break; - case StreamWrapperType.Object: - url += $"/objects/{ObjectId}"; - break; - } - - var acc = $"{(UserId != null ? "?u=" + UserId : "")}"; - return url + acc; + case StreamWrapperType.Commit: + url += $"/commits/{CommitId}"; + break; + case StreamWrapperType.Branch: + url += $"/branches/{BranchName}"; + break; + case StreamWrapperType.Object: + url += $"/objects/{ObjectId}"; + break; } - } - public enum StreamWrapperType - { - Undefined, - Stream, - Commit, - Branch, - Object + var acc = $"{(UserId != null ? 
"?u=" + UserId : "")}"; + return url + acc; } } + +public enum StreamWrapperType +{ + Undefined, + Stream, + Commit, + Branch, + Object +} diff --git a/Core/Core/Helpers/Crypt.cs b/Core/Core/Helpers/Crypt.cs index 3539ebc4a7..ad331307b1 100644 --- a/Core/Core/Helpers/Crypt.cs +++ b/Core/Core/Helpers/Crypt.cs @@ -1,23 +1,21 @@ +using System.Security.Cryptography; using System.Text; -namespace Speckle.Core.Helpers +namespace Speckle.Core.Helpers; + +public static class Crypt { - public static class Crypt + public static string Hash(string input) { - public static string Hash(string input) + using (MD5 md5 = MD5.Create()) { - using (System.Security.Cryptography.MD5 md5 = System.Security.Cryptography.MD5.Create()) - { - byte[] inputBytes = System.Text.Encoding.ASCII.GetBytes(input.ToLowerInvariant()); - byte[] hashBytes = md5.ComputeHash(inputBytes); + byte[] inputBytes = Encoding.ASCII.GetBytes(input.ToLowerInvariant()); + byte[] hashBytes = md5.ComputeHash(inputBytes); - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < hashBytes.Length; i++) - { - sb.Append(hashBytes[i].ToString("X2")); - } - return sb.ToString(); - } + StringBuilder sb = new(); + for (int i = 0; i < hashBytes.Length; i++) + sb.Append(hashBytes[i].ToString("X2")); + return sb.ToString(); } } } diff --git a/Core/Core/Helpers/Http.cs b/Core/Core/Helpers/Http.cs index d09b36bdda..e5c98cd0c2 100644 --- a/Core/Core/Helpers/Http.cs +++ b/Core/Core/Helpers/Http.cs @@ -1,4 +1,4 @@ -# nullable enable +# nullable enable using System; using System.Collections.Generic; @@ -17,214 +17,225 @@ using Speckle.Core.Credentials; using Speckle.Core.Logging; -namespace Speckle.Core.Helpers +namespace Speckle.Core.Helpers; + +public static class Http { - public static class Http - { - public static IEnumerable DefaultDelay() => - Backoff.DecorrelatedJitterBackoffV2( - medianFirstRetryDelay: TimeSpan.FromMilliseconds(100), - retryCount: 5 - ); + /// + /// Policy for retrying failing Http requests + /// + 
[Obsolete( + "All http requests are now retried by the client provided in the GetHttpProxyClient method, there is no need to add retries on top", + true + )] + public static Policy HttpRetryPolicy = Policy + .Handle() + .OrResult(r => r.Equals(false)) + .WaitAndRetry( + DefaultDelay(), + (exception, timeSpan, retryAttempt, context) => + { + SpeckleLog.Logger.Information("Retrying #{retryAttempt}...", retryAttempt); + } + ); + + /// + /// Policy for retrying failing Http requests + /// + [Obsolete( + "All http requests are now retried by the client provided in the GetHttpProxyClient method, there is no need to add retries on top", + true + )] + public static AsyncPolicy HttpRetryAsyncPolicy = Policy + .Handle() + .OrResult(r => r.Equals(false)) + .WaitAndRetryAsync( + DefaultDelay(), + (exception, timeSpan, retryAttempt, context) => + { + SpeckleLog.Logger.Information("Retrying #{retryAttempt}...", retryAttempt); + } + ); - /// - /// Policy for retrying failing Http requests - /// - [Obsolete( - "All http requests are now retried by the client provided in the GetHttpProxyClient method, there is no need to add retries on top", - true - )] - public static Policy HttpRetryPolicy = Policy - .Handle() - .OrResult(r => r.Equals(false)) - .WaitAndRetry( - DefaultDelay(), - (exception, timeSpan, retryAttempt, context) => - { - SpeckleLog.Logger.Information("Retrying #{retryAttempt}...", retryAttempt); - } - ); + public static IEnumerable DefaultDelay() + { + return Backoff.DecorrelatedJitterBackoffV2(TimeSpan.FromMilliseconds(100), 5); + } - /// - /// Policy for retrying failing Http requests - /// - [Obsolete( - "All http requests are now retried by the client provided in the GetHttpProxyClient method, there is no need to add retries on top", - true - )] - public static AsyncPolicy HttpRetryAsyncPolicy = Policy - .Handle() - .OrResult(r => r.Equals(false)) + public static AsyncRetryPolicy HttpAsyncPolicy( + IEnumerable? 
delay = null + ) + { + return HttpPolicyExtensions + .HandleTransientHttpError() .WaitAndRetryAsync( - DefaultDelay(), - (exception, timeSpan, retryAttempt, context) => - { - SpeckleLog.Logger.Information("Retrying #{retryAttempt}...", retryAttempt); + delay ?? DefaultDelay(), + (ex, timeSpan, retryAttempt, context) => { + //context.Remove("retryCount"); + //context.Add("retryCount", retryAttempt); + //Log.Information( + // ex.Exception, + // "The http request failed with {exceptionType} exception retrying after {cooldown} milliseconds. This is retry attempt {retryAttempt}", + // ex.GetType().Name, + // timeSpan.TotalSeconds * 1000, + // retryAttempt + //); } ); + } - public static AsyncRetryPolicy HttpAsyncPolicy( - IEnumerable? delay = null - ) => - HttpPolicyExtensions - .HandleTransientHttpError() - .WaitAndRetryAsync( - delay ?? DefaultDelay(), - onRetry: (ex, timeSpan, retryAttempt, context) => - { - //context.Remove("retryCount"); - //context.Add("retryCount", retryAttempt); - //Log.Information( - // ex.Exception, - // "The http request failed with {exceptionType} exception retrying after {cooldown} milliseconds. This is retry attempt {retryAttempt}", - // ex.GetType().Name, - // timeSpan.TotalSeconds * 1000, - // retryAttempt - //); - } - ); - - /// - /// Checks if the user has a valid internet connection by first pinging cloudfare (fast) - /// and then trying get from the default Speckle server (slower) - /// Each check is retried 3 times - /// - /// True if the user is connected to the internet, false otherwise. - public static async Task UserHasInternet() - { - //can ping cloudfare, skip further checks - //this method should be the fastest - if (await Ping("1.1.1.1")) - return true; + /// + /// Checks if the user has a valid internet connection by first pinging cloudfare (fast) + /// and then trying get from the default Speckle server (slower) + /// Each check is retried 3 times + /// + /// True if the user is connected to the internet, false otherwise. 
+ public static async Task UserHasInternet() + { + //can ping cloudfare, skip further checks + //this method should be the fastest + if (await Ping("1.1.1.1").ConfigureAwait(false)) + return true; - //lastly, try getting the default Speckle server, in case this is a sandboxed environment - string defaultServer = AccountManager.GetDefaultServerUrl(); - bool hasInternet = await HttpPing(defaultServer); + //lastly, try getting the default Speckle server, in case this is a sandboxed environment + string defaultServer = AccountManager.GetDefaultServerUrl(); + bool hasInternet = await HttpPing(defaultServer).ConfigureAwait(false); - if (!hasInternet) - SpeckleLog.Logger.ForContext("defaultServer", defaultServer).Warning("Failed to ping internet"); + if (!hasInternet) + SpeckleLog.Logger + .ForContext("defaultServer", defaultServer) + .Warning("Failed to ping internet"); - return hasInternet; - } + return hasInternet; + } - /// - /// Pings a specific url to verify it's accessible. Retries 3 times. - /// - /// The hostname or address to ping. - /// True if the the status code is 200, false otherwise. - public static async Task Ping(string hostnameOrAddress) - { - SpeckleLog.Logger.Information("Pinging {hostnameOrAddress}", hostnameOrAddress); - var policy = Policy - .Handle() - .Or() - .WaitAndRetryAsync( - DefaultDelay(), - (ex, timeSpan, retryAttempt, context) => - { - //Log.Information( - // ex, - // "The http request failed with {exceptionType} exception retrying after {cooldown} milliseconds. This is retry attempt {retryAttempt}", - // ex.GetType().Name, - // timeSpan.TotalSeconds * 1000, - // retryAttempt - //); - } - ); - var policyResult = await policy.ExecuteAndCaptureAsync(async () => + /// + /// Pings a specific url to verify it's accessible. Retries 3 times. + /// + /// The hostname or address to ping. + /// True if the the status code is 200, false otherwise. 
+ public static async Task Ping(string hostnameOrAddress) + { + SpeckleLog.Logger.Information("Pinging {hostnameOrAddress}", hostnameOrAddress); + var policy = Policy + .Handle() + .Or() + .WaitAndRetryAsync( + DefaultDelay(), + (ex, timeSpan, retryAttempt, context) => { + //Log.Information( + // ex, + // "The http request failed with {exceptionType} exception retrying after {cooldown} milliseconds. This is retry attempt {retryAttempt}", + // ex.GetType().Name, + // timeSpan.TotalSeconds * 1000, + // retryAttempt + //); + } + ); + var policyResult = await policy + .ExecuteAndCaptureAsync(async () => { - Ping myPing = new Ping(); + Ping myPing = new(); var hostname = - (Uri.CheckHostName(hostnameOrAddress) != UriHostNameType.Unknown) + Uri.CheckHostName(hostnameOrAddress) != UriHostNameType.Unknown ? hostnameOrAddress - : (new Uri(hostnameOrAddress)).DnsSafeHost; + : new Uri(hostnameOrAddress).DnsSafeHost; byte[] buffer = new byte[32]; int timeout = 1000; - PingOptions pingOptions = new PingOptions(); - PingReply reply = await myPing.SendPingAsync(hostname, timeout, buffer, pingOptions); + PingOptions pingOptions = new(); + PingReply reply = await myPing + .SendPingAsync(hostname, timeout, buffer, pingOptions) + .ConfigureAwait(false); if (reply.Status != IPStatus.Success) throw new Exception($"The ping operation failed with status {reply.Status}"); return true; - }); - if (policyResult.Outcome == OutcomeType.Successful) - return true; - SpeckleLog.Logger.Warning( - policyResult.FinalException, - "Failed to ping {hostnameOrAddress} cause: {exceptionMessage}", - policyResult.FinalException.Message - ); - return false; - } + }) + .ConfigureAwait(false); + if (policyResult.Outcome == OutcomeType.Successful) + return true; + SpeckleLog.Logger.Warning( + policyResult.FinalException, + "Failed to ping {hostnameOrAddress} cause: {exceptionMessage}", + policyResult.FinalException.Message + ); + return false; + } - /// - /// Pings and tries gettign data from a specific 
address to verify it's online. Retries 3 times. - /// - /// The address to use - /// True if the the status code is successful, false otherwise. - public static async Task HttpPing(string address) + /// + /// Pings and tries gettign data from a specific address to verify it's online. Retries 3 times. + /// + /// The address to use + /// True if the the status code is successful, false otherwise. + public static async Task HttpPing(string address) + { + SpeckleLog.Logger.Information("HttpPinging {address}", address); + try { - SpeckleLog.Logger.Information("HttpPinging {address}", address); - try - { - var _httpClient = GetHttpProxyClient(); - var response = await _httpClient.GetAsync(address); - return response.IsSuccessStatusCode; - } - catch (Exception ex) - { - SpeckleLog.Logger.Warning(ex, "Exception while pinging: {message}", ex.Message); - return false; - } + var _httpClient = GetHttpProxyClient(); + var response = await _httpClient.GetAsync(address).ConfigureAwait(false); + return response.IsSuccessStatusCode; } - - public static HttpClient GetHttpProxyClient(SpeckleHttpClientHandler? handler = null) + catch (Exception ex) { - IWebProxy proxy = WebRequest.GetSystemWebProxy(); - proxy.Credentials = System.Net.CredentialCache.DefaultCredentials; - - return new HttpClient(handler ?? new SpeckleHttpClientHandler()); + SpeckleLog.Logger.Warning(ex, "Exception while pinging: {message}", ex.Message); + return false; } } - public class SpeckleHttpClientHandler : HttpClientHandler + public static HttpClient GetHttpProxyClient(SpeckleHttpClientHandler? handler = null) { - private IEnumerable _delay; + IWebProxy proxy = WebRequest.GetSystemWebProxy(); + proxy.Credentials = CredentialCache.DefaultCredentials; - public SpeckleHttpClientHandler(IEnumerable? delay = null) : base() - { - _delay = delay ?? Http.DefaultDelay(); - } + return new HttpClient(handler ?? 
new SpeckleHttpClientHandler()); + } +} - protected override async Task SendAsync( - HttpRequestMessage request, - CancellationToken cancellationToken - ) +public class SpeckleHttpClientHandler : HttpClientHandler +{ + private IEnumerable _delay; + + public SpeckleHttpClientHandler(IEnumerable? delay = null) + : base() + { + _delay = delay ?? Http.DefaultDelay(); + } + + protected override async Task SendAsync( + HttpRequestMessage request, + CancellationToken cancellationToken + ) + { + // this is a preliminary client server correlation implementation + // refactor this, when we have a better observability stack + var context = new Context(); + using (LogContext.PushProperty("correlationId", context.CorrelationId)) + using (LogContext.PushProperty("targetUrl", request.RequestUri)) + using (LogContext.PushProperty("httpMethod", request.Method)) { - // this is a preliminary client server correlation implementation - // refactor this, when we have a better observability stack - var context = new Context(); - using (LogContext.PushProperty("correlationId", context.CorrelationId)) - using (LogContext.PushProperty("targetUrl", request.RequestUri)) - using (LogContext.PushProperty("httpMethod", request.Method)) - { - SpeckleLog.Logger.Debug("Starting execution of http request to {targetUrl}", request.RequestUri); - var timer = new Stopwatch(); - timer.Start(); - context.Add("retryCount", 0); - var policyResult = await Http.HttpAsyncPolicy(_delay) - .ExecuteAndCaptureAsync( - ctx => - { - request.Headers.Add("x-request-id", ctx.CorrelationId.ToString()); - return base.SendAsync(request, cancellationToken); - }, - context - ); - timer.Stop(); - var status = policyResult.Outcome == OutcomeType.Successful ? 
"succeeded" : "failed"; - context.TryGetValue("retryCount", out var retryCount); - SpeckleLog.Logger.ForContext("ExceptionType", policyResult.FinalException?.GetType()) - .Information( + SpeckleLog.Logger.Debug( + "Starting execution of http request to {targetUrl}", + request.RequestUri + ); + var timer = new Stopwatch(); + timer.Start(); + context.Add("retryCount", 0); + var policyResult = await Http.HttpAsyncPolicy(_delay) + .ExecuteAndCaptureAsync( + ctx => + { + request.Headers.Add("x-request-id", ctx.CorrelationId.ToString()); + return base.SendAsync(request, cancellationToken); + }, + context + ) + .ConfigureAwait(false); + timer.Stop(); + var status = policyResult.Outcome == OutcomeType.Successful ? "succeeded" : "failed"; + context.TryGetValue("retryCount", out var retryCount); + SpeckleLog.Logger + .ForContext("ExceptionType", policyResult.FinalException?.GetType()) + .Information( "Execution of http request to {httpScheme}://{hostUrl}/{relativeUrl} {resultStatus} with {httpStatusCode} after {elapsed} seconds and {retryCount} retries", request.RequestUri.Scheme, request.RequestUri.Host, @@ -234,12 +245,11 @@ CancellationToken cancellationToken timer.Elapsed.TotalSeconds, retryCount ?? 0 ); - if (policyResult.Outcome == OutcomeType.Successful) - return policyResult.Result!; + if (policyResult.Outcome == OutcomeType.Successful) + return policyResult.Result!; - // should we wrap this exception into something Speckle specific? - throw policyResult.FinalException!; - } + // should we wrap this exception into something Speckle specific? 
+ throw policyResult.FinalException!; } } } diff --git a/Core/Core/Helpers/Path.cs b/Core/Core/Helpers/Path.cs index 50de2e4aea..84e4707563 100644 --- a/Core/Core/Helpers/Path.cs +++ b/Core/Core/Helpers/Path.cs @@ -1,176 +1,175 @@ -# nullable enable +# nullable enable using System; using System.IO; using System.Reflection; -namespace Speckle.Core.Helpers +namespace Speckle.Core.Helpers; + +/// +/// Helper class dedicated for Speckle specific Path operations. +/// +public static class SpecklePathProvider { + private static string _applicationName = "Speckle"; + + private static string _blobFolderName = "Blobs"; + + private static string _kitsFolderName = "Kits"; + + private static string _accountsFolderName = "Accounts"; + + private static string _objectsFolderName = "Objects"; + + private static string _logFolderName = "Logs"; + + private static string _userDataPathEnvVar => "SPECKLE_USERDATA_PATH"; + private static string? _path => Environment.GetEnvironmentVariable(_userDataPathEnvVar); + /// - /// Helper class dedicated for Speckle specific Path operations. + /// Get the installation path. /// - public static class SpecklePathProvider - { - private static string _applicationName = "Speckle"; + public static string InstallApplicationDataPath => + Assembly.GetAssembly(typeof(SpecklePathProvider)).Location.Contains("ProgramData") + ? Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData) + : UserApplicationDataPath(); - /// - /// Override the global Speckle application name. - /// - /// - public static void OverrideApplicationName(string applicationName) - { - _applicationName = applicationName; - } + /// + /// Get the path where the Speckle applications should be installed + /// + public static string InstallSpeckleFolderPath => + EnsureFolderExists(InstallApplicationDataPath, _applicationName); - private static string _userDataPathEnvVar => "SPECKLE_USERDATA_PATH"; - private static string? 
_path => Environment.GetEnvironmentVariable(_userDataPathEnvVar); + /// + /// Get the folder where the user's Speckle data should be stored. + /// + public static string UserSpeckleFolderPath => + EnsureFolderExists(UserApplicationDataPath(), _applicationName); - /// - /// Override the global Speckle application data path. - /// - public static void OverrideApplicationDataPath(string? path) - { - Environment.SetEnvironmentVariable(_userDataPathEnvVar, path); - } + /// + /// Get the folder where the Speckle kits should be stored. + /// + public static string KitsFolderPath => + EnsureFolderExists(InstallSpeckleFolderPath, _kitsFolderName); - private static string _blobFolderName = "Blobs"; + /// + /// + /// + public static string ObjectsFolderPath => EnsureFolderExists(KitsFolderPath, _objectsFolderName); - /// - /// Override the global Blob storage folder name. - /// - public static void OverrideBlobStorageFolder(string blobFolderName) - { - _blobFolderName = blobFolderName; - } + /// + /// Get the folder where the Speckle accounts data should be stored. + /// + public static string AccountsFolderPath => + EnsureFolderExists(UserSpeckleFolderPath, _accountsFolderName); - private static string _kitsFolderName = "Kits"; + /// + /// Override the global Speckle application name. + /// + /// + public static void OverrideApplicationName(string applicationName) + { + _applicationName = applicationName; + } - /// - /// Override the global Kits folder name. - /// - public static void OverrideKitsFolderName(string kitsFolderName) - { - _kitsFolderName = kitsFolderName; - } + /// + /// Override the global Speckle application data path. + /// + public static void OverrideApplicationDataPath(string? path) + { + Environment.SetEnvironmentVariable(_userDataPathEnvVar, path); + } - private static string _accountsFolderName = "Accounts"; + /// + /// Override the global Blob storage folder name. 
+ /// + public static void OverrideBlobStorageFolder(string blobFolderName) + { + _blobFolderName = blobFolderName; + } - /// - /// Override the global Accounts folder name. - /// - public static void OverrideAccountsFolderName(string accountsFolderName) - { - _accountsFolderName = accountsFolderName; - } + /// + /// Override the global Kits folder name. + /// + public static void OverrideKitsFolderName(string kitsFolderName) + { + _kitsFolderName = kitsFolderName; + } - /// - /// - /// - public static void OverrideObjectsFolderName(string objectsFolderName) - { - _objectsFolderName = objectsFolderName; - } + /// + /// Override the global Accounts folder name. + /// + public static void OverrideAccountsFolderName(string accountsFolderName) + { + _accountsFolderName = accountsFolderName; + } - private static string _objectsFolderName = "Objects"; + /// + /// + /// + public static void OverrideObjectsFolderName(string objectsFolderName) + { + _objectsFolderName = objectsFolderName; + } - /// - /// Get the platform specific user configuration folder path. - /// - public static string UserApplicationDataPath() + /// + /// Get the platform specific user configuration folder path. + /// + public static string UserApplicationDataPath() + { + // if we have an override, just return that + var pathOverride = _path; + if (pathOverride != null && !string.IsNullOrEmpty(pathOverride)) + return pathOverride; + + // on desktop linux and macos we use the appdata. + // but we might not have write access to the disk + // so the catch falls back to the user profile + try { - // if we have an override, just return that - var pathOverride = _path; - if (pathOverride != null && !string.IsNullOrEmpty(pathOverride)) - return pathOverride; - - // on desktop linux and macos we use the appdata. 
- // but we might not have write access to the disk - // so the catch falls back to the user profile - try - { - return Environment.GetFolderPath( - Environment.SpecialFolder.ApplicationData, - // if the folder doesn't exist, we get back an empty string on OSX, - // which in turn, breaks other stuff down the line. - // passing in the Create option ensures that this directory exists, - // which is not a given on all OS-es. - Environment.SpecialFolderOption.Create - ); - } - catch - { - // on server linux, there might not be a user setup, things can run under root - // in that case, the appdata variable is most probably not set up - // we fall back to the value of the home folder - return Environment.GetFolderPath(Environment.SpecialFolder.UserProfile); - } + return Environment.GetFolderPath( + Environment.SpecialFolder.ApplicationData, + // if the folder doesn't exist, we get back an empty string on OSX, + // which in turn, breaks other stuff down the line. + // passing in the Create option ensures that this directory exists, + // which is not a given on all OS-es. + Environment.SpecialFolderOption.Create + ); } - - /// - /// Get the installation path. - /// - public static string InstallApplicationDataPath => - Assembly.GetAssembly(typeof(SpecklePathProvider)).Location.Contains("ProgramData") - ? Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData) - : UserApplicationDataPath(); - - /// - /// Get the path where the Speckle applications should be installed - /// - public static string InstallSpeckleFolderPath => - EnsureFolderExists(InstallApplicationDataPath, _applicationName); - - /// - /// Get the folder where the user's Speckle data should be stored. - /// - public static string UserSpeckleFolderPath => - EnsureFolderExists(UserApplicationDataPath(), _applicationName); - - /// - /// Get the folder where the user's Speckle blobs should be stored. - /// - public static string BlobStoragePath(string? 
path = null) => - EnsureFolderExists(path ?? UserSpeckleFolderPath, _blobFolderName); - - /// - /// Get the folder where the Speckle kits should be stored. - /// - public static string KitsFolderPath => - EnsureFolderExists(InstallSpeckleFolderPath, _kitsFolderName); - - /// - /// - /// - public static string ObjectsFolderPath => - EnsureFolderExists(KitsFolderPath, _objectsFolderName); - - /// - /// Get the folder where the Speckle accounts data should be stored. - /// - public static string AccountsFolderPath => - EnsureFolderExists(UserSpeckleFolderPath, _accountsFolderName); - - private static string EnsureFolderExists(string basePath, string folderName) + catch { - var path = Path.Combine(basePath, folderName); - Directory.CreateDirectory(path); - return path; + // on server linux, there might not be a user setup, things can run under root + // in that case, the appdata variable is most probably not set up + // we fall back to the value of the home folder + return Environment.GetFolderPath(Environment.SpecialFolder.UserProfile); } + } - private static string _logFolderName = "Logs"; - - /// - /// Get the folder where the Speckle logs should be stored. - /// - /// Name of the application using this SDK ie.: "Rhino" - /// Public version slug of the application using this SDK ie.: "2023" - public static string LogFolderPath( - string hostApplicationName, - string? hostApplicationVersion - ) => - EnsureFolderExists( - EnsureFolderExists(UserSpeckleFolderPath, _logFolderName), - $"{hostApplicationName}{hostApplicationVersion ?? ""}" - ); + /// + /// Get the folder where the user's Speckle blobs should be stored. + /// + public static string BlobStoragePath(string? path = null) + { + return EnsureFolderExists(path ?? 
UserSpeckleFolderPath, _blobFolderName); + } + + private static string EnsureFolderExists(string basePath, string folderName) + { + var path = Path.Combine(basePath, folderName); + Directory.CreateDirectory(path); + return path; + } + + /// + /// Get the folder where the Speckle logs should be stored. + /// + /// Name of the application using this SDK ie.: "Rhino" + /// Public version slug of the application using this SDK ie.: "2023" + public static string LogFolderPath(string hostApplicationName, string? hostApplicationVersion) + { + return EnsureFolderExists( + EnsureFolderExists(UserSpeckleFolderPath, _logFolderName), + $"{hostApplicationName}{hostApplicationVersion ?? ""}" + ); } } diff --git a/Core/Core/Kits/Applications.cs b/Core/Core/Kits/Applications.cs index bd51365dd2..a0aff535bc 100644 --- a/Core/Core/Kits/Applications.cs +++ b/Core/Core/Kits/Applications.cs @@ -1,138 +1,160 @@ -using System; +namespace Speckle.Core.Kits; -namespace Speckle.Core.Kits +public enum HostAppVersion { + v, + v6, + v7, + v2019, + v2020, + v2021, + v2022, + v2023, + v2024, + v2025, + vSandbox, + vRevit, + vRevit2021, + vRevit2022, + vRevit2023, + vRevit2024, + vRevit2025, + v25, + v26 +} - - public enum HostAppVersion +public class HostApplication +{ + public HostApplication(string name, string slug) { - v, - v6, - v7, - v2019, - v2020, - v2021, - v2022, - v2023, - v2024, - v2025, - vSandbox, - vRevit, - vRevit2021, - vRevit2022, - vRevit2023, - vRevit2024, - vRevit2025, - v25, - v26 - + Name = name; + Slug = slug; } + public string Name { get; private set; } + public string Slug { get; private set; } - public class HostApplication + /// + /// Returns the versioned app name given a specific version + /// + /// + /// + public string GetVersion(HostAppVersion version) { - public string Name { get; private set; } - public string Slug { get; private set; } - - public HostApplication(string name, string slug) - { - Name = name; - Slug = slug; - } - - /// - /// Returns the versioned 
app name given a specific version - /// - /// - /// - public string GetVersion(HostAppVersion version) - { - return Name.Replace(" ", "") + version.ToString().TrimStart('v'); - } + return Name.Replace(" ", "") + version.ToString().TrimStart('v'); } +} +/// +/// List of Host Applications - their slugs should match our ghost tags and ci/cd slugs +/// +public static class HostApplications +{ + public static HostApplication Rhino = new("Rhino", "rhino"); + public static HostApplication Grasshopper = new("Grasshopper", "grasshopper"); + public static HostApplication Revit = new("Revit", "revit"); + public static HostApplication Dynamo = new("Dynamo", "dynamo"); + public static HostApplication Unity = new("Unity", "unity"); + public static HostApplication GSA = new("GSA", "gsa"); + public static HostApplication Civil = new("Civil 3D", "civil3d"); + public static HostApplication AutoCAD = new("AutoCAD", "autocad"); + public static HostApplication MicroStation = new("MicroStation", "microstation"); + public static HostApplication OpenRoads = new("OpenRoads", "openroads"); + public static HostApplication OpenRail = new("OpenRail", "openrail"); + public static HostApplication OpenBuildings = new("OpenBuildings", "openbuildings"); + public static HostApplication ETABS = new("ETABS", "etabs"); + public static HostApplication SAP2000 = new("SAP2000", "sap2000"); + public static HostApplication CSiBridge = new("CSiBridge", "csibridge"); + public static HostApplication SAFE = new("SAFE", "safe"); + public static HostApplication TeklaStructures = new("Tekla Structures", "teklastructures"); + public static HostApplication Dxf = new("DXF Converter", "dxf"); + public static HostApplication Excel = new("Excel", "excel"); + public static HostApplication Unreal = new("Unreal", "unreal"); + public static HostApplication PowerBI = new("Power BI", "powerbi"); + public static HostApplication Blender = new("Blender", "blender"); + public static HostApplication QGIS = new("QGIS", "qgis"); + 
public static HostApplication ArcGIS = new("ArcGIS", "arcgis"); + public static HostApplication SketchUp = new("SketchUp", "sketchup"); + public static HostApplication Archicad = new("Archicad", "archicad"); + public static HostApplication TopSolid = new("TopSolid", "topsolid"); + public static HostApplication Python = new("Python", "python"); + public static HostApplication NET = new(".NET", "net"); + public static HostApplication Navisworks = new("Navisworks", "navisworks"); + public static HostApplication AdvanceSteel = new("Advance Steel", "advancesteel"); + public static HostApplication Other = new("Other", "other"); /// - /// List of Host Applications - their slugs should match our ghost tags and ci/cd slugs + /// Gets a HostApplication form a string. It could be the versioned name or a string coming from a process running. /// - public static class HostApplications + /// String with the name of the app + /// + public static HostApplication GetHostAppFromString(string appname) { - public static HostApplication Rhino = new HostApplication("Rhino", "rhino"); - public static HostApplication Grasshopper = new HostApplication("Grasshopper", "grasshopper"); - public static HostApplication Revit = new HostApplication("Revit", "revit"); - public static HostApplication Dynamo = new HostApplication("Dynamo", "dynamo"); - public static HostApplication Unity = new HostApplication("Unity", "unity"); - public static HostApplication GSA = new HostApplication("GSA", "gsa"); - public static HostApplication Civil = new HostApplication("Civil 3D", "civil3d"); - public static HostApplication AutoCAD = new HostApplication("AutoCAD", "autocad"); - public static HostApplication MicroStation = new HostApplication("MicroStation", "microstation"); - public static HostApplication OpenRoads = new HostApplication("OpenRoads", "openroads"); - public static HostApplication OpenRail = new HostApplication("OpenRail", "openrail"); - public static HostApplication OpenBuildings = new 
HostApplication("OpenBuildings", "openbuildings"); - public static HostApplication ETABS = new HostApplication("ETABS", "etabs"); - public static HostApplication SAP2000 = new HostApplication("SAP2000", "sap2000"); - public static HostApplication CSiBridge = new HostApplication("CSiBridge", "csibridge"); - public static HostApplication SAFE = new HostApplication("SAFE", "safe"); - public static HostApplication TeklaStructures = new HostApplication("Tekla Structures", "teklastructures"); - public static HostApplication Dxf = new HostApplication("DXF Converter", "dxf"); - public static HostApplication Excel = new HostApplication("Excel", "excel"); - public static HostApplication Unreal = new HostApplication("Unreal", "unreal"); - public static HostApplication PowerBI = new HostApplication("Power BI", "powerbi"); - public static HostApplication Blender = new HostApplication("Blender", "blender"); - public static HostApplication QGIS = new HostApplication("QGIS", "qgis"); - public static HostApplication ArcGIS = new HostApplication("ArcGIS", "arcgis"); - public static HostApplication SketchUp = new HostApplication("SketchUp", "sketchup"); - public static HostApplication Archicad = new HostApplication("Archicad", "archicad"); - public static HostApplication TopSolid = new HostApplication("TopSolid", "topsolid"); - public static HostApplication Python = new HostApplication("Python", "python"); - public static HostApplication NET = new HostApplication(".NET", "net"); - public static HostApplication Navisworks = new HostApplication("Navisworks", "navisworks"); - public static HostApplication AdvanceSteel = new HostApplication("Advance Steel", "advancesteel"); - public static HostApplication Other = new HostApplication("Other", "other"); - - /// - /// Gets a HostApplication form a string. It could be the versioned name or a string coming from a process running. 
- /// - /// String with the name of the app - /// - public static HostApplication GetHostAppFromString(string appname) - { - if (appname == null) return Other; - appname = appname.ToLowerInvariant().Replace(" ", ""); - if (appname.Contains("dynamo")) return Dynamo; - if (appname.Contains("revit")) return Revit; - if (appname.Contains("autocad")) return AutoCAD; - if (appname.Contains("civil")) return Civil; - if (appname.Contains("rhino")) return Rhino; - if (appname.Contains("grasshopper")) return Grasshopper; - if (appname.Contains("unity")) return Unity; - if (appname.Contains("gsa")) return GSA; - if (appname.Contains("microstation")) return MicroStation; - if (appname.Contains("openroads")) return OpenRoads; - if (appname.Contains("openrail")) return OpenRail; - if (appname.Contains("openbuildings")) return OpenBuildings; - if (appname.Contains("etabs")) return ETABS; - if (appname.Contains("sap")) return SAP2000; - if (appname.Contains("csibridge")) return CSiBridge; - if (appname.Contains("safe")) return SAFE; - if (appname.Contains("teklastructures")) return TeklaStructures; - if (appname.Contains("dxf")) return Dxf; - if (appname.Contains("excel")) return Excel; - if (appname.Contains("unreal")) return Unreal; - if (appname.Contains("powerbi")) return PowerBI; - if (appname.Contains("blender")) return Blender; - if (appname.Contains("qgis")) return QGIS; - if (appname.Contains("arcgis")) return ArcGIS; - if (appname.Contains("sketchup")) return SketchUp; - if (appname.Contains("archicad")) return Archicad; - if (appname.Contains("topsolid")) return TopSolid; - if (appname.Contains("python")) return Python; - if (appname.Contains("net")) return NET; - if (appname.Contains("navisworks")) return Navisworks; - if (appname.Contains("advancesteel")) return AdvanceSteel; - return new HostApplication(appname, appname); - - } - + if (appname == null) + return Other; + appname = appname.ToLowerInvariant().Replace(" ", ""); + if (appname.Contains("dynamo")) + return 
Dynamo; + if (appname.Contains("revit")) + return Revit; + if (appname.Contains("autocad")) + return AutoCAD; + if (appname.Contains("civil")) + return Civil; + if (appname.Contains("rhino")) + return Rhino; + if (appname.Contains("grasshopper")) + return Grasshopper; + if (appname.Contains("unity")) + return Unity; + if (appname.Contains("gsa")) + return GSA; + if (appname.Contains("microstation")) + return MicroStation; + if (appname.Contains("openroads")) + return OpenRoads; + if (appname.Contains("openrail")) + return OpenRail; + if (appname.Contains("openbuildings")) + return OpenBuildings; + if (appname.Contains("etabs")) + return ETABS; + if (appname.Contains("sap")) + return SAP2000; + if (appname.Contains("csibridge")) + return CSiBridge; + if (appname.Contains("safe")) + return SAFE; + if (appname.Contains("teklastructures")) + return TeklaStructures; + if (appname.Contains("dxf")) + return Dxf; + if (appname.Contains("excel")) + return Excel; + if (appname.Contains("unreal")) + return Unreal; + if (appname.Contains("powerbi")) + return PowerBI; + if (appname.Contains("blender")) + return Blender; + if (appname.Contains("qgis")) + return QGIS; + if (appname.Contains("arcgis")) + return ArcGIS; + if (appname.Contains("sketchup")) + return SketchUp; + if (appname.Contains("archicad")) + return Archicad; + if (appname.Contains("topsolid")) + return TopSolid; + if (appname.Contains("python")) + return Python; + if (appname.Contains("net")) + return NET; + if (appname.Contains("navisworks")) + return Navisworks; + if (appname.Contains("advancesteel")) + return AdvanceSteel; + return new HostApplication(appname, appname); } } diff --git a/Core/Core/Kits/Attributes.cs b/Core/Core/Kits/Attributes.cs index 1a8a47dc1b..a3645b581f 100644 --- a/Core/Core/Kits/Attributes.cs +++ b/Core/Core/Kits/Attributes.cs @@ -1,91 +1,68 @@ -using System; +using System; -namespace Speckle.Core.Kits +namespace Speckle.Core.Kits; + +[AttributeUsage(AttributeTargets.Constructor, 
Inherited = false, AllowMultiple = false)] +public sealed class SchemaInfo : Attribute { + public SchemaInfo(string name, string description) + : this(name, description, null, null) { } - [AttributeUsage(AttributeTargets.Constructor, Inherited = false, AllowMultiple = false)] - public class SchemaInfo : Attribute + public SchemaInfo(string name, string description, string category, string subcategory) { - private string _description; - private string _name; - private string _category; - private string _subcategory; - public virtual string Subcategory { get => _subcategory; } - public virtual string Category - { - get => _category; - } + Name = name; + Description = description; + Category = category; + Subcategory = subcategory; + } - public virtual string Description - { - get { return _description; } - } + public string Subcategory { get; } - public virtual string Name - { - get { return _name; } - } + public string Category { get; } - public SchemaInfo(string name, string description) : this(name, description, null, null) { } + public string Description { get; } - public SchemaInfo(string name, string description, string category, string subcategory) - { - _name = name; - _description = description; - _category = category; - _subcategory = subcategory; - } - } + public string Name { get; } +} - [AttributeUsage(AttributeTargets.Constructor)] - public class SchemaDeprecated : Attribute - { - } +[AttributeUsage(AttributeTargets.Constructor)] +public sealed class SchemaDeprecated : Attribute { } - [AttributeUsage(AttributeTargets.Parameter, Inherited = false, AllowMultiple = false)] - public class SchemaParamInfo : Attribute +[AttributeUsage(AttributeTargets.Parameter, Inherited = false, AllowMultiple = false)] +public sealed class SchemaParamInfo : Attribute +{ + public SchemaParamInfo(string description) { - private string _description; + Description = description; + } - public virtual string Description - { - get { return _description; } - } + public string 
Description { get; } +} - public SchemaParamInfo(string description) - { - _description = description; - } - } +/// +/// Used to indicate which is the main input parameter of the schema builder component. Schema info will be attached to this object. +/// +[AttributeUsage(AttributeTargets.Parameter)] +public sealed class SchemaMainParam : Attribute +{ + public SchemaMainParam() { } +} - /// - /// Used to indicate which is the main input parameter of the schema builder component. Schema info will be attached to this object. - /// - [AttributeUsage(AttributeTargets.Parameter)] - public class SchemaMainParam : Attribute - { - public SchemaMainParam() - { - } - } +// TODO: this could be nuked, as it's only used to hide props on Base, +// which we might want to expose anyways... +/// +/// Used to ignore properties from expand objects etc +/// +[AttributeUsage(AttributeTargets.Property)] +public sealed class SchemaIgnore : Attribute { } - // TODO: this could be nuked, as it's only used to hide props on Base, - // which we might want to expose anyways... 
- /// - /// Used to ignore properties from expand objects etc - /// - [AttributeUsage(AttributeTargets.Property)] - public class SchemaIgnore : Attribute +[AttributeUsage(AttributeTargets.Method)] +public sealed class SchemaComputedAttribute : Attribute +{ + public SchemaComputedAttribute(string name) { + Name = name; } - [AttributeUsage(AttributeTargets.Method)] - public class SchemaComputedAttribute : Attribute - { - public virtual string Name { get; } - public SchemaComputedAttribute(string name) - { - Name = name; - } - } + public string Name { get; } } diff --git a/Core/Core/Kits/ISpeckleConverter.cs b/Core/Core/Kits/ISpeckleConverter.cs index 32a39d7885..ad41d1ae87 100644 --- a/Core/Core/Kits/ISpeckleConverter.cs +++ b/Core/Core/Kits/ISpeckleConverter.cs @@ -1,117 +1,116 @@ -using System.Collections.Generic; +using System.Collections.Generic; using Speckle.Core.Models; -namespace Speckle.Core.Kits +namespace Speckle.Core.Kits; + +public interface ISpeckleConverter { - public interface ISpeckleConverter - { - string Description { get; } - string Name { get; } - string Author { get; } - string WebsiteOrEmail { get; } - - /// - /// Keeps track of the conversion process - /// - public ProgressReport Report { get; } - - /// - /// Decides what to do when an element being received already exists - /// - public ReceiveMode ReceiveMode { get; set; } - - - /// - /// Converts a native object to a Speckle one - /// - /// Native object to convert - /// - public Base ConvertToSpeckle(object @object); - - /// - /// Converts a list of objects to Speckle. 
- /// - /// - /// - public List ConvertToSpeckle(List objects); - - /// - /// Checks if it can convert a native object to a Speckle one - /// - /// Native object to convert - /// - public bool CanConvertToSpeckle(object @object); - - /// - /// Converts a Speckle object to a native one - /// - /// Speckle object to convert - /// - public object ConvertToNative(Base @object); - - /// - /// Converts a list of Speckle objects to a native ones. - /// - /// - /// - public List ConvertToNative(List objects); - - /// - /// Checks if it can convert a Speckle object to a native one - /// - /// Speckle object to convert - /// - public bool CanConvertToNative(Base @object); - - /// - /// Returns a list of applications serviced by this converter - /// - /// - public IEnumerable GetServicedApplications(); - - /// - /// Sets the application document that the converter is targeting - /// - /// The current application document - public void SetContextDocument(object doc); - - /// - /// Some converters need to know which other objects are being converted, in order to sort relationships between them (ie, Revit). Use this method to set them. - /// - /// - public void SetContextObjects(List objects); - - /// - /// Some converters need to know which objects have been converted before in order to update them (ie, Revit). Use this method to set them. - /// - /// - public void SetPreviousContextObjects(List objects); - - /// - /// Some converters need to be able to receive some settings to modify their internal behaviour (i.e. Rhino's Brep Meshing options). Use this method to set them. - /// - /// The object representing the settings for your converter. 
- public void SetConverterSettings(object settings); - - } - - // NOTE: Do not change the order of the existing ones - /// - /// Receive modes indicate what to do and not do when receiving objects - /// - public enum ReceiveMode - { - /// - /// Attemts updating previously received objects by ID, deletes previously received objects that do not exist anymore and creates new ones - /// - Update, - /// - /// Always creates new objects - /// - Create, - /// - /// Ignores updating previously received objects and does not attempt updating or deleting them, creates new objects - /// - Ignore - } + string Description { get; } + string Name { get; } + string Author { get; } + string WebsiteOrEmail { get; } + + /// + /// Keeps track of the conversion process + /// + public ProgressReport Report { get; } + + /// + /// Decides what to do when an element being received already exists + /// + public ReceiveMode ReceiveMode { get; set; } + + /// + /// Converts a native object to a Speckle one + /// + /// Native object to convert + /// + public Base ConvertToSpeckle(object @object); + + /// + /// Converts a list of objects to Speckle. + /// + /// + /// + public List ConvertToSpeckle(List objects); + + /// + /// Checks if it can convert a native object to a Speckle one + /// + /// Native object to convert + /// + public bool CanConvertToSpeckle(object @object); + + /// + /// Converts a Speckle object to a native one + /// + /// Speckle object to convert + /// + public object ConvertToNative(Base @object); + + /// + /// Converts a list of Speckle objects to a native ones. 
+ /// + /// + /// + public List ConvertToNative(List objects); + + /// + /// Checks if it can convert a Speckle object to a native one + /// + /// Speckle object to convert + /// + public bool CanConvertToNative(Base @object); + + /// + /// Returns a list of applications serviced by this converter + /// + /// + public IEnumerable GetServicedApplications(); + + /// + /// Sets the application document that the converter is targeting + /// + /// The current application document + public void SetContextDocument(object doc); + + /// + /// Some converters need to know which other objects are being converted, in order to sort relationships between them (ie, Revit). Use this method to set them. + /// + /// + public void SetContextObjects(List objects); + + /// + /// Some converters need to know which objects have been converted before in order to update them (ie, Revit). Use this method to set them. + /// + /// + public void SetPreviousContextObjects(List objects); + + /// + /// Some converters need to be able to receive some settings to modify their internal behaviour (i.e. Rhino's Brep Meshing options). Use this method to set them. + /// + /// The object representing the settings for your converter. 
+ public void SetConverterSettings(object settings); +} + +// NOTE: Do not change the order of the existing ones +/// +/// Receive modes indicate what to do and not do when receiving objects +/// +public enum ReceiveMode +{ + /// + /// Attemts updating previously received objects by ID, deletes previously received objects that do not exist anymore and creates new ones + /// + Update, + + /// + /// Always creates new objects + /// + Create, + + /// + /// Ignores updating previously received objects and does not attempt updating or deleting them, creates new objects + /// + Ignore } diff --git a/Core/Core/Kits/ISpeckleKit.cs b/Core/Core/Kits/ISpeckleKit.cs index c0dd9f6c00..34aee16629 100644 --- a/Core/Core/Kits/ISpeckleKit.cs +++ b/Core/Core/Kits/ISpeckleKit.cs @@ -1,61 +1,65 @@ -#nullable enable +#nullable enable using System; using System.Collections.Generic; -namespace Speckle.Core.Kits +namespace Speckle.Core.Kits; + +/// +/// Defines the basic interface for creating a "Speckle Kit" +/// +public interface ISpeckleKit { /// - /// Defines the basic interface for creating a "Speckle Kit" + /// Gets all the object types (the object model) provided by this kit. + /// + IEnumerable Types { get; } + + /// + /// Gets all available converters for this Kit. + /// + IEnumerable Converters { get; } + + /// + /// Gets this Kit's description. + /// + string Description { get; } + + /// + /// Gets this Kit's name. + /// + string Name { get; } + + /// + /// Gets this Kit's author. + /// + string Author { get; } + + /// + /// Gets the website (or email) to contact the Kit's author. + /// + string WebsiteOrEmail { get; } + + /// + /// Tries to load a converter for a specific . /// - public interface ISpeckleKit + /// The host app string for which a is desired. 
see + /// The converter for the specific + /// Thrown if the requested converter failed to load + public ISpeckleConverter LoadConverter(string app); +} + +public class KitException : Exception +{ + public KitException(string message, ISpeckleKit kit, Exception? innerException = null) + : base(message, innerException) { - /// - /// Gets all the object types (the object model) provided by this kit. - /// - IEnumerable Types { get; } - - /// - /// Gets all available converters for this Kit. - /// - IEnumerable Converters { get; } - - /// - /// Gets this Kit's description. - /// - string Description { get; } - - /// - /// Gets this Kit's name. - /// - string Name { get; } - - /// - /// Gets this Kit's author. - /// - string Author { get; } - - /// - /// Gets the website (or email) to contact the Kit's author. - /// - string WebsiteOrEmail { get; } - - /// - /// Tries to load a converter for a specific . - /// - /// The host app string for which a is desired. see - /// The converter for the specific - /// Thrown if the requested converter failed to load - public ISpeckleConverter LoadConverter(string app); + Kit = kit; } - public class KitException : Exception - { - public ISpeckleKit Kit { get; } + public ISpeckleKit Kit { get; } - public KitException(string message, ISpeckleKit kit, Exception? innerException = null) : base(message, innerException) - { - Kit = kit; - } + public KitException() { } - } + public KitException(string message) + : base(message) { } } diff --git a/Core/Core/Kits/KitDeclaration.cs b/Core/Core/Kits/KitDeclaration.cs index c777b1b370..6c091b8025 100644 --- a/Core/Core/Kits/KitDeclaration.cs +++ b/Core/Core/Kits/KitDeclaration.cs @@ -1,68 +1,67 @@ -using System; +using System; using System.Collections.Generic; using System.Linq; -using Speckle.Core.Kits; using Speckle.Core.Models; -namespace Speckle.Core.Kits +namespace Speckle.Core.Kits; + +/// +/// Needed so we can properly deserialize all the Base-derived objects from Core itself. 
+/// +public class CoreKit : ISpeckleKit { - /// - /// Needed so we can properly deserialize all the Base-derived objects from Core itself. - /// - public class CoreKit : ISpeckleKit - { - public IEnumerable Types => GetType().Assembly.GetTypes().Where(type => type.IsSubclassOf(typeof(Base))); + public CoreKit() { } - public string Description => "Base Speckle models for revisions, streams, etc."; + public IEnumerable Types => + GetType().Assembly.GetTypes().Where(type => type.IsSubclassOf(typeof(Base))); - public string Name => nameof(CoreKit); + public string Description => "Base Speckle models for revisions, streams, etc."; - public string Author => "Dimitrie"; + public string Name => nameof(CoreKit); - public string WebsiteOrEmail => "hello@speckle.systems"; + public string Author => "Dimitrie"; - public IEnumerable Converters { get => new List(); } + public string WebsiteOrEmail => "hello@speckle.systems"; - public CoreKit() { } + public IEnumerable Converters => new List(); - public Base ToSpeckle(object @object) - { - throw new NotImplementedException(); - } + public ISpeckleConverter LoadConverter(string app) + { + return null; + } - public bool CanConvertToSpeckle(object @object) - { - throw new NotImplementedException(); - } + public Base ToSpeckle(object @object) + { + throw new NotImplementedException(); + } - public object ToNative(Base @object) - { - throw new NotImplementedException(); - } + public bool CanConvertToSpeckle(object @object) + { + throw new NotImplementedException(); + } - public bool CanConvertToNative(Base @object) - { - throw new NotImplementedException(); - } + public object ToNative(Base @object) + { + throw new NotImplementedException(); + } - public IEnumerable GetServicedApplications() - { - throw new NotImplementedException(); - } + public bool CanConvertToNative(Base @object) + { + throw new NotImplementedException(); + } - public void SetContextDocument(object @object) - { - throw new NotImplementedException(); - } + public 
IEnumerable GetServicedApplications() + { + throw new NotImplementedException(); + } - public ISpeckleConverter LoadConverter(string app) - { - return null; - } + public void SetContextDocument(object @object) + { + throw new NotImplementedException(); + } - public bool TryLoadConverter(string app, out ISpeckleConverter speckleConverter) - { - throw new NotImplementedException(); - } + public bool TryLoadConverter(string app, out ISpeckleConverter speckleConverter) + { + throw new NotImplementedException(); } } diff --git a/Core/Core/Kits/KitManager.cs b/Core/Core/Kits/KitManager.cs index 36c6e57450..5bca7b254f 100644 --- a/Core/Core/Kits/KitManager.cs +++ b/Core/Core/Kits/KitManager.cs @@ -1,326 +1,317 @@ -using System; +#nullable enable +using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Linq; using System.Reflection; -using Serilog; using Speckle.Core.Helpers; using Speckle.Core.Logging; using Speckle.Core.Models; -#nullable enable -namespace Speckle.Core.Kits +namespace Speckle.Core.Kits; + +public static class KitManager { - public static class KitManager - { - private static string? _kitsFolder = null; + private static string? 
_kitsFolder = null; - /// - /// Local installations store kits in C:\Users\USERNAME\AppData\Roaming\Speckle\Kits - /// Admin/System-wide installations in C:\ProgramData\Speckle\Kits - /// - public static string KitsFolder - { - get { return _kitsFolder ??= SpecklePathProvider.KitsFolderPath; } - set { _kitsFolder = value; } - } + public static readonly AssemblyName SpeckleAssemblyName = typeof(Base) + .GetTypeInfo() + .Assembly.GetName(); - public static readonly AssemblyName SpeckleAssemblyName = typeof(Base) - .GetTypeInfo() - .Assembly.GetName(); + private static Dictionary _SpeckleKits = new(); - private static Dictionary _SpeckleKits = - new Dictionary(); + private static List _AvailableTypes = new(); - private static List _AvailableTypes = new List(); + private static bool _initialized = false; - private static bool _initialized = false; + /// + /// Local installations store kits in C:\Users\USERNAME\AppData\Roaming\Speckle\Kits + /// Admin/System-wide installations in C:\ProgramData\Speckle\Kits + /// + public static string KitsFolder + { + get => _kitsFolder ??= SpecklePathProvider.KitsFolderPath; + set => _kitsFolder = value; + } - /// - /// Checks whether a specific kit exists. - /// - /// - /// - public static bool HasKit(string assemblyFullName) + /// + /// Returns a list of all the kits found on this user's device. + /// + public static IEnumerable Kits + { + get { Initialize(); - return _SpeckleKits.ContainsKey(assemblyFullName); + return _SpeckleKits.Values.Where(v => v != null); //NOTE: null check here should be unnecessary } + } - /// - /// Gets a specific kit. - /// - /// - /// - public static ISpeckleKit GetKit(string assemblyFullName) + /// + /// Returns a list of all the types found in all the kits on this user's device. + /// + public static IEnumerable Types + { + get { Initialize(); - return _SpeckleKits[assemblyFullName]; + return _AvailableTypes; } + } - /// - /// Returns a list of all the kits found on this user's device. 
- /// - public static IEnumerable Kits - { - get - { - Initialize(); - return _SpeckleKits.Values.Where(v => v != null); //NOTE: null check here should be unnecessary - } - } + /// + /// Checks whether a specific kit exists. + /// + /// + /// + public static bool HasKit(string assemblyFullName) + { + Initialize(); + return _SpeckleKits.ContainsKey(assemblyFullName); + } - /// - /// Returns a list of all the types found in all the kits on this user's device. - /// - public static IEnumerable Types - { - get - { - Initialize(); - return _AvailableTypes; - } - } + /// + /// Gets a specific kit. + /// + /// + /// + public static ISpeckleKit GetKit(string assemblyFullName) + { + Initialize(); + return _SpeckleKits[assemblyFullName]; + } - /// - /// Gets the default Speckle provided kit, "Objects". - /// - /// - public static ISpeckleKit GetDefaultKit() - { - Initialize(); - return _SpeckleKits.First(kvp => kvp.Value.Name == "Objects").Value; - } + /// + /// Gets the default Speckle provided kit, "Objects". + /// + /// + public static ISpeckleKit GetDefaultKit() + { + Initialize(); + return _SpeckleKits.First(kvp => kvp.Value.Name == "Objects").Value; + } - /// - /// Returns all the kits with potential converters for the software app. - /// - /// - /// - public static IEnumerable GetKitsWithConvertersForApp(string app) - { - foreach (var kit in Kits) - { - if (kit.Converters.Contains(app)) - yield return kit; - } - } + /// + /// Returns all the kits with potential converters for the software app. + /// + /// + /// + public static IEnumerable GetKitsWithConvertersForApp(string app) + { + foreach (var kit in Kits) + if (kit.Converters.Contains(app)) + yield return kit; + } - /// - /// Tells the kit manager to initialise from a specific location. - /// - /// - public static void Initialize(string kitFolderLocation) + /// + /// Tells the kit manager to initialise from a specific location. 
+ /// + /// + public static void Initialize(string kitFolderLocation) + { + if (_initialized) { - if (_initialized) - { - SpeckleLog.Logger.Error("{objectType} is already initialised", typeof(KitManager)); - throw new SpeckleException( - "The kit manager has already been initialised. Make sure you call this method earlier in your code!"); - } - - KitsFolder = kitFolderLocation; - Load(); - _initialized = true; + SpeckleLog.Logger.Error("{objectType} is already initialised", typeof(KitManager)); + throw new SpeckleException( + "The kit manager has already been initialised. Make sure you call this method earlier in your code!" + ); } - #region Private Methods + KitsFolder = kitFolderLocation; + Load(); + _initialized = true; + } + + #region Private Methods - private static void Initialize() + private static void Initialize() + { + if (!_initialized) { - if (!_initialized) - { - Load(); - _initialized = true; - } + Load(); + _initialized = true; } + } - private static void Load() - { - SpeckleLog.Logger.Information( - "Initializing Kit Manager in {KitsFolder}", - SpecklePathProvider.KitsFolderPath - ); + private static void Load() + { + SpeckleLog.Logger.Information( + "Initializing Kit Manager in {KitsFolder}", + SpecklePathProvider.KitsFolderPath + ); + + GetLoadedSpeckleReferencingAssemblies(); + LoadSpeckleReferencingAssemblies(); + + _AvailableTypes = _SpeckleKits + .Where(kit => kit.Value != null) //Null check should be unnecessary + .SelectMany(kit => kit.Value.Types) + .ToList(); + } - GetLoadedSpeckleReferencingAssemblies(); - LoadSpeckleReferencingAssemblies(); + // recursive search for referenced assemblies + public static List GetReferencedAssemblies() + { + var returnAssemblies = new List(); + var loadedAssemblies = new HashSet(); + var assembliesToCheck = new Queue(); - _AvailableTypes = _SpeckleKits - .Where(kit => kit.Value != null) //Null check should be unnecessary - .SelectMany(kit => kit.Value.Types) - .ToList(); - } + 
assembliesToCheck.Enqueue(Assembly.GetEntryAssembly()); - // recursive search for referenced assemblies - public static List GetReferencedAssemblies() + while (assembliesToCheck.Count > 0) { - var returnAssemblies = new List(); - var loadedAssemblies = new HashSet(); - var assembliesToCheck = new Queue(); + var assemblyToCheck = assembliesToCheck.Dequeue(); - assembliesToCheck.Enqueue(Assembly.GetEntryAssembly()); + if (assemblyToCheck == null) + continue; - while (assembliesToCheck.Count > 0) + foreach (var reference in assemblyToCheck.GetReferencedAssemblies()) { - var assemblyToCheck = assembliesToCheck.Dequeue(); + // filtering out system dlls + if (reference.FullName.StartsWith("System.")) + continue; + if (reference.FullName.StartsWith("Microsoft.")) + continue; - if (assemblyToCheck == null) + if (loadedAssemblies.Contains(reference.FullName)) continue; - foreach (var reference in assemblyToCheck.GetReferencedAssemblies()) + Assembly assembly; + try { - // filtering out system dlls - if (reference.FullName.StartsWith("System.")) continue; - if (reference.FullName.StartsWith("Microsoft.")) continue; - - if (loadedAssemblies.Contains(reference.FullName)) continue; - - Assembly assembly; - try - { - assembly = Assembly.Load(reference); - } - catch - { - continue; - } - - assembliesToCheck.Enqueue(assembly); - loadedAssemblies.Add(reference.FullName); - returnAssemblies.Add(assembly); + assembly = Assembly.Load(reference); + } + catch + { + continue; } - } - return returnAssemblies; + assembliesToCheck.Enqueue(assembly); + loadedAssemblies.Add(reference.FullName); + returnAssemblies.Add(assembly); + } } - private static void GetLoadedSpeckleReferencingAssemblies() - { - List assemblies = AppDomain.CurrentDomain.GetAssemblies().ToList(); - assemblies.AddRange(GetReferencedAssemblies()); - - foreach (var assembly in assemblies) - { - if (assembly.IsDynamic || assembly.ReflectionOnly) continue; - if (!assembly.IsReferencing(SpeckleAssemblyName)) continue; - if 
(_SpeckleKits.ContainsKey(assembly.FullName)) continue; + return returnAssemblies; + } - var kitClass = GetKitClass(assembly); - if (kitClass == null) continue; + private static void GetLoadedSpeckleReferencingAssemblies() + { + List assemblies = AppDomain.CurrentDomain.GetAssemblies().ToList(); + assemblies.AddRange(GetReferencedAssemblies()); - if (Activator.CreateInstance(kitClass) is ISpeckleKit speckleKit) - _SpeckleKits.Add(assembly.FullName, speckleKit); - } + foreach (var assembly in assemblies) + { + if (assembly.IsDynamic || assembly.ReflectionOnly) + continue; + if (!assembly.IsReferencing(SpeckleAssemblyName)) + continue; + if (_SpeckleKits.ContainsKey(assembly.FullName)) + continue; + + var kitClass = GetKitClass(assembly); + if (kitClass == null) + continue; + + if (Activator.CreateInstance(kitClass) is ISpeckleKit speckleKit) + _SpeckleKits.Add(assembly.FullName, speckleKit); } + } - private static void LoadSpeckleReferencingAssemblies() - { - if (!Directory.Exists(KitsFolder)) - return; + private static void LoadSpeckleReferencingAssemblies() + { + if (!Directory.Exists(KitsFolder)) + return; - var directories = Directory.GetDirectories(KitsFolder); + var directories = Directory.GetDirectories(KitsFolder); - foreach (var directory in directories) + foreach (var directory in directories) + { + foreach (var assemblyPath in Directory.EnumerateFiles(directory, "*.dll")) { - foreach (var assemblyPath in Directory.EnumerateFiles(directory, "*.dll")) + var unloadedAssemblyName = SafeGetAssemblyName(assemblyPath); + + if (unloadedAssemblyName == null) + continue; + + try { - var unloadedAssemblyName = SafeGetAssemblyName(assemblyPath); - - if (unloadedAssemblyName == null) - continue; - - try - { - var assembly = Assembly.LoadFrom(assemblyPath); - var kitClass = GetKitClass(assembly); - if (assembly.IsReferencing(SpeckleAssemblyName) && kitClass != null) - { - if (!_SpeckleKits.ContainsKey(assembly.FullName)) - { - if (Activator.CreateInstance(kitClass) is 
ISpeckleKit speckleKit) - _SpeckleKits.Add(assembly.FullName, speckleKit); - } - } - } - catch (FileLoadException ex) { } - catch (BadImageFormatException ex) { } + var assembly = Assembly.LoadFrom(assemblyPath); + var kitClass = GetKitClass(assembly); + if (assembly.IsReferencing(SpeckleAssemblyName) && kitClass != null) + if (!_SpeckleKits.ContainsKey(assembly.FullName)) + if (Activator.CreateInstance(kitClass) is ISpeckleKit speckleKit) + _SpeckleKits.Add(assembly.FullName, speckleKit); } + catch (FileLoadException ex) { } + catch (BadImageFormatException ex) { } } } + } - private static Type? GetKitClass(Assembly assembly) + private static Type? GetKitClass(Assembly assembly) + { + try { - try - { - var kitClass = assembly - .GetTypes() - .FirstOrDefault(type => - { - return type.GetInterfaces() - .Any(iface => iface.Name == nameof(ISpeckleKit)); - }); - - return kitClass; - } - catch - { - // this will be a ReflectionTypeLoadException and is expected. we don't need to care! - return null; - } - } + var kitClass = assembly + .GetTypes() + .FirstOrDefault(type => + { + return type.GetInterfaces().Any(iface => iface.Name == nameof(ISpeckleKit)); + }); - private static Assembly? SafeLoadAssembly(AppDomain domain, AssemblyName assemblyName) + return kitClass; + } + catch { - try - { - return domain.Load(assemblyName); - } - catch - { - return null; - } + // this will be a ReflectionTypeLoadException and is expected. we don't need to care! + return null; } + } - private static AssemblyName? SafeGetAssemblyName(string assemblyPath) + private static Assembly? SafeLoadAssembly(AppDomain domain, AssemblyName assemblyName) + { + try { - try - { - return AssemblyName.GetAssemblyName(assemblyPath); - } - catch - { - return null; - } + return domain.Load(assemblyName); + } + catch + { + return null; } - - #endregion } - public static class AssemblyExtensions + private static AssemblyName? 
SafeGetAssemblyName(string assemblyPath) { - /// - /// Indicates if a given assembly references another which is identified by its name. - /// - /// The assembly which will be probed. - /// The reference assembly name. - /// A boolean value indicating if there is a reference. - public static bool IsReferencing(this Assembly assembly, AssemblyName referenceName) + try { - if (AssemblyName.ReferenceMatchesDefinition(assembly.GetName(), referenceName)) - { - return true; - } + return AssemblyName.GetAssemblyName(assemblyPath); + } + catch + { + return null; + } + } - foreach (var referencedAssemblyName in assembly.GetReferencedAssemblies()) - { - if (AssemblyName.ReferenceMatchesDefinition(referencedAssemblyName, referenceName)) - { - return true; - } - } + #endregion +} - return false; - } +public static class AssemblyExtensions +{ + /// + /// Indicates if a given assembly references another which is identified by its name. + /// + /// The assembly which will be probed. + /// The reference assembly name. + /// A boolean value indicating if there is a reference. 
+ public static bool IsReferencing(this Assembly assembly, AssemblyName referenceName) + { + if (AssemblyName.ReferenceMatchesDefinition(assembly.GetName(), referenceName)) + return true; + + foreach (var referencedAssemblyName in assembly.GetReferencedAssemblies()) + if (AssemblyName.ReferenceMatchesDefinition(referencedAssemblyName, referenceName)) + return true; + + return false; } } diff --git a/Core/Core/Kits/Units.cs b/Core/Core/Kits/Units.cs index f5870c9b6f..41f27268af 100644 --- a/Core/Core/Kits/Units.cs +++ b/Core/Core/Kits/Units.cs @@ -1,319 +1,351 @@ -using System; using System.Collections.Generic; using Speckle.Core.Logging; -namespace Speckle.Core.Kits -{ - public static class Units - { - public const string Millimeters = "mm"; - public const string Centimeters = "cm"; - public const string Meters = "m"; - public const string Kilometers = "km"; - public const string Inches = "in"; - public const string Feet = "ft"; // smelly ones - public const string Yards = "yd"; - public const string Miles = "mi"; - public const string None = "none"; - - private static List SupportedUnits = new List() { Millimeters, Centimeters, Meters, Kilometers, Inches, Feet, USFeet, Yards, Miles, None }; +namespace Speckle.Core.Kits; - public static bool IsUnitSupported(string unit) => SupportedUnits.Contains(unit); +public static class Units +{ + public const string Millimeters = "mm"; + public const string Centimeters = "cm"; + public const string Meters = "m"; + public const string Kilometers = "km"; + public const string Inches = "in"; + public const string Feet = "ft"; // smelly ones + public const string Yards = "yd"; + public const string Miles = "mi"; + public const string None = "none"; - // public const string USInches = "us_in"; the smelliest ones, can add later if people scream "USA #1" - public const string USFeet = "us_ft"; // it happened, absolutely gross - // public const string USYards = "us_yd"; the smelliest ones, can add later if people scream "USA #1" - // 
public const string USMiles = "us_mi"; the smelliest ones, can add later if people scream "USA #1" + // public const string USInches = "us_in"; the smelliest ones, can add later if people scream "USA #1" + public const string USFeet = "us_ft"; // it happened, absolutely gross - public static double GetConversionFactor(string from, string to) + private static List SupportedUnits = + new() { - from = GetUnitsFromString(from); - to = GetUnitsFromString(to); + Millimeters, + Centimeters, + Meters, + Kilometers, + Inches, + Feet, + USFeet, + Yards, + Miles, + None + }; - switch (from) - { - // METRIC - case Units.Millimeters: - switch (to) - { - case Units.Centimeters: - return 0.1; - case Units.Meters: - return 0.001; - case Units.Kilometers: - return 1e-6; - case Units.Inches: - return 0.0393701; - case Units.Feet: - return 0.00328084; - case Units.USFeet: - return 0.0032808333; - case Units.Yards: - return 0.00109361; - case Units.Miles: - return 6.21371e-7; - } - break; - case Units.Centimeters: - switch (to) - { - case Units.Millimeters: - return 10; - case Units.Meters: - return 0.01; - case Units.Kilometers: - return 1e-5; - case Units.Inches: - return 0.393701; - case Units.Feet: - return 0.0328084; - case Units.USFeet: - return 0.0328083333; - case Units.Yards: - return 0.0109361; - case Units.Miles: - return 6.21371e-6; - } - break; - case Units.Meters: - switch (to) - { - case Units.Millimeters: - return 1000; - case Units.Centimeters: - return 100; - case Units.Kilometers: - return 1000; - case Units.Inches: - return 39.3701; - case Units.Feet: - return 3.28084; - case Units.USFeet: - return 3.28083333; - case Units.Yards: - return 1.09361; - case Units.Miles: - return 0.000621371; - } - break; - case Units.Kilometers: - switch (to) - { - case Units.Millimeters: - return 1000000; - case Units.Centimeters: - return 100000; - case Units.Meters: - return 1000; - case Units.Inches: - return 39370.1; - case Units.Feet: - return 3280.84; - case Units.USFeet: - 
return 3280.83333; - case Units.Yards: - return 1093.61; - case Units.Miles: - return 0.621371; - } - break; + public static bool IsUnitSupported(string unit) + { + return SupportedUnits.Contains(unit); + } - // IMPERIAL - case Units.Inches: - switch (to) - { - case Units.Millimeters: - return 25.4; - case Units.Centimeters: - return 2.54; - case Units.Meters: - return 0.0254; - case Units.Kilometers: - return 2.54e-5; - case Units.Feet: - return 0.0833333; - case Units.USFeet: - return 0.0833331667; - case Units.Yards: - return 0.027777694; - case Units.Miles: - return 1.57828e-5; - } - break; - case Units.Feet: - switch (to) - { - case Units.Millimeters: - return 304.8; - case Units.Centimeters: - return 30.48; - case Units.Meters: - return 0.3048; - case Units.Kilometers: - return 0.0003048; - case Units.Inches: - return 12; - case Units.USFeet: - return 0.999998; - case Units.Yards: - return 0.333332328; - case Units.Miles: - return 0.000189394; - } - break; - case Units.USFeet: - switch (to) - { - case Units.Millimeters: - return 120000d / 3937d; - case Units.Centimeters: - return 12000d / 3937d; - case Units.Meters: - return 1200d / 3937d; - case Units.Kilometers: - return 1.2 / 3937d; - case Units.Inches: - return 12.000024000000002; - case Units.Feet: - return 1.000002; - case Units.Yards: - return 1.000002 / 3d; - case Units.Miles: - return 1.000002 / 5280d; - } - break; - case Units.Yards: - switch (to) - { - case Units.Millimeters: - return 914.4; - case Units.Centimeters: - return 91.44; - case Units.Meters: - return 0.9144; - case Units.Kilometers: - return 0.0009144; - case Units.Inches: - return 36; - case Units.Feet: - return 3; - case Units.USFeet: - return 2.999994; - case Units.Miles: - return 1d / 1760d; - } - break; - case Units.Miles: - switch (to) - { - case Units.Millimeters: - return 1.609e+6; - case Units.Centimeters: - return 160934; - case Units.Meters: - return 1609.34; - case Units.Kilometers: - return 1.60934; - case Units.Inches: - 
return 63360; - case Units.Feet: - return 5280; - case Units.USFeet: - return 5279.98944002112; - case Units.Yards: - return 1759.99469184; - } - break; - case Units.None: - return 1; - } - return 1; - } + // public const string USYards = "us_yd"; the smelliest ones, can add later if people scream "USA #1" + // public const string USMiles = "us_mi"; the smelliest ones, can add later if people scream "USA #1" + + public static double GetConversionFactor(string from, string to) + { + from = GetUnitsFromString(from); + to = GetUnitsFromString(to); - public static string GetUnitsFromString(string unit) + switch (from) { - if (unit == null) return null; - switch (unit.ToLower()) - { - case "mm": - case "mil": - case "millimeter": - case "millimeters": - case "millimetres": - return Units.Millimeters; - case "cm": - case "centimetre": - case "centimeter": - case "centimetres": - case "centimeters": - return Units.Centimeters; - case "m": - case "meter": - case "metre": - case "meters": - case "metres": - return Units.Meters; - case "inches": - case "inch": - case "in": - return Units.Inches; - case "feet": - case "foot": - case "ft": - return Units.Feet; - case "ussurveyfeet": - return Units.USFeet; - case "yard": - case "yards": - case "yd": - return Units.Yards; - case "miles": - case "mile": - case "mi": - return Units.Miles; - case "kilometers": - case "kilometer": - case "km": - return Units.Kilometers; - case "none": - return Units.None; - } + // METRIC + case Millimeters: + switch (to) + { + case Centimeters: + return 0.1; + case Meters: + return 0.001; + case Kilometers: + return 1e-6; + case Inches: + return 0.0393701; + case Feet: + return 0.00328084; + case USFeet: + return 0.0032808333; + case Yards: + return 0.00109361; + case Miles: + return 6.21371e-7; + } + break; + case Centimeters: + switch (to) + { + case Millimeters: + return 10; + case Meters: + return 0.01; + case Kilometers: + return 1e-5; + case Inches: + return 0.393701; + case Feet: + return 
0.0328084; + case USFeet: + return 0.0328083333; + case Yards: + return 0.0109361; + case Miles: + return 6.21371e-6; + } + break; + case Meters: + switch (to) + { + case Millimeters: + return 1000; + case Centimeters: + return 100; + case Kilometers: + return 1000; + case Inches: + return 39.3701; + case Feet: + return 3.28084; + case USFeet: + return 3.28083333; + case Yards: + return 1.09361; + case Miles: + return 0.000621371; + } + break; + case Kilometers: + switch (to) + { + case Millimeters: + return 1000000; + case Centimeters: + return 100000; + case Meters: + return 1000; + case Inches: + return 39370.1; + case Feet: + return 3280.84; + case USFeet: + return 3280.83333; + case Yards: + return 1093.61; + case Miles: + return 0.621371; + } + break; - throw new SpeckleException($"Cannot understand what unit {unit} is."); + // IMPERIAL + case Inches: + switch (to) + { + case Millimeters: + return 25.4; + case Centimeters: + return 2.54; + case Meters: + return 0.0254; + case Kilometers: + return 2.54e-5; + case Feet: + return 0.0833333; + case USFeet: + return 0.0833331667; + case Yards: + return 0.027777694; + case Miles: + return 1.57828e-5; + } + break; + case Feet: + switch (to) + { + case Millimeters: + return 304.8; + case Centimeters: + return 30.48; + case Meters: + return 0.3048; + case Kilometers: + return 0.0003048; + case Inches: + return 12; + case USFeet: + return 0.999998; + case Yards: + return 0.333332328; + case Miles: + return 0.000189394; + } + break; + case USFeet: + switch (to) + { + case Millimeters: + return 120000d / 3937d; + case Centimeters: + return 12000d / 3937d; + case Meters: + return 1200d / 3937d; + case Kilometers: + return 1.2 / 3937d; + case Inches: + return 12.000024000000002; + case Feet: + return 1.000002; + case Yards: + return 1.000002 / 3d; + case Miles: + return 1.000002 / 5280d; + } + break; + case Yards: + switch (to) + { + case Millimeters: + return 914.4; + case Centimeters: + return 91.44; + case Meters: + 
return 0.9144; + case Kilometers: + return 0.0009144; + case Inches: + return 36; + case Feet: + return 3; + case USFeet: + return 2.999994; + case Miles: + return 1d / 1760d; + } + break; + case Miles: + switch (to) + { + case Millimeters: + return 1.609e+6; + case Centimeters: + return 160934; + case Meters: + return 1609.34; + case Kilometers: + return 1.60934; + case Inches: + return 63360; + case Feet: + return 5280; + case USFeet: + return 5279.98944002112; + case Yards: + return 1759.99469184; + } + break; + case None: + return 1; } + return 1; + } - public static int GetEncodingFromUnit(string unit) + public static string GetUnitsFromString(string unit) + { + if (unit == null) + return null; + switch (unit.ToLower()) { - switch (unit) - { - case Millimeters: return 1; - case Centimeters: return 2; - case Meters: return 3; - case Kilometers: return 4; - case Inches: return 5; - case Feet: return 6; - case Yards: return 7; - case Miles: return 8; - } - - return 0; + case "mm": + case "mil": + case "millimeter": + case "millimeters": + case "millimetres": + return Millimeters; + case "cm": + case "centimetre": + case "centimeter": + case "centimetres": + case "centimeters": + return Centimeters; + case "m": + case "meter": + case "metre": + case "meters": + case "metres": + return Meters; + case "inches": + case "inch": + case "in": + return Inches; + case "feet": + case "foot": + case "ft": + return Feet; + case "ussurveyfeet": + return USFeet; + case "yard": + case "yards": + case "yd": + return Yards; + case "miles": + case "mile": + case "mi": + return Miles; + case "kilometers": + case "kilometer": + case "km": + return Kilometers; + case "none": + return None; } - public static string GetUnitFromEncoding(double unit) + throw new SpeckleException($"Cannot understand what unit {unit} is."); + } + + public static int GetEncodingFromUnit(string unit) + { + switch (unit) { - switch (unit) - { - case 1: return Millimeters; - case 2: return Centimeters; - case 
3: return Meters; - case 4: return Kilometers; - case 5: return Inches; - case 6: return Feet; - case 7: return Yards; - case 8: return Miles; - } + case Millimeters: + return 1; + case Centimeters: + return 2; + case Meters: + return 3; + case Kilometers: + return 4; + case Inches: + return 5; + case Feet: + return 6; + case Yards: + return 7; + case Miles: + return 8; + } - return None; + return 0; + } + + public static string GetUnitFromEncoding(double unit) + { + switch (unit) + { + case 1: + return Millimeters; + case 2: + return Centimeters; + case 3: + return Meters; + case 4: + return Kilometers; + case 5: + return Inches; + case 6: + return Feet; + case 7: + return Yards; + case 8: + return Miles; } + + return None; } } diff --git a/Core/Core/Logging/Analytics.cs b/Core/Core/Logging/Analytics.cs index 2a0ea79d96..efec8d3404 100644 --- a/Core/Core/Logging/Analytics.cs +++ b/Core/Core/Logging/Analytics.cs @@ -1,8 +1,7 @@ -using System; +using System; using System.Collections.Generic; using System.IO; using System.Linq; -using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Net.NetworkInformation; @@ -11,247 +10,274 @@ using System.Text; using System.Threading.Tasks; using System.Web; -using Serilog; using Speckle.Core.Credentials; using Speckle.Core.Helpers; using Speckle.Newtonsoft.Json; -namespace Speckle.Core.Logging +namespace Speckle.Core.Logging; + +/// +/// Anonymous telemetry to help us understand how to make a better Speckle. +/// This really helps us to deliver a better open source project and product! +/// +public static class Analytics { /// - /// Anonymous telemetry to help us understand how to make a better Speckle. - /// This really helps us to deliver a better open source project and product! 
+ /// Default Mixpanel events /// - public static class Analytics + public enum Events { - private const string MixpanelToken = "acd87c5a50b56df91a795e999812a3a4"; - private const string MixpanelServer = "https://analytics.speckle.systems"; + /// + /// Event triggered when data is sent to a Speckle Server + /// + Send, /// - /// Default Mixpanel events + /// Event triggered when data is received from a Speckle Server /// - public enum Events - { - /// - /// Event triggered when data is sent to a Speckle Server - /// - Send, - /// - /// Event triggered when data is received from a Speckle Server - /// - Receive, - /// - /// Event triggered when a node is executed in a visual programming environment, it should contain the name of the action and the host application - /// - NodeRun, - /// - /// Event triggered when an action is executed in Desktop UI, it should contain the name of the action and the host application - /// - DUIAction, - /// - /// Event triggered when a node is first created in a visual programming environment, it should contain the name of the action and the host application - /// - NodeCreate, - /// - /// Event triggered when the import/export alert is launched or closed - /// - ImportExportAlert, - /// - /// Event triggered when the connector is registered - /// - Registered, - /// - /// Event triggered by the Mapping Tool - /// - MappingsAction - }; + Receive, + /// + /// Event triggered when a node is executed in a visual programming environment, it should contain the name of the action and the host application + /// + NodeRun, /// - /// Cached email + /// Event triggered when an action is executed in Desktop UI, it should contain the name of the action and the host application /// - private static string LastEmail { get; set; } + DUIAction, + /// - /// Cached server URL + /// Event triggered when a node is first created in a visual programming environment, it should contain the name of the action and the host application /// - private static 
string LastServer { get; set; } + NodeCreate, /// - /// Tracks an event without specifying the email and server. - /// It's not always possible to know which account the user has selected, especially in visual programming. - /// Therefore we are caching the email and server values so that they can be used also when nodes such as "Serialize" are used. - /// If no account info is cached, we use the default account data. + /// Event triggered when the import/export alert is launched or closed /// - /// Name of the even - /// Additional parameters to pass in to event - /// True if it's an action performed by a logged user - public static void TrackEvent(Events eventName, Dictionary customProperties = null, bool isAction = true) - { - string email = ""; - string server = ""; + ImportExportAlert, - if (LastEmail != null && LastServer != null && LastServer != "no-account-server") - { - email = LastEmail; - server = LastServer; - } - else - { - var acc = Credentials.AccountManager.GetDefaultAccount(); - if (acc == null) - { - var macAddr = NetworkInterface - .GetAllNetworkInterfaces() - .Where(nic => nic.OperationalStatus == OperationalStatus.Up && nic.NetworkInterfaceType != NetworkInterfaceType.Loopback) - .Select(nic => nic.GetPhysicalAddress().ToString()) - .FirstOrDefault(); + /// + /// Event triggered when the connector is registered + /// + Registered, + /// + /// Event triggered by the Mapping Tool + /// + MappingsAction + }; - email = macAddr; - server = "no-account-server"; - isAction = false; - } - else - { - email = acc.GetHashedEmail(); - server = acc.GetHashedServer(); - } + private const string MixpanelToken = "acd87c5a50b56df91a795e999812a3a4"; + private const string MixpanelServer = "https://analytics.speckle.systems"; + /// + /// Cached email + /// + private static string LastEmail { get; set; } - } + /// + /// Cached server URL + /// + private static string LastServer { get; set; } - TrackEvent(email, server, eventName, customProperties, isAction); - } + 
/// + /// Tracks an event without specifying the email and server. + /// It's not always possible to know which account the user has selected, especially in visual programming. + /// Therefore we are caching the email and server values so that they can be used also when nodes such as "Serialize" are used. + /// If no account info is cached, we use the default account data. + /// + /// Name of the even + /// Additional parameters to pass in to event + /// True if it's an action performed by a logged user + public static void TrackEvent( + Events eventName, + Dictionary customProperties = null, + bool isAction = true + ) + { + string email = ""; + string server = ""; - /// - /// Tracks an event from a specified account, anonymizes personal information - /// - /// Account to use, it will be anonymized - /// Name of the event - /// Additional parameters to pass to the event - /// True if it's an action performed by a logged user - public static void TrackEvent(Account account, Events eventName, Dictionary customProperties = null, bool isAction = true) + if (LastEmail != null && LastServer != null && LastServer != "no-account-server") { - if (account == null) - TrackEvent(eventName, customProperties, isAction); - else - TrackEvent(account.GetHashedEmail(), account.GetHashedServer(), eventName, customProperties, isAction); + email = LastEmail; + server = LastServer; } - - /// - /// Tracks an event from a specified email and server, anonymizes personal information - /// - /// Email of the user anonymized - /// Server URL anonymized - /// Name of the event - /// Additional parameters to pass to the event - /// True if it's an action performed by a logged user - private static void TrackEvent(string hashedEmail, string hashedServer, Events eventName, Dictionary customProperties = null, bool isAction = true) + else { - LastEmail = hashedEmail; - LastServer = hashedServer; - -#if DEBUG - //only track in prod - return; -#endif - - Task.Run(() => + var acc = 
AccountManager.GetDefaultAccount(); + if (acc == null) { + var macAddr = NetworkInterface + .GetAllNetworkInterfaces() + .Where( + nic => + nic.OperationalStatus == OperationalStatus.Up + && nic.NetworkInterfaceType != NetworkInterfaceType.Loopback + ) + .Select(nic => nic.GetPhysicalAddress().ToString()) + .FirstOrDefault(); - try - { - var properties = new Dictionary() - { - { "distinct_id", hashedEmail }, - { "server_id", hashedServer }, - { "token", MixpanelToken }, - { "hostApp", Setup.HostApplication }, - { "hostAppVersion", Setup.VersionedHostApplication }, - { "core_version", Assembly.GetExecutingAssembly().GetName().Version.ToString()}, - { "$os", GetOs() }, - - }; + email = macAddr; + server = "no-account-server"; + isAction = false; + } + else + { + email = acc.GetHashedEmail(); + server = acc.GetHashedServer(); + } + } - if (isAction) - properties.Add("type", "action"); + TrackEvent(email, server, eventName, customProperties, isAction); + } - if (customProperties != null) - properties = properties.Concat(customProperties).ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + /// + /// Tracks an event from a specified account, anonymizes personal information + /// + /// Account to use, it will be anonymized + /// Name of the event + /// Additional parameters to pass to the event + /// True if it's an action performed by a logged user + public static void TrackEvent( + Account account, + Events eventName, + Dictionary customProperties = null, + bool isAction = true + ) + { + if (account == null) + TrackEvent(eventName, customProperties, isAction); + else + TrackEvent( + account.GetHashedEmail(), + account.GetHashedServer(), + eventName, + customProperties, + isAction + ); + } + /// + /// Tracks an event from a specified email and server, anonymizes personal information + /// + /// Email of the user anonymized + /// Server URL anonymized + /// Name of the event + /// Additional parameters to pass to the event + /// True if it's an action performed by a logged 
user + private static void TrackEvent( + string hashedEmail, + string hashedServer, + Events eventName, + Dictionary customProperties = null, + bool isAction = true + ) + { + LastEmail = hashedEmail; + LastServer = hashedServer; - string json = JsonConvert.SerializeObject(new - { - @event = eventName.ToString(), - properties - }); +#if DEBUG + //only track in prod + return; +#endif - var query = new StreamContent(new MemoryStream(Encoding.UTF8.GetBytes("data=" + HttpUtility.UrlEncode(json)))); - HttpClient client = Http.GetHttpProxyClient(); - client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("text/plain")); - query.Headers.ContentType = new MediaTypeHeaderValue("application/json"); - client.PostAsync(MixpanelServer + "/track?ip=1", query); - } - catch (Exception ex) + Task.Run(() => + { + try + { + var properties = new Dictionary() { - SpeckleLog.Logger.ForContext("eventName", eventName.ToString()) - .ForContext("isAction", isAction) - .Warning(ex, "Analytics event failed {exceptionMessage}", ex.Message); - } - - }); - - } + { "distinct_id", hashedEmail }, + { "server_id", hashedServer }, + { "token", MixpanelToken }, + { "hostApp", Setup.HostApplication }, + { "hostAppVersion", Setup.VersionedHostApplication }, + { "core_version", Assembly.GetExecutingAssembly().GetName().Version.ToString() }, + { "$os", GetOs() } + }; + + if (isAction) + properties.Add("type", "action"); + + if (customProperties != null) + properties = properties + .Concat(customProperties) + .ToDictionary(kvp => kvp.Key, kvp => kvp.Value); + + string json = JsonConvert.SerializeObject( + new { @event = eventName.ToString(), properties } + ); + + var query = new StreamContent( + new MemoryStream(Encoding.UTF8.GetBytes("data=" + HttpUtility.UrlEncode(json))) + ); + HttpClient client = Http.GetHttpProxyClient(); + client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("text/plain")); + query.Headers.ContentType = new 
MediaTypeHeaderValue("application/json"); + client.PostAsync(MixpanelServer + "/track?ip=1", query); + } + catch (Exception ex) + { + SpeckleLog.Logger + .ForContext("eventName", eventName.ToString()) + .ForContext("isAction", isAction) + .Warning(ex, "Analytics event failed {exceptionMessage}", ex.Message); + } + }); + } - internal static void AddConnectorToProfile(string hashedEmail, string connector) + internal static void AddConnectorToProfile(string hashedEmail, string connector) + { + Task.Run(() => { - Task.Run(() => + try { - try + var data = new Dictionary() { - var data = new Dictionary() + { "$token", MixpanelToken }, + { "$distinct_id", hashedEmail }, { - { "$token", MixpanelToken }, - { "$distinct_id", hashedEmail }, - { "$union", new Dictionary() + "$union", + new Dictionary() + { { - {"Connectors", new List{ connector } }, - } - }, - { "set", new Dictionary() - { - {"Identified", true }, + "Connectors", + new List { connector } } } - }; - string json = JsonConvert.SerializeObject(data); - - - var query = new StreamContent(new MemoryStream(Encoding.UTF8.GetBytes("data=" + HttpUtility.UrlEncode(json)))); - HttpClient client = Http.GetHttpProxyClient(); - client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("text/plain")); - query.Headers.ContentType = new MediaTypeHeaderValue("application/json"); - client.PostAsync(MixpanelServer + "/engage#profile-union", query); - } - catch (Exception e) - { - // POKEMON: Gotta catch 'em all! 
- } - - }); - } - - - - - - private static string GetOs() - { - if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) return "Windows"; - if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) return "Mac OS X"; - if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) return "Linux"; - return "Unknown"; - } + }, + { + "set", + new Dictionary() { { "Identified", true } } + } + }; + string json = JsonConvert.SerializeObject(data); + + var query = new StreamContent( + new MemoryStream(Encoding.UTF8.GetBytes("data=" + HttpUtility.UrlEncode(json))) + ); + HttpClient client = Http.GetHttpProxyClient(); + client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("text/plain")); + query.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + client.PostAsync(MixpanelServer + "/engage#profile-union", query); + } + catch (Exception e) + { + // POKEMON: Gotta catch 'em all! + } + }); + } + private static string GetOs() + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + return "Windows"; + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + return "Mac OS X"; + if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + return "Linux"; + return "Unknown"; } } diff --git a/Core/Core/Logging/Log.cs b/Core/Core/Logging/Log.cs index f73853ce2e..1a619025e0 100644 --- a/Core/Core/Logging/Log.cs +++ b/Core/Core/Logging/Log.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.Net.Http; using System.Reflection; @@ -6,107 +6,107 @@ using Sentry; using Speckle.Core.Credentials; -namespace Speckle.Core.Logging +namespace Speckle.Core.Logging; + +/// +/// Anonymous telemetry to help us understand how to make a better Speckle. +/// This really helps us to deliver a better open source project and product! +/// +public static class OldLog { + private static bool _initialized = false; + /// - /// Anonymous telemetry to help us understand how to make a better Speckle. 
- /// This really helps us to deliver a better open source project and product! + /// Initializes Sentry /// - public static class OldLog + public static void Initialize() { - private static bool _initialized = false; - - /// - /// Initializes Sentry - /// - public static void Initialize() + try { - try - { - if (_initialized) - return; + if (_initialized) + return; - var dsn = "https://f29ec716d14d4121bb2a71c4f3ef7786@o436188.ingest.sentry.io/5396846"; - var env = "production"; - var debug = false; + var dsn = "https://f29ec716d14d4121bb2a71c4f3ef7786@o436188.ingest.sentry.io/5396846"; + var env = "production"; + var debug = false; #if DEBUG - env = "dev"; - dsn = null; - debug = true; + env = "dev"; + dsn = null; + debug = true; #endif - SentrySdk.Init(o => - { - o.Dsn = dsn; - o.Environment = env; - o.Debug = debug; - o.Release = "SpeckleCore@" + Assembly.GetExecutingAssembly().GetName().Version.ToString(); - o.StackTraceMode = StackTraceMode.Enhanced; - o.AttachStacktrace = true; - o.AddExceptionFilterForType(); - o.AddExceptionFilterForType(); - }); - - var id = "unknown"; - - try - { - var da = AccountManager.GetDefaultAccount(); - if (da != null) - { - id = da.GetHashedEmail(); - } - } - catch (Exception ex) - { - } - + SentrySdk.Init(o => + { + o.Dsn = dsn; + o.Environment = env; + o.Debug = debug; + o.Release = "SpeckleCore@" + Assembly.GetExecutingAssembly().GetName().Version.ToString(); + o.StackTraceMode = StackTraceMode.Enhanced; + o.AttachStacktrace = true; + o.AddExceptionFilterForType(); + o.AddExceptionFilterForType(); + }); - SentrySdk.ConfigureScope(scope => - { - scope.User = new User { Id = id, }; - scope.SetTag("hostApplication", Setup.HostApplication); - }); + var id = "unknown"; - _initialized = true; - } - catch (Exception ex) + try { - //swallow + var da = AccountManager.GetDefaultAccount(); + if (da != null) + id = da.GetHashedEmail(); } - } + catch (Exception ex) { } + SentrySdk.ConfigureScope(scope => + { + scope.User = new User { Id = 
id }; + scope.SetTag("hostApplication", Setup.HostApplication); + }); - /// - /// Captures an Exception and makes sure Sentry is initialized - /// - /// - /// - /// - public static void CaptureException(Exception e, SentryLevel level = SentryLevel.Info, List> extra = null) + _initialized = true; + } + catch (Exception ex) { - Initialize(); + //swallow + } + } - //ignore infos as they're hogging us - if (level == SentryLevel.Info) - return; + /// + /// Captures an Exception and makes sure Sentry is initialized + /// + /// + /// + /// + public static void CaptureException( + Exception e, + SentryLevel level = SentryLevel.Info, + List> extra = null + ) + { + Initialize(); - SentrySdk.CaptureException(e, scope => + //ignore infos as they're hogging us + if (level == SentryLevel.Info) + return; + + SentrySdk.CaptureException( + e, + scope => { scope.Level = level; if (extra != null) scope.SetExtras(extra); - }); - } + } + ); + } - /// - /// Adds a Breadcrumb and makes sure Sentry is initialized - /// - /// - public static void AddBreadcrumb(string message) - { - Initialize(); - SentrySdk.AddBreadcrumb(message); - } + /// + /// Adds a Breadcrumb and makes sure Sentry is initialized + /// + /// + public static void AddBreadcrumb(string message) + { + Initialize(); + SentrySdk.AddBreadcrumb(message); } } diff --git a/Core/Core/Logging/Setup.cs b/Core/Core/Logging/Setup.cs index fe5e433b2e..faa1ac8c33 100644 --- a/Core/Core/Logging/Setup.cs +++ b/Core/Core/Logging/Setup.cs @@ -1,73 +1,69 @@ -using System; -using System.Collections.Generic; using System.Diagnostics; -using System.IO; -using System.Net; -using System.Text; using System.Threading; -using Speckle.Core.Api; using Speckle.Core.Credentials; using Speckle.Core.Kits; -namespace Speckle.Core.Logging +namespace Speckle.Core.Logging; + +/// +/// Anonymous telemetry to help us understand how to make a better Speckle. +/// This really helps us to deliver a better open source project and product! 
+/// +public static class Setup { - /// - /// Anonymous telemetry to help us understand how to make a better Speckle. - /// This really helps us to deliver a better open source project and product! - /// - public static class Setup - { - public static Mutex mutex; + public static Mutex mutex; - private static bool initialized = false; + private static bool initialized = false; - static Setup() + static Setup() + { + //Set fallback values + try { - //Set fallback values - try - { - HostApplication = Process.GetCurrentProcess().ProcessName; - } - catch - { - HostApplication = "other (.NET)"; - } + HostApplication = Process.GetCurrentProcess().ProcessName; } - - public static void Init(string versionedHostApplication, string hostApplication) + catch { - if (initialized) - return; - - initialized = true; + HostApplication = "other (.NET)"; + } + } - HostApplication = hostApplication; - VersionedHostApplication = versionedHostApplication; + /// + /// Set from the connectors, defines which current host application we're running on. + /// + internal static string HostApplication { get; private set; } - //start mutex so that Manager can detect if this process is running - mutex = new Mutex(false, "SpeckleConnector-" + hostApplication); + /// + /// Set from the connectors, defines which current host application we're running on - includes the version. 
+ /// + internal static string VersionedHostApplication { get; private set; } = + HostApplications.Other.Slug; -#if !NETSTANDARD1_5_OR_GREATER - //needed by older .net frameworks, eg Revit 2019 - ServicePointManager.SecurityProtocol = SecurityProtocolType.Ssl3 | SecurityProtocolType.Tls | SecurityProtocolType.Tls11 | SecurityProtocolType.Tls12; -#endif + public static void Init(string versionedHostApplication, string hostApplication) + { + if (initialized) + return; + initialized = true; - SpeckleLog.Initialize(hostApplication, versionedHostApplication); + HostApplication = hostApplication; + VersionedHostApplication = versionedHostApplication; - foreach (var account in AccountManager.GetAccounts()) - Analytics.AddConnectorToProfile(account.GetHashedEmail(), hostApplication); - } + //start mutex so that Manager can detect if this process is running + mutex = new Mutex(false, "SpeckleConnector-" + hostApplication); - /// - /// Set from the connectors, defines which current host application we're running on. - /// - internal static string HostApplication { get; private set; } - /// - /// Set from the connectors, defines which current host application we're running on - includes the version. 
- /// - internal static string VersionedHostApplication { get; private set; } = HostApplications.Other.Slug; +#if !NETSTANDARD1_5_OR_GREATER + //needed by older .net frameworks, eg Revit 2019 + ServicePointManager.SecurityProtocol = + SecurityProtocolType.Ssl3 + | SecurityProtocolType.Tls + | SecurityProtocolType.Tls11 + | SecurityProtocolType.Tls12; +#endif + SpeckleLog.Initialize(hostApplication, versionedHostApplication); + foreach (var account in AccountManager.GetAccounts()) + Analytics.AddConnectorToProfile(account.GetHashedEmail(), hostApplication); } } diff --git a/Core/Core/Logging/SpeckleException.cs b/Core/Core/Logging/SpeckleException.cs index 03a1778b89..5715e91bd6 100644 --- a/Core/Core/Logging/SpeckleException.cs +++ b/Core/Core/Logging/SpeckleException.cs @@ -1,45 +1,46 @@ -#nullable enable +#nullable enable using System; using System.Collections.Generic; using System.Linq; using GraphQL; using Sentry; -namespace Speckle.Core.Logging +namespace Speckle.Core.Logging; + +public class SpeckleException : Exception { - public class SpeckleException : Exception + public SpeckleException() { } + + public SpeckleException(string message, Exception? inner = null) + : base(message, inner) { } + + [Obsolete("Use any other constructor")] + public SpeckleException(string message, bool log, SentryLevel level = SentryLevel.Info) + : base(message) { } + + public SpeckleException( + string message, + GraphQLError[] errors, + bool log = true, + SentryLevel level = SentryLevel.Info + ) + : base(message) { - public List> GraphQLErrors { get; set; } - - public SpeckleException() { } - - public SpeckleException(string message, Exception? 
inner = null) : base(message, inner) { } - - [Obsolete("Use any other constructor")] - public SpeckleException(string message, bool log, SentryLevel level = SentryLevel.Info) - : base(message) - { - } - - public SpeckleException( - string message, - GraphQLError[] errors, - bool log = true, - SentryLevel level = SentryLevel.Info - ) : base(message) - { - GraphQLErrors = errors - .Select(error => new KeyValuePair("error", error.Message)) - .ToList(); - } - - public SpeckleException( - string message, - Exception? inner, - bool log = true, - SentryLevel level = SentryLevel.Info - ) : base(message, inner) - { - } + GraphQLErrors = errors + .Select(error => new KeyValuePair("error", error.Message)) + .ToList(); } + + public SpeckleException( + string message, + Exception? inner, + bool log = true, + SentryLevel level = SentryLevel.Info + ) + : base(message, inner) { } + + public List> GraphQLErrors { get; set; } + + public SpeckleException(string message) + : base(message) { } } diff --git a/Core/Core/Logging/SpeckleLog.cs b/Core/Core/Logging/SpeckleLog.cs index c6d4b37c3c..ff63a114e7 100644 --- a/Core/Core/Logging/SpeckleLog.cs +++ b/Core/Core/Logging/SpeckleLog.cs @@ -1,4 +1,4 @@ -#nullable enable +#nullable enable using System; using System.Diagnostics; @@ -8,274 +8,277 @@ using Sentry; using Serilog; using Serilog.Context; +using Serilog.Core; using Serilog.Events; using Serilog.Exceptions; using Speckle.Core.Credentials; using Speckle.Core.Helpers; -namespace Speckle.Core.Logging +namespace Speckle.Core.Logging; + +/// +/// Configuration object for the Speckle logging system. +/// +public class SpeckleLogConfiguration { /// - /// Configuration object for the Speckle logging system. + /// Flag to enable enhanced log context. 
This adds the following enrich calls: + /// - WithClientAgent + /// - WithClientIp + /// - WithExceptionDetails + /// + public bool enhancedLogContext; + + /// + /// Flag to enable console sink + /// + public bool logToConsole; + + /// + /// Flag to enable File sink + /// + public bool logToFile; + + /// + /// Flag to enable Sentry sink + /// + public bool logToSentry; + + /// + /// Flag to enable Seq sink + /// + public bool logToSeq; + + /// + /// Log events bellow this level are silently dropped + /// + public LogEventLevel minimumLevel; + + /// + /// Flag to override the default Sentry DNS /// - public class SpeckleLogConfiguration + public string sentryDns = + "https://f29ec716d14d4121bb2a71c4f3ef7786@o436188.ingest.sentry.io/5396846"; + + /// + /// Default SpeckleLogConfiguration constructor. + /// These are the sane defaults we should be using across connectors. + /// + /// Log events bellow this level are silently dropped + /// Flag to enable console log sink + /// Flag to enable Seq log sink + /// Flag to enable Sentry log sink + /// Flag to enable File log sink + /// Flag to enable enhanced context on every log event + public SpeckleLogConfiguration( + LogEventLevel minimumLevel = LogEventLevel.Debug, + bool logToConsole = true, + bool logToSeq = true, + bool logToSentry = true, + bool logToFile = true, + bool enhancedLogContext = true + ) { - /// - /// Log events bellow this level are silently dropped - /// - public LogEventLevel minimumLevel; - - /// - /// Flag to enable console sink - /// - public bool logToConsole; - - /// - /// Flag to enable Seq sink - /// - public bool logToSeq; - - /// - /// Flag to enable Sentry sink - /// - public bool logToSentry; - - /// - /// Flag to override the default Sentry DNS - /// - public string sentryDns = "https://f29ec716d14d4121bb2a71c4f3ef7786@o436188.ingest.sentry.io/5396846"; - - /// - /// Flag to enable File sink - /// - public bool logToFile; - - /// - /// Flag to enable enhanced log context. 
This adds the following enrich calls: - /// - WithClientAgent - /// - WithClientIp - /// - WithExceptionDetails - /// - public bool enhancedLogContext; - - /// - /// Default SpeckleLogConfiguration constructor. - /// These are the sane defaults we should be using across connectors. - /// - /// Log events bellow this level are silently dropped - /// Flag to enable console log sink - /// Flag to enable Seq log sink - /// Flag to enable Sentry log sink - /// Flag to enable File log sink - /// Flag to enable enhanced context on every log event - public SpeckleLogConfiguration( - LogEventLevel minimumLevel = LogEventLevel.Debug, - bool logToConsole = true, - bool logToSeq = true, - bool logToSentry = true, - bool logToFile = true, - bool enhancedLogContext = true - ) - { - this.minimumLevel = minimumLevel; - this.logToConsole = logToConsole; - this.logToSeq = logToSeq; - this.logToSentry = logToSentry; - this.logToFile = logToFile; - this.enhancedLogContext = enhancedLogContext; - } + this.minimumLevel = minimumLevel; + this.logToConsole = logToConsole; + this.logToSeq = logToSeq; + this.logToSentry = logToSentry; + this.logToFile = logToFile; + this.enhancedLogContext = enhancedLogContext; } +} + +/// +/// Configurator class for a standardized logging system across Speckle (sharp). +/// +public static class SpeckleLog +{ + private static ILogger? _logger; + private static bool _initialized = false; + + public static ILogger Logger => + _logger + ?? throw new SpeckleException( + $"The logger has not been initialized. Please call {typeof(SpeckleLog).FullName}.{nameof(Initialize)}" + ); /// - /// Configurator class for a standardized logging system across Speckle (sharp). + /// Initialize logger configuration for a global Serilog.Log logger. /// - public static class SpeckleLog + public static void Initialize( + string hostApplicationName, + string? hostApplicationVersion, + SpeckleLogConfiguration? logConfiguration = null + ) { - private static ILogger? 
_logger; - - public static ILogger Logger => _logger ?? throw new SpeckleException( - $"The logger has not been initialized. Please call {typeof(SpeckleLog).FullName}.{nameof(Initialize)}"); - private static bool _initialized = false; - - /// - /// Initialize logger configuration for a global Serilog.Log logger. - /// - public static void Initialize( - string hostApplicationName, - string? hostApplicationVersion, - SpeckleLogConfiguration? logConfiguration = null - ) - { - if (_initialized) - return; + if (_initialized) + return; - logConfiguration ??= new SpeckleLogConfiguration(); + logConfiguration ??= new SpeckleLogConfiguration(); - _logger = CreateConfiguredLogger( - hostApplicationName, - hostApplicationVersion, - logConfiguration - ); - Log.Logger = Logger; + _logger = CreateConfiguredLogger(hostApplicationName, hostApplicationVersion, logConfiguration); + Log.Logger = Logger; - _addUserIdToGlobalContextFromDefaultAccount(); - _addVersionInfoToGlobalContext(); - _addHostOsInfoToGlobalContext(); - _addHostApplicationDataToGlobalContext(hostApplicationName, hostApplicationVersion); + _addUserIdToGlobalContextFromDefaultAccount(); + _addVersionInfoToGlobalContext(); + _addHostOsInfoToGlobalContext(); + _addHostApplicationDataToGlobalContext(hostApplicationName, hostApplicationVersion); - Logger.ForContext("userApplicationDataPath", SpecklePathProvider.UserApplicationDataPath()) - .ForContext("installApplicationDataPath", SpecklePathProvider.InstallApplicationDataPath) - .ForContext("speckleLogConfiguration", logConfiguration) - .Information( - "Initialized logger inside {hostApplication}/{productVersion}/{version} for user {id}. Path info {userApplicationDataPath} {installApplicationDataPath}." 
- ); + Logger + .ForContext("userApplicationDataPath", SpecklePathProvider.UserApplicationDataPath()) + .ForContext("installApplicationDataPath", SpecklePathProvider.InstallApplicationDataPath) + .ForContext("speckleLogConfiguration", logConfiguration) + .Information( + "Initialized logger inside {hostApplication}/{productVersion}/{version} for user {id}. Path info {userApplicationDataPath} {installApplicationDataPath}." + ); - _initialized = true; - } + _initialized = true; + } - /// - /// Create a new fully configured Logger instance. - /// - /// Name of the application using this SDK ie.: "Rhino" - /// Public version slug of the application using this SDK ie.: "2023" - /// Input configuration object. - /// Logger instance - public static Serilog.Core.Logger CreateConfiguredLogger( - string hostApplicationName, - string? hostApplicationVersion, - SpeckleLogConfiguration logConfiguration - ) - { - // TODO: check if we have write permissions to the file. - // if not, disable file sink, even if its enabled in the config - // show a warning about that... - var canLogToFile = true; - var logFilePath = Path.Combine( - SpecklePathProvider.LogFolderPath(hostApplicationName, hostApplicationVersion), - "SpeckleCoreLog.txt" + /// + /// Create a new fully configured Logger instance. + /// + /// Name of the application using this SDK ie.: "Rhino" + /// Public version slug of the application using this SDK ie.: "2023" + /// Input configuration object. + /// Logger instance + public static Logger CreateConfiguredLogger( + string hostApplicationName, + string? hostApplicationVersion, + SpeckleLogConfiguration logConfiguration + ) + { + // TODO: check if we have write permissions to the file. + // if not, disable file sink, even if its enabled in the config + // show a warning about that... 
+ var canLogToFile = true; + var logFilePath = Path.Combine( + SpecklePathProvider.LogFolderPath(hostApplicationName, hostApplicationVersion), + "SpeckleCoreLog.txt" + ); + var serilogLogConfiguration = new LoggerConfiguration().MinimumLevel + .Is(logConfiguration.minimumLevel) + .Enrich.FromLogContext() + .Enrich.FromGlobalLogContext(); + + if (logConfiguration.enhancedLogContext) + serilogLogConfiguration = serilogLogConfiguration.Enrich + .WithClientAgent() + .Enrich.WithClientIp() + .Enrich.WithExceptionDetails(); + + if (logConfiguration.logToFile && canLogToFile) + serilogLogConfiguration = serilogLogConfiguration.WriteTo.File( + logFilePath, + rollingInterval: RollingInterval.Day, + retainedFileCountLimit: 10 ); - var serilogLogConfiguration = new LoggerConfiguration().MinimumLevel - .Is(logConfiguration.minimumLevel) - .Enrich.FromLogContext() - .Enrich.FromGlobalLogContext(); - - if (logConfiguration.enhancedLogContext) - serilogLogConfiguration = serilogLogConfiguration.Enrich.WithClientAgent() - .Enrich.WithClientIp() - .Enrich.WithExceptionDetails(); - - - if (logConfiguration.logToFile && canLogToFile) - serilogLogConfiguration = serilogLogConfiguration.WriteTo.File( - logFilePath, - rollingInterval: RollingInterval.Day, - retainedFileCountLimit: 10 - ); - - if (logConfiguration.logToConsole) - serilogLogConfiguration = serilogLogConfiguration.WriteTo.Console(); - - if (logConfiguration.logToSeq) - serilogLogConfiguration = serilogLogConfiguration.WriteTo.Seq( - "https://seq.speckle.systems", - apiKey: "agZqxG4jQELxQQXh0iZQ" - ); - - if (logConfiguration.logToSentry) - { - var env = "production"; -#if DEBUG - env = "dev"; -#endif + if (logConfiguration.logToConsole) + serilogLogConfiguration = serilogLogConfiguration.WriteTo.Console(); - serilogLogConfiguration = serilogLogConfiguration.WriteTo.Sentry(o => - { - o.Dsn = logConfiguration.sentryDns; - o.Debug = false; - o.Environment = env; - // Set traces_sample_rate to 1.0 to capture 100% of 
transactions for performance monitoring. - // We recommend adjusting this value in production. - o.TracesSampleRate = 1.0; - // Enable Global Mode if running in a client app - o.IsGlobalModeEnabled = true; - // Debug and higher are stored as breadcrumbs (default is Information) - o.MinimumBreadcrumbLevel = LogEventLevel.Debug; - // Warning and higher is sent as event (default is Error) - o.MinimumEventLevel = LogEventLevel.Error; - }); - } - - - var logger = serilogLogConfiguration.CreateLogger(); - if (logConfiguration.logToFile && !canLogToFile) - logger.Warning("Log to file is enabled, but cannot write to {LogFilePath}", logFilePath); - return logger; - } + if (logConfiguration.logToSeq) + serilogLogConfiguration = serilogLogConfiguration.WriteTo.Seq( + "https://seq.speckle.systems", + apiKey: "agZqxG4jQELxQQXh0iZQ" + ); - private static void _addUserIdToGlobalContextFromDefaultAccount() + if (logConfiguration.logToSentry) { - var machineName = Environment.MachineName; - var userName = Environment.UserName; - var id = Crypt.Hash($"{machineName}:{userName}"); - try - { - var defaultAccount = AccountManager.GetDefaultAccount(); - if (defaultAccount != null) - id = defaultAccount.GetHashedEmail(); - } - catch (Exception ex) - { - Logger.Warning(ex, "Cannot set user id for the global log context."); - } - GlobalLogContext.PushProperty("id", id); + var env = "production"; + +#if DEBUG + env = "dev"; +#endif - SentrySdk.ConfigureScope(scope => + serilogLogConfiguration = serilogLogConfiguration.WriteTo.Sentry(o => { - scope.User = new User { Id = id, }; + o.Dsn = logConfiguration.sentryDns; + o.Debug = false; + o.Environment = env; + // Set traces_sample_rate to 1.0 to capture 100% of transactions for performance monitoring. + // We recommend adjusting this value in production. 
+ o.TracesSampleRate = 1.0; + // Enable Global Mode if running in a client app + o.IsGlobalModeEnabled = true; + // Debug and higher are stored as breadcrumbs (default is Information) + o.MinimumBreadcrumbLevel = LogEventLevel.Debug; + // Warning and higher is sent as event (default is Error) + o.MinimumEventLevel = LogEventLevel.Error; }); } - private static void _addVersionInfoToGlobalContext() - { - var assembly = Assembly.GetExecutingAssembly().Location; - var fileVersionInfo = FileVersionInfo.GetVersionInfo(assembly); - - GlobalLogContext.PushProperty("version", fileVersionInfo.FileVersion); - GlobalLogContext.PushProperty("productVersion", fileVersionInfo.ProductVersion); - } + var logger = serilogLogConfiguration.CreateLogger(); + if (logConfiguration.logToFile && !canLogToFile) + logger.Warning("Log to file is enabled, but cannot write to {LogFilePath}", logFilePath); + return logger; + } - private static string _deterimineHostOsSlug() + private static void _addUserIdToGlobalContextFromDefaultAccount() + { + var machineName = Environment.MachineName; + var userName = Environment.UserName; + var id = Crypt.Hash($"{machineName}:{userName}"); + try { - if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) return "Windows"; - if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) return "MacOS"; - if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) return "Linux"; - return RuntimeInformation.OSDescription; + var defaultAccount = AccountManager.GetDefaultAccount(); + if (defaultAccount != null) + id = defaultAccount.GetHashedEmail(); } - private static void _addHostOsInfoToGlobalContext() + catch (Exception ex) { - - var osVersion = Environment.OSVersion; - var osArchitecture = RuntimeInformation.ProcessArchitecture.ToString(); - GlobalLogContext.PushProperty("hostOs", _deterimineHostOsSlug()); - GlobalLogContext.PushProperty("hostOsVersion", osVersion); - GlobalLogContext.PushProperty("hostOsArchitecture", osArchitecture); + Logger.Warning(ex, "Cannot set 
user id for the global log context."); } + GlobalLogContext.PushProperty("id", id); - private static void _addHostApplicationDataToGlobalContext( - string hostApplicationName, - string? hostApplicationVersion - ) + SentrySdk.ConfigureScope(scope => { - GlobalLogContext.PushProperty( - "hostApplication", - $"{hostApplicationName}{hostApplicationVersion ?? ""}" - ); + scope.User = new User { Id = id }; + }); + } - SentrySdk.ConfigureScope(scope => - { - scope.SetTag("hostApplication", hostApplicationName); - }); - } + private static void _addVersionInfoToGlobalContext() + { + var assembly = Assembly.GetExecutingAssembly().Location; + var fileVersionInfo = FileVersionInfo.GetVersionInfo(assembly); + + GlobalLogContext.PushProperty("version", fileVersionInfo.FileVersion); + GlobalLogContext.PushProperty("productVersion", fileVersionInfo.ProductVersion); + } + + private static string _deterimineHostOsSlug() + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + return "Windows"; + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + return "MacOS"; + if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + return "Linux"; + return RuntimeInformation.OSDescription; + } + + private static void _addHostOsInfoToGlobalContext() + { + var osVersion = Environment.OSVersion; + var osArchitecture = RuntimeInformation.ProcessArchitecture.ToString(); + GlobalLogContext.PushProperty("hostOs", _deterimineHostOsSlug()); + GlobalLogContext.PushProperty("hostOsVersion", osVersion); + GlobalLogContext.PushProperty("hostOsArchitecture", osArchitecture); + } + + private static void _addHostApplicationDataToGlobalContext( + string hostApplicationName, + string? hostApplicationVersion + ) + { + GlobalLogContext.PushProperty( + "hostApplication", + $"{hostApplicationName}{hostApplicationVersion ?? 
""}" + ); + + SentrySdk.ConfigureScope(scope => + { + scope.SetTag("hostApplication", hostApplicationName); + }); } } diff --git a/Core/Core/Models/Attributes.cs b/Core/Core/Models/Attributes.cs index 1450feb724..c606ff0d31 100644 --- a/Core/Core/Models/Attributes.cs +++ b/Core/Core/Models/Attributes.cs @@ -1,47 +1,47 @@ -using System; +using System; -namespace Speckle.Core.Models -{ +namespace Speckle.Core.Models; +/// +/// Flags an object's property as being detachable. +/// If set to true the default serialiser will persist it separately, and add a reference to the property's value in the original object. +/// Only applies to properties of types derived from the Base class. +/// +public sealed class DetachProperty : Attribute +{ /// /// Flags an object's property as being detachable. /// If set to true the default serialiser will persist it separately, and add a reference to the property's value in the original object. /// Only applies to properties of types derived from the Base class. /// - public class DetachProperty : Attribute + /// Wether to detach the property or not. + public DetachProperty(bool _detachable) { - public bool Detachable { get; set; } = true; - - /// - /// Flags an object's property as being detachable. - /// If set to true the default serialiser will persist it separately, and add a reference to the property's value in the original object. - /// Only applies to properties of types derived from the Base class. - /// - /// Wether to detach the property or not. - public DetachProperty(bool _detachable) - { - Detachable = _detachable; - } - - public DetachProperty() - { - Detachable = true; - } + Detachable = _detachable; } - /// - /// Flags a list or array as splittable into chunks during serialisation. These chunks will be recomposed on deserialisation into the original list. Note: this attribute should be used in conjunction with . 
- /// Use this attribute on properties that can become very long and are not worth detaching into individual elements. - /// Objects per chunk: for simple types, like numbers, use a high value (>10000); for other objects, use a more conservative number depending on their serialised size. - /// - [AttributeUsage(AttributeTargets.Property)] - public class Chunkable : Attribute + public DetachProperty() { - public int MaxObjCountPerChunk { get; set; } + Detachable = true; + } + + public bool Detachable { get; set; } = true; + public bool _detachable { get; } +} - public Chunkable(int ObjectsPerChunk = 1000) - { - MaxObjCountPerChunk = ObjectsPerChunk; - } +/// +/// Flags a list or array as splittable into chunks during serialisation. These chunks will be recomposed on deserialisation into the original list. Note: this attribute should be used in conjunction with . +/// Use this attribute on properties that can become very long and are not worth detaching into individual elements. +/// Objects per chunk: for simple types, like numbers, use a high value (>10000); for other objects, use a more conservative number depending on their serialised size. 
+/// +[AttributeUsage(AttributeTargets.Property)] +public sealed class Chunkable : Attribute +{ + public Chunkable(int ObjectsPerChunk = 1000) + { + MaxObjCountPerChunk = ObjectsPerChunk; } + + public int MaxObjCountPerChunk { get; set; } + public int ObjectsPerChunk { get; } } diff --git a/Core/Core/Models/Base.cs b/Core/Core/Models/Base.cs index ceffcb3022..3363740963 100644 --- a/Core/Core/Models/Base.cs +++ b/Core/Core/Models/Base.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections; using System.Collections.Generic; using System.IO; @@ -8,330 +8,322 @@ using System.Text.RegularExpressions; using Speckle.Core.Api; using Speckle.Core.Kits; +using Speckle.Core.Serialisation; using Speckle.Core.Transports; using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Linq; -namespace Speckle.Core.Models +namespace Speckle.Core.Models; + +/// +/// Base class for all Speckle object definitions. Provides unified hashing, type extraction and serialisation. +/// When developing a speckle kit, use this class as a parent class. +/// Dynamic properties naming conventions: +/// 👉 "__" at the start of a property means it will be ignored, both for hashing and serialisation (e.g., "__ignoreMe"). +/// 👉 "@" at the start of a property name means it will be detached (when serialised with a transport) (e.g.((dynamic)obj)["@meshEquivalent"] = ...) . +/// +[Serializable] +public class Base : DynamicBase { + private static readonly Regex ChunkSyntax = new(@"^@\((\d*)\)"); //TODO: this same regex is duplicated a few times across the code base, we could consolidate them + + private string __type; /// - /// Base class for all Speckle object definitions. Provides unified hashing, type extraction and serialisation. - /// When developing a speckle kit, use this class as a parent class. - /// Dynamic properties naming conventions: - /// 👉 "__" at the start of a property means it will be ignored, both for hashing and serialisation (e.g., "__ignoreMe"). 
- /// 👉 "@" at the start of a property name means it will be detached (when serialised with a transport) (e.g.((dynamic)obj)["@meshEquivalent"] = ...) . + /// A speckle object's id is an unique hash based on its properties. NOTE: this field will be null unless the object was deserialised from a source. Use the function to get it. /// - [Serializable] - public class Base : DynamicBase - { - /// - /// A speckle object's id is an unique hash based on its properties. NOTE: this field will be null unless the object was deserialised from a source. Use the function to get it. - /// - [SchemaIgnore] - public virtual string id - { - get; set; - } + [SchemaIgnore] + public virtual string id { get; set; } - /// - /// Gets the id (a unique hash) of this object. ⚠️ This method fully serializes the object, which in the case of large objects (with many sub-objects), has a tangible cost. Avoid using it! - /// Hint: Objects that are retrieved/pulled from a server/local cache do have an id (hash) property pre-populated. - /// Note:The hash of a decomposed object differs from the hash of a non-decomposed object. - /// - /// If true, will decompose the object in the process of hashing. - /// - public string GetId(bool decompose = false, SerializerVersion serializerVersion = SerializerVersion.V2) + /// + /// This property will only be populated if the object is retreieved from storage. Use otherwise. + /// + [SchemaIgnore] + public virtual long totalChildrenCount { get; set; } + + /// + /// Secondary, ideally host application driven, object identifier. + /// + [SchemaIgnore] + public string applicationId { get; set; } + + /// + /// Holds the type information of this speckle object, derived automatically + /// from its assembly name and inheritance. 
+ /// + [SchemaIgnore] + public virtual string speckle_type + { + get { - if (serializerVersion == SerializerVersion.V1) + if (__type == null) { - var (s, t) = Operations.GetSerializerInstance(); - if (decompose) + List bases = new(); + Type myType = GetType(); + + while (myType.Name != nameof(Base)) { - s.WriteTransports = new List() { new MemoryTransport() }; + if (!myType.IsAbstract) + bases.Add(myType.FullName); + myType = myType.BaseType; } - var obj = JsonConvert.SerializeObject(this, t); - return JObject.Parse(obj).GetValue("id").ToString(); - } - else - { - var s = new Serialisation.BaseObjectSerializerV2(); - if (decompose) + + if (bases.Count == 0) + { + __type = nameof(Base); + } + else { - s.WriteTransports = new List() { new MemoryTransport() }; + bases.Reverse(); + __type = string.Join(":", bases); } - var obj = s.Serialize(this); - return JObject.Parse(obj).GetValue("id").ToString(); } + return __type; } + } - /// - /// Attempts to count the total number of detachable objects. - /// - /// The total count of the detachable children + 1 (itself). - public long GetTotalChildrenCount() + /// + /// Gets the id (a unique hash) of this object. ⚠️ This method fully serializes the object, which in the case of large objects (with many sub-objects), has a tangible cost. Avoid using it! + /// Hint: Objects that are retrieved/pulled from a server/local cache do have an id (hash) property pre-populated. + /// Note:The hash of a decomposed object differs from the hash of a non-decomposed object. + /// + /// If true, will decompose the object in the process of hashing. 
+ /// + public string GetId( + bool decompose = false, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) + { + if (serializerVersion == SerializerVersion.V1) { - var parsed = new HashSet(); - return 1 + CountDescendants(this, parsed); + var (s, t) = Operations.GetSerializerInstance(); + if (decompose) + s.WriteTransports = new List() { new MemoryTransport() }; + var obj = JsonConvert.SerializeObject(this, t); + return JObject.Parse(obj).GetValue(nameof(id)).ToString(); } - - private static readonly Regex ChunkSyntax = new Regex(@"^@\((\d*)\)"); //TODO: this same regex is duplicated a few times across the code base, we could consolidate them - private static long CountDescendants(Base @base, HashSet parsed) + else { - if (parsed.Contains(@base.GetHashCode())) - { - return 0; - } + var s = new BaseObjectSerializerV2(); + if (decompose) + s.WriteTransports = new List() { new MemoryTransport() }; + var obj = s.Serialize(this); + return JObject.Parse(obj).GetValue(nameof(id)).ToString(); + } + } + + /// + /// Attempts to count the total number of detachable objects. + /// + /// The total count of the detachable children + 1 (itself). 
+ public long GetTotalChildrenCount() + { + var parsed = new HashSet(); + return 1 + CountDescendants(this, parsed); + } - parsed.Add(@base.GetHashCode()); + private static long CountDescendants(Base @base, HashSet parsed) + { + if (parsed.Contains(@base.GetHashCode())) + return 0; - long count = 0; - var typedProps = @base.GetInstanceMembers(); - foreach (var prop in typedProps.Where(p => p.CanRead)) - { - bool isIgnored = prop.IsDefined(typeof(ObsoleteAttribute), true) || prop.IsDefined(typeof(JsonIgnoreAttribute), true); - if (isIgnored) continue; + parsed.Add(@base.GetHashCode()); - var detachAttribute = prop.GetCustomAttribute(true); - var chunkAttribute = prop.GetCustomAttribute(true); + long count = 0; + var typedProps = @base.GetInstanceMembers(); + foreach (var prop in typedProps.Where(p => p.CanRead)) + { + bool isIgnored = + prop.IsDefined(typeof(ObsoleteAttribute), true) + || prop.IsDefined(typeof(JsonIgnoreAttribute), true); + if (isIgnored) + continue; - object value = prop.GetValue(@base); + var detachAttribute = prop.GetCustomAttribute(true); + var chunkAttribute = prop.GetCustomAttribute(true); - if (detachAttribute != null && detachAttribute.Detachable && chunkAttribute == null) - { - count += HandleObjectCount(value, parsed); - } - else if (detachAttribute != null && detachAttribute.Detachable && chunkAttribute != null) - { - // Simplified chunking count handling. - var asList = value as IList; - if (asList != null) - { - count += asList.Count / chunkAttribute.MaxObjCountPerChunk; - continue; - } - } - } + object value = prop.GetValue(@base); - var dynamicProps = @base.GetDynamicMembers(); - foreach (var propName in dynamicProps) + if (detachAttribute != null && detachAttribute.Detachable && chunkAttribute == null) { - if (!propName.StartsWith("@")) + count += HandleObjectCount(value, parsed); + } + else if (detachAttribute != null && detachAttribute.Detachable && chunkAttribute != null) + { + // Simplified chunking count handling. 
+ var asList = value as IList; + if (asList != null) { + count += asList.Count / chunkAttribute.MaxObjCountPerChunk; continue; } - - // Simplfied dynamic prop chunking handling - if (ChunkSyntax.IsMatch(propName)) - { - int chunkSize = -1; - var match = ChunkSyntax.Match(propName); - int.TryParse(match.Groups[match.Groups.Count - 1].Value, out chunkSize); - - if (chunkSize != -1 && @base[propName] is IList asList) - { - count += asList.Count / chunkSize; - continue; - } - } - - count += HandleObjectCount(@base[propName], parsed); } - - return count; } - private static long HandleObjectCount(object value, HashSet parsed) + var dynamicProps = @base.GetDynamicMembers(); + foreach (var propName in dynamicProps) { - long count = 0; - switch (value) - { - case Base b: - count++; - count += CountDescendants(b, parsed); - return count; - case IDictionary d: - { - foreach (DictionaryEntry kvp in d) - { - if (kvp.Value is Base) - { - count++; - count += CountDescendants(kvp.Value as Base, parsed); - } - else - { - count += HandleObjectCount(kvp.Value, parsed); - } - } - return count; - } - case IEnumerable e when !(value is string): - { - foreach (var arrValue in e) - { - if (arrValue is Base) - { - count++; - count += CountDescendants(arrValue as Base, parsed); - } - else - { - count += HandleObjectCount(arrValue, parsed); - } - } - - return count; - } - default: - return count; - } - } + if (!propName.StartsWith("@")) + continue; - /// - /// Creates a shallow copy of the current base object. - /// This operation does NOT copy/duplicate the data inside each prop. - /// The new object's property values will be pointers to the original object's property value. - /// - /// A shallow copy of the original object. 
- public Base ShallowCopy() - { - var myDuplicate = (Base)Activator.CreateInstance(GetType()); - myDuplicate.id = id; - myDuplicate.applicationId = applicationId; - - foreach (var kvp in GetMembers( - DynamicBaseMemberType.Instance - | DynamicBaseMemberType.Dynamic - | DynamicBaseMemberType.SchemaIgnored) - ) + // Simplfied dynamic prop chunking handling + if (ChunkSyntax.IsMatch(propName)) { - var p = GetType().GetProperty(kvp.Key); - if (p != null && !p.CanWrite) - { - continue; - } + int chunkSize = -1; + var match = ChunkSyntax.Match(propName); + int.TryParse(match.Groups[match.Groups.Count - 1].Value, out chunkSize); - try - { - myDuplicate[kvp.Key] = kvp.Value; - } - catch + if (chunkSize != -1 && @base[propName] is IList asList) { - // avoids any last ditch unsettable or strange props. + count += asList.Count / chunkSize; + continue; } } - return myDuplicate; + count += HandleObjectCount(@base[propName], parsed); } - /// - /// This property will only be populated if the object is retreieved from storage. Use otherwise. - /// - [SchemaIgnore] - public virtual long totalChildrenCount { get; set; } - - /// - /// Secondary, ideally host application driven, object identifier. - /// - [SchemaIgnore] - public string applicationId { get; set; } - - - private string __type; + return count; + } - /// - /// Holds the type information of this speckle object, derived automatically - /// from its assembly name and inheritance. 
- /// - [SchemaIgnore] - public virtual string speckle_type + private static long HandleObjectCount(object value, HashSet parsed) + { + long count = 0; + switch (value) { - get + case Base b: + count++; + count += CountDescendants(b, parsed); + return count; + case IDictionary d: { - if (__type == null) - { - List bases = new List(); - Type myType = GetType(); - - while (myType.Name != nameof(Base)) + foreach (DictionaryEntry kvp in d) + if (kvp.Value is Base) + { + count++; + count += CountDescendants(kvp.Value as Base, parsed); + } + else { - if (!myType.IsAbstract) - { - bases.Add(myType.FullName); - } - myType = myType.BaseType; + count += HandleObjectCount(kvp.Value, parsed); } - if (bases.Count == 0) + return count; + } + case IEnumerable e when !(value is string): + { + foreach (var arrValue in e) + if (arrValue is Base) { - __type = nameof(Base); + count++; + count += CountDescendants(arrValue as Base, parsed); } else { - bases.Reverse(); - __type = string.Join(":", bases); + count += HandleObjectCount(arrValue, parsed); } - } - return __type; + + return count; } + default: + return count; } } - - public class Blob : Base + /// + /// Creates a shallow copy of the current base object. + /// This operation does NOT copy/duplicate the data inside each prop. + /// The new object's property values will be pointers to the original object's property value. + /// + /// A shallow copy of the original object. 
+ public Base ShallowCopy() { - private string _hash; - private bool hashExpired = true; - - private string _filePath; - public string filePath + var myDuplicate = (Base)Activator.CreateInstance(GetType()); + myDuplicate.id = id; + myDuplicate.applicationId = applicationId; + + foreach ( + var kvp in GetMembers( + DynamicBaseMemberType.Instance + | DynamicBaseMemberType.Dynamic + | DynamicBaseMemberType.SchemaIgnored + ) + ) { - get => _filePath; - set + var p = GetType().GetProperty(kvp.Key); + if (p != null && !p.CanWrite) + continue; + + try + { + myDuplicate[kvp.Key] = kvp.Value; + } + catch { - if (originalPath is null) originalPath = value; - _filePath = value; - hashExpired = true; + // avoids any last ditch unsettable or strange props. } } - public string originalPath { get; set; } - public Blob() { } + return myDuplicate; + } +} - public Blob(string filePath) - { - this.filePath = filePath; - } +public class Blob : Base +{ + [JsonIgnore] + public static int LocalHashPrefixLength = 20; - /// - /// For blobs, the id is the same as the file hash. Please note, when deserialising, the id will be set from the original hash generated on sending. 
- /// - public override string id { get => GetFileHash(); set => base.id = value; } + private string _filePath; + private string _hash; + private bool hashExpired = true; - public string GetFileHash() - { - if ((hashExpired || _hash == null) && filePath != null) - { - _hash = Utilities.hashFile(filePath); - } + public Blob() { } - return _hash; - } + public Blob(string filePath) + { + this.filePath = filePath; + } - [OnDeserialized] - internal void OnDeserialized(StreamingContext context) + public string filePath + { + get => _filePath; + set { - hashExpired = false; + if (originalPath is null) + originalPath = value; + _filePath = value; + hashExpired = true; } + } - public string getLocalDestinationPath(string blobStorageFolder) - { - var fileName = Path.GetFileName(filePath); - return Path.Combine(blobStorageFolder, $"{id.Substring(0, 10)}-{fileName}"); - } + public string originalPath { get; set; } + + /// + /// For blobs, the id is the same as the file hash. Please note, when deserialising, the id will be set from the original hash generated on sending. 
+ /// + public override string id + { + get => GetFileHash(); + set => base.id = value; + } + + public string GetFileHash() + { + if ((hashExpired || _hash == null) && filePath != null) + _hash = Utilities.hashFile(filePath); + + return _hash; + } - [JsonIgnore] - public static int LocalHashPrefixLength = 20; + [OnDeserialized] + internal void OnDeserialized(StreamingContext context) + { + hashExpired = false; + } + + public string getLocalDestinationPath(string blobStorageFolder) + { + var fileName = Path.GetFileName(filePath); + return Path.Combine(blobStorageFolder, $"{id.Substring(0, 10)}-{fileName}"); } } diff --git a/Core/Core/Models/DynamicBase.cs b/Core/Core/Models/DynamicBase.cs index 669922434e..e61a354c86 100644 --- a/Core/Core/Models/DynamicBase.cs +++ b/Core/Core/Models/DynamicBase.cs @@ -1,296 +1,314 @@ -using System; +using System; using System.Collections.Generic; using System.Dynamic; using System.Linq; using System.Reflection; using System.Text.RegularExpressions; -using Serilog; using Speckle.Core.Kits; using Speckle.Core.Logging; -namespace Speckle.Core.Models -{ +namespace Speckle.Core.Models; +/// +/// Base class implementing a bunch of nice dynamic object methods, like adding and removing props dynamically. Makes c# feel like json. +/// Orginally adapted from Rick Strahl 🤘 +/// https://weblog.west-wind.com/posts/2012/feb/08/creating-a-dynamic-extensible-c-expando-object +/// +public class DynamicBase : DynamicObject, IDynamicMetaObjectProvider +{ /// - /// Base class implementing a bunch of nice dynamic object methods, like adding and removing props dynamically. Makes c# feel like json. - /// Orginally adapted from Rick Strahl 🤘 - /// https://weblog.west-wind.com/posts/2012/feb/08/creating-a-dynamic-extensible-c-expando-object + /// Default value for /// - public class DynamicBase : DynamicObject, IDynamicMetaObjectProvider - { - /// - /// The actual property bag, where dynamically added props are stored. 
- /// - private Dictionary properties = new Dictionary(); + public const DynamicBaseMemberType DefaultIncludeMembers = + DynamicBaseMemberType.Instance | DynamicBaseMemberType.Dynamic; - public DynamicBase() - { + // Rule for multiple leading @. + private static Regex manyLeadingAtChars = new(@"^@{2,}"); - } + // Rule for invalid chars. + private static Regex invalidChars = new(@"[\.\/]"); - /// - /// Gets properties via the dot syntax. - ///
((dynamic)myObject).superProperty;
- ///
- /// - /// - /// - public override bool TryGetMember(GetMemberBinder binder, out object result) - { - return (properties.TryGetValue(binder.Name, out result)); - } + private static Dictionary> propInfoCache = new(); - /// - /// Sets properties via the dot syntax. - ///
((dynamic)myObject).superProperty = something;
- ///
- /// - /// - /// - public override bool TrySetMember(SetMemberBinder binder, object value) - { - var valid = IsPropNameValid(binder.Name, out _); - if (valid) - properties[binder.Name] = value; - return valid; - } + /// + /// The actual property bag, where dynamically added props are stored. + /// + private Dictionary properties = new(); - // Rule for multiple leading @. - private static Regex manyLeadingAtChars = new Regex(@"^@{2,}"); - // Rule for invalid chars. - private static Regex invalidChars = new Regex(@"[\.\/]"); - public bool IsPropNameValid(string name, out string reason) + public DynamicBase() { } + + /// + /// Sets and gets properties using the key accessor pattern. E.g.: + ///
((dynamic)myObject)["superProperty"] = 42;
+ ///
+ /// + /// + [IgnoreTheItem] + public object this[string key] + { + get { - // Existing members - //var members = GetInstanceMembersNames(); + if (properties.ContainsKey(key)) + return properties[key]; - // TODO: Check for detached/non-detached duplicate names? i.e: '@something' vs 'something' - // TODO: Instance members will not be overwritten, this may cause issues. - var checks = new List<(bool, string)> - { - (!(string.IsNullOrEmpty(name) || name == "@"), "Found empty prop name"), - // Checks for multiple leading @ - (!manyLeadingAtChars.IsMatch(name), "Only one leading '@' char is allowed. This signals the property value should be detached."), - // Checks for invalid chars - (!invalidChars.IsMatch(name), $"Prop with name '{name}' contains invalid characters. The following characters are not allowed: ./"), - // Checks if you are trying to change a member property - //(!members.Contains(name), "Modifying the value of instance member properties is not allowed.") - }; - - var r = ""; - // Prop name is valid if none of the checks are true - var isValid = checks.TrueForAll(v => - { - if (!v.Item1) r = v.Item2; - return v.Item1; - }); + PopulatePropInfoCache(GetType()); + var prop = propInfoCache[GetType()].FirstOrDefault(p => p.Name == key); - reason = r; - return isValid; - } + if (prop == null) + return null; - /// - /// Sets and gets properties using the key accessor pattern. E.g.: - ///
((dynamic)myObject)["superProperty"] = 42;
- ///
- /// - /// - [IgnoreTheItem] - public object this[string key] + return prop.GetValue(this); + } + set { - get - { - if (properties.ContainsKey(key)) - return properties[key]; + if (!IsPropNameValid(key, out string reason)) + throw new InvalidPropNameException(key, reason); - PopulatePropInfoCache(GetType()); - var prop = propInfoCache[GetType()].FirstOrDefault(p => p.Name == key); + if (properties.ContainsKey(key)) + { + properties[key] = value; + return; + } - if (prop == null) - return null; + PopulatePropInfoCache(GetType()); + var prop = propInfoCache[GetType()].FirstOrDefault(p => p.Name == key); - return prop.GetValue(this); + if (prop == null) + { + properties[key] = value; + return; } - set + try { - if (!IsPropNameValid(key, out string reason)) throw new InvalidPropNameException(key, reason); - - if (properties.ContainsKey(key)) - { - properties[key] = value; - return; - } - - PopulatePropInfoCache(GetType()); - var prop = propInfoCache[GetType()].FirstOrDefault(p => p.Name == key); - - if (prop == null) - { - properties[key] = value; - return; - } - try - { - prop.SetValue(this, value); - } - catch (Exception ex) - { - throw new SpeckleException($"Failed to set value for {GetType().Name}.{prop.Name}", ex); - } + prop.SetValue(this, value); } - } - - private static Dictionary> propInfoCache = new Dictionary>(); - - private static void PopulatePropInfoCache(Type type) - { - if (!propInfoCache.ContainsKey(type)) + catch (Exception ex) { - propInfoCache[type] = type.GetProperties(BindingFlags.Instance | BindingFlags.Public).Where(p => !p.IsDefined(typeof(IgnoreTheItemAttribute), true)).ToList(); + throw new SpeckleException($"Failed to set value for {GetType().Name}.{prop.Name}", ex); } } + } - /// - /// Gets all of the property names on this class, dynamic or not. - /// - [Obsolete("Use `GetMembers(DynamicBaseMemberType.All).Keys` instead")] - public override IEnumerable GetDynamicMemberNames() + /// + /// Gets properties via the dot syntax. + ///
((dynamic)myObject).superProperty;
+ ///
+ /// + /// + /// + public override bool TryGetMember(GetMemberBinder binder, out object result) + { + return properties.TryGetValue(binder.Name, out result); + } + + /// + /// Sets properties via the dot syntax. + ///
((dynamic)myObject).superProperty = something;
+ ///
+ /// + /// + /// + public override bool TrySetMember(SetMemberBinder binder, object value) + { + var valid = IsPropNameValid(binder.Name, out _); + if (valid) + properties[binder.Name] = value; + return valid; + } + + public bool IsPropNameValid(string name, out string reason) + { + // Existing members + //var members = GetInstanceMembersNames(); + + // TODO: Check for detached/non-detached duplicate names? i.e: '@something' vs 'something' + // TODO: Instance members will not be overwritten, this may cause issues. + var checks = new List<(bool, string)> { - PopulatePropInfoCache(GetType()); - var pinfos = propInfoCache[GetType()]; + (!(string.IsNullOrEmpty(name) || name == "@"), "Found empty prop name"), + // Checks for multiple leading @ + ( + !manyLeadingAtChars.IsMatch(name), + "Only one leading '@' char is allowed. This signals the property value should be detached." + ), + // Checks for invalid chars + ( + !invalidChars.IsMatch(name), + $"Prop with name '{name}' contains invalid characters. The following characters are not allowed: ./" + ) + // Checks if you are trying to change a member property + //(!members.Contains(name), "Modifying the value of instance member properties is not allowed.") + }; + + var r = ""; + // Prop name is valid if none of the checks are true + var isValid = checks.TrueForAll(v => + { + if (!v.Item1) + r = v.Item2; + return v.Item1; + }); - var names = new List(properties.Count + pinfos.Count); - foreach (var pinfo in pinfos) names.Add(pinfo.Name); - foreach (var kvp in properties) names.Add(kvp.Key); + reason = r; + return isValid; + } - return names; - } + private static void PopulatePropInfoCache(Type type) + { + if (!propInfoCache.ContainsKey(type)) + propInfoCache[type] = type.GetProperties(BindingFlags.Instance | BindingFlags.Public) + .Where(p => !p.IsDefined(typeof(IgnoreTheItemAttribute), true)) + .ToList(); + } - /// - /// Gets the names of the defined class properties (typed). 
- /// - /// - [Obsolete("Use GetMembers(DynamicBaseMemberType.InstanceAll).Keys instead")] - public IEnumerable GetInstanceMembersNames() => GetInstanceMembersNames(GetType()); + /// + /// Gets all of the property names on this class, dynamic or not. + /// + [Obsolete("Use `GetMembers(DynamicBaseMemberType.All).Keys` instead")] + public override IEnumerable GetDynamicMemberNames() + { + PopulatePropInfoCache(GetType()); + var pinfos = propInfoCache[GetType()]; - public static IEnumerable GetInstanceMembersNames(Type t) - { - PopulatePropInfoCache(t); - var pinfos = propInfoCache[t]; + var names = new List(properties.Count + pinfos.Count); + foreach (var pinfo in pinfos) + names.Add(pinfo.Name); + foreach (var kvp in properties) + names.Add(kvp.Key); - var names = new List(pinfos.Count); - foreach (var pinfo in pinfos) names.Add(pinfo.Name); + return names; + } - return names; - } + /// + /// Gets the names of the defined class properties (typed). + /// + /// + [Obsolete("Use GetMembers(DynamicBaseMemberType.InstanceAll).Keys instead")] + public IEnumerable GetInstanceMembersNames() + { + return GetInstanceMembersNames(GetType()); + } - /// - /// Gets the defined (typed) properties of this object. - /// - /// - public IEnumerable GetInstanceMembers() => GetInstanceMembers(GetType()); + public static IEnumerable GetInstanceMembersNames(Type t) + { + PopulatePropInfoCache(t); + var pinfos = propInfoCache[t]; - public static IEnumerable GetInstanceMembers(Type t) - { - PopulatePropInfoCache(t); - var pinfos = propInfoCache[t]; + var names = new List(pinfos.Count); + foreach (var pinfo in pinfos) + names.Add(pinfo.Name); - var names = new List(pinfos.Count); + return names; + } - foreach (var pinfo in pinfos) - if (pinfo.Name != "Item") names.Add(pinfo); + /// + /// Gets the defined (typed) properties of this object. 
+ /// + /// + public IEnumerable GetInstanceMembers() + { + return GetInstanceMembers(GetType()); + } - return names; - } + public static IEnumerable GetInstanceMembers(Type t) + { + PopulatePropInfoCache(t); + var pinfos = propInfoCache[t]; - /// - /// Gets the names of the typed and dynamic properties that don't have a [SchemaIgnore] attribute. - /// - /// - [Obsolete("Use GetMembers().Keys instead")] - public IEnumerable GetMemberNames() => GetMembers().Keys; - - /// - /// Default value for - /// - public const DynamicBaseMemberType DefaultIncludeMembers = DynamicBaseMemberType.Instance | DynamicBaseMemberType.Dynamic; - - /// - /// Gets the typed and dynamic properties. - /// - /// Specifies which members should be included in the resulting dictionary. Can be concatenated with "|" - /// A dictionary containing the key's and values of the object. - public Dictionary GetMembers(DynamicBaseMemberType includeMembers = DefaultIncludeMembers) - { - // Initialize an empty dict - var dic = new Dictionary(); + var names = new List(pinfos.Count); - // Add dynamic members - if (includeMembers.HasFlag(DynamicBaseMemberType.Dynamic)) - dic = new Dictionary(properties); + foreach (var pinfo in pinfos) + if (pinfo.Name != "Item") + names.Add(pinfo); - if (includeMembers.HasFlag(DynamicBaseMemberType.Instance)) - { - PopulatePropInfoCache(GetType()); - var pinfos = propInfoCache[GetType()].Where(x => - { - var hasIgnored = x.IsDefined(typeof(SchemaIgnore), true); - var hasObsolete = x.IsDefined(typeof(ObsoleteAttribute), true); - - // If obsolete is false and prop has obsolete attr - // OR - // If schemaIgnored is true and prop has schemaIgnore attr - return !((!includeMembers.HasFlag(DynamicBaseMemberType.SchemaIgnored) && hasIgnored) || - (!includeMembers.HasFlag(DynamicBaseMemberType.Obsolete) && hasObsolete)); - }); - foreach (var pi in pinfos) - { - if (!dic.ContainsKey(pi.Name)) //todo This is a TEMP FIX FOR #1969, and should be reverted after a proper fix is made! 
- dic.Add(pi.Name, pi.GetValue(this)); - } - } + return names; + } + + /// + /// Gets the names of the typed and dynamic properties that don't have a [SchemaIgnore] attribute. + /// + /// + [Obsolete("Use GetMembers().Keys instead")] + public IEnumerable GetMemberNames() + { + return GetMembers().Keys; + } + + /// + /// Gets the typed and dynamic properties. + /// + /// Specifies which members should be included in the resulting dictionary. Can be concatenated with "|" + /// A dictionary containing the key's and values of the object. + public Dictionary GetMembers( + DynamicBaseMemberType includeMembers = DefaultIncludeMembers + ) + { + // Initialize an empty dict + var dic = new Dictionary(); + + // Add dynamic members + if (includeMembers.HasFlag(DynamicBaseMemberType.Dynamic)) + dic = new Dictionary(properties); - if (includeMembers.HasFlag(DynamicBaseMemberType.SchemaComputed)) + if (includeMembers.HasFlag(DynamicBaseMemberType.Instance)) + { + PopulatePropInfoCache(GetType()); + var pinfos = propInfoCache[GetType()].Where(x => { - GetType() - .GetMethods() - .Where(e => e.IsDefined(typeof(SchemaComputedAttribute)) && !e.IsDefined(typeof(ObsoleteAttribute))) - .ToList() - .ForEach( + var hasIgnored = x.IsDefined(typeof(SchemaIgnore), true); + var hasObsolete = x.IsDefined(typeof(ObsoleteAttribute), true); + + // If obsolete is false and prop has obsolete attr + // OR + // If schemaIgnored is true and prop has schemaIgnore attr + return !( + !includeMembers.HasFlag(DynamicBaseMemberType.SchemaIgnored) && hasIgnored + || !includeMembers.HasFlag(DynamicBaseMemberType.Obsolete) && hasObsolete + ); + }); + foreach (var pi in pinfos) + if (!dic.ContainsKey(pi.Name)) //todo This is a TEMP FIX FOR #1969, and should be reverted after a proper fix is made! 
+ dic.Add(pi.Name, pi.GetValue(this)); + } + + if (includeMembers.HasFlag(DynamicBaseMemberType.SchemaComputed)) + GetType() + .GetMethods() + .Where( e => + e.IsDefined(typeof(SchemaComputedAttribute)) && !e.IsDefined(typeof(ObsoleteAttribute)) + ) + .ToList() + .ForEach(e => + { + var attr = e.GetCustomAttribute(); + try { - var attr = e.GetCustomAttribute(); - try - { - dic[attr.Name] = e.Invoke(this, null); - } - catch (Exception ex) - { - SpeckleLog.Logger.Warning(ex, "Failed to get computed member: {name}", attr.Name); - dic[attr.Name] = null; - } + dic[attr.Name] = e.Invoke(this, null); } - ); - } - - return dic; - } + catch (Exception ex) + { + SpeckleLog.Logger.Warning(ex, "Failed to get computed member: {name}", attr.Name); + dic[attr.Name] = null; + } + }); - /// - /// Gets the dynamically added property names only. - /// - /// - [Obsolete("Use GetMembers(DynamicBaseMemberType.Dynamic).Keys instead")] - public IEnumerable GetDynamicMembers() - { - return properties.Keys; - } + return dic; } /// - /// This attribute is used internally to hide the this[key]{get; set;} property from inner reflection on members. - /// For more info see this discussion: https://speckle.community/t/why-do-i-keep-forgetting-base-objects-cant-use-item-as-a-dynamic-member/3246/5 + /// Gets the dynamically added property names only. /// - internal class IgnoreTheItemAttribute : Attribute { } - + /// + [Obsolete("Use GetMembers(DynamicBaseMemberType.Dynamic).Keys instead")] + public IEnumerable GetDynamicMembers() + { + return properties.Keys; + } } + +/// +/// This attribute is used internally to hide the this[key]{get; set;} property from inner reflection on members. 
+/// For more info see this discussion: https://speckle.community/t/why-do-i-keep-forgetting-base-objects-cant-use-item-as-a-dynamic-member/3246/5 +/// +internal sealed class IgnoreTheItemAttribute : Attribute { } diff --git a/Core/Core/Models/DynamicBaseMemberType.cs b/Core/Core/Models/DynamicBaseMemberType.cs index fc9fc1126d..9e9262dc6a 100644 --- a/Core/Core/Models/DynamicBaseMemberType.cs +++ b/Core/Core/Models/DynamicBaseMemberType.cs @@ -1,40 +1,45 @@ -using System; +using System; -namespace Speckle.Core.Models +namespace Speckle.Core.Models; + +/// +/// Represents all different types of members that can be returned by +/// +[Flags] +public enum DynamicBaseMemberType { /// - /// Represents all different types of members that can be returned by + /// The typed members of the DynamicBase object + /// + Instance = 1, + + /// + /// The dynamically added members of the DynamicBase object + /// + Dynamic = 2, + + /// + /// The typed members flagged with attribute. + /// + Obsolete = 4, + + /// + /// The typed members flagged with attribute. + /// + SchemaIgnored = 8, + + /// + /// The typed methods flagged with TODO: + /// + SchemaComputed = 16, + + /// + /// All the typed members, including ones with or attributes. + /// + InstanceAll = Instance + Obsolete + SchemaIgnored, + + /// + /// All the members, including dynamic and instance members flagged with or attributes /// - [Flags] - public enum DynamicBaseMemberType - { - /// - /// The typed members of the DynamicBase object - /// - Instance = 1, - /// - /// The dynamically added members of the DynamicBase object - /// - Dynamic = 2, - /// - /// The typed members flagged with attribute. - /// - Obsolete = 4, - /// - /// The typed members flagged with attribute. - /// - SchemaIgnored = 8, - /// - /// The typed methods flagged with TODO: - /// - SchemaComputed = 16, - /// - /// All the typed members, including ones with or attributes. 
- /// - InstanceAll = Instance + Obsolete + SchemaIgnored, - /// - /// All the members, including dynamic and instance members flagged with or attributes - /// - All = InstanceAll + Dynamic - } + All = InstanceAll + Dynamic } diff --git a/Core/Core/Models/Extensions.cs b/Core/Core/Models/Extensions.cs index 50d092c8cc..99dd4e2fa3 100644 --- a/Core/Core/Models/Extensions.cs +++ b/Core/Core/Models/Extensions.cs @@ -1,120 +1,115 @@ -using System; +using System; using System.Collections; using System.Collections.Generic; using System.Linq; -namespace Speckle.Core.Models.Extensions +namespace Speckle.Core.Models.Extensions; + +public static class BaseExtensions { - public static class BaseExtensions - { - /// - /// Provides access to each base object in the traverse function, and decides whether the traverse function should continue traversing it's children or not. - /// - /// - /// Should return 'true' if you wish to stop the traverse behaviour, 'false' otherwise. - /// - public delegate bool BaseRecursionBreaker(Base @base); + /// + /// Provides access to each base object in the traverse function, and decides whether the traverse function should continue traversing it's children or not. + /// + /// + /// Should return 'true' if you wish to stop the traverse behaviour, 'false' otherwise. + /// + public delegate bool BaseRecursionBreaker(Base @base); - /// - /// Traverses through the object and its children. - /// Only traverses through the first occurrence of a object (to prevent infinite recursion on circular references) - /// - /// The root object of the tree to flatten - /// Optional predicate function to determine whether to break (or continue) traversal of a object's children. - /// A flat List of objects. - /// - public static IEnumerable Flatten(this Base root, BaseRecursionBreaker recursionBreaker = null) - { - recursionBreaker ??= b => false; + /// + /// Traverses through the object and its children. 
+ /// Only traverses through the first occurrence of a object (to prevent infinite recursion on circular references) + /// + /// The root object of the tree to flatten + /// Optional predicate function to determine whether to break (or continue) traversal of a object's children. + /// A flat List of objects. + /// + public static IEnumerable Flatten( + this Base root, + BaseRecursionBreaker recursionBreaker = null + ) + { + recursionBreaker ??= b => false; - var cache = new HashSet(); - var traversal = Traverse(root, b => + var cache = new HashSet(); + var traversal = Traverse( + root, + b => { - if (!cache.Add(b.id)) return true; + if (!cache.Add(b.id)) + return true; return recursionBreaker.Invoke(b); - }); - - foreach (var b in traversal) - { - if (!cache.Contains(b.id)) yield return b; - //Recursion break will be called after the above } - } + ); + foreach (var b in traversal) + if (!cache.Contains(b.id)) + yield return b; + //Recursion break will be called after the above + } - /// - /// Depth-first traversal of the specified object and all of its children as a deferred Enumerable, with a function to break the traversal. - /// - /// The object to traverse. - /// Predicate function to determine whether to break (or continue) traversal of a object's children. - /// Deferred Enumerable of the objects being traversed (iterable only once). - public static IEnumerable Traverse(this Base root, BaseRecursionBreaker recursionBreaker) - { - var stack = new Stack(); - stack.Push(root); + /// + /// Depth-first traversal of the specified object and all of its children as a deferred Enumerable, with a function to break the traversal. + /// + /// The object to traverse. + /// Predicate function to determine whether to break (or continue) traversal of a object's children. + /// Deferred Enumerable of the objects being traversed (iterable only once). 
+ public static IEnumerable Traverse(this Base root, BaseRecursionBreaker recursionBreaker) + { + var stack = new Stack(); + stack.Push(root); - while (stack.Count > 0) - { - Base current = stack.Pop(); - yield return current; + while (stack.Count > 0) + { + Base current = stack.Pop(); + yield return current; - if (recursionBreaker(current)) continue; + if (recursionBreaker(current)) + continue; - foreach (string child in current.GetDynamicMemberNames()) + foreach (string child in current.GetDynamicMemberNames()) + switch (current[child]) { - switch (current[child]) + case Base o: + stack.Push(o); + break; + case IDictionary dictionary: + { + foreach (object obj in dictionary.Keys) + if (obj is Base b) + stack.Push(b); + break; + } + case IList collection: { - case Base o: - stack.Push(o); - break; - case IDictionary dictionary: - { - foreach (object obj in dictionary.Keys) - { - if (obj is Base b) - stack.Push(b); - } - break; - } - case IList collection: - { - foreach (object obj in collection) - { - if (obj is Base b) - stack.Push(b); - } - break; - } + foreach (object obj in collection) + if (obj is Base b) + stack.Push(b); + break; } } - } } + } - public static string ToFormattedString(this Exception exception) - { - var messages = exception - .GetAllExceptions() - .Where(e => !string.IsNullOrWhiteSpace(e.Message)) - .Select(e => e.Message.Trim()); - string flattened = string.Join(Environment.NewLine + " ", messages); // <-- the separator here - return flattened; - } + public static string ToFormattedString(this Exception exception) + { + var messages = exception + .GetAllExceptions() + .Where(e => !string.IsNullOrWhiteSpace(e.Message)) + .Select(e => e.Message.Trim()); + string flattened = string.Join(Environment.NewLine + " ", messages); // <-- the separator here + return flattened; + } - private static IEnumerable GetAllExceptions(this Exception exception) - { - yield return exception; + private static IEnumerable GetAllExceptions(this Exception exception) 
+ { + yield return exception; - if (exception is AggregateException aggrEx) - { - foreach (var innerEx in aggrEx.InnerExceptions.SelectMany(e => e.GetAllExceptions())) - yield return innerEx; - } - else if (exception.InnerException != null) - { - foreach (var innerEx in exception.InnerException.GetAllExceptions()) - yield return innerEx; - } - } + if (exception is AggregateException aggrEx) + foreach (var innerEx in aggrEx.InnerExceptions.SelectMany(e => e.GetAllExceptions())) + yield return innerEx; + else if (exception.InnerException != null) + foreach (var innerEx in exception.InnerException.GetAllExceptions()) + yield return innerEx; } } diff --git a/Core/Core/Models/Extras.cs b/Core/Core/Models/Extras.cs index 0199d3dfbc..0389a51261 100644 --- a/Core/Core/Models/Extras.cs +++ b/Core/Core/Models/Extras.cs @@ -1,342 +1,392 @@ -using System; +using System; using System.Collections.Generic; -using System.Globalization; using System.Linq; using Speckle.Core.Models.Extensions; using Speckle.Newtonsoft.Json; -namespace Speckle.Core.Models +namespace Speckle.Core.Models; + +/// +/// Wrapper around other, third party, classes that are not coming from a speckle kit. +/// Serialization and deserialization of the base object happens through default Newtonsoft converters. If your object does not de/serialize correctly, this class will not prevent that from happening. +/// Limitations: +/// - Base object needs to be serializable. +/// - Inline collection declarations with values do not behave correctly. +/// - Your class needs to have a void constructor. +/// - Probably more. File a bug! +/// +public class Abstract : Base { + private object _base; + + /// + /// See for limitations of this approach. + /// + public Abstract() { } + /// - /// Wrapper around other, third party, classes that are not coming from a speckle kit. - /// Serialization and deserialization of the base object happens through default Newtonsoft converters. 
If your object does not de/serialize correctly, this class will not prevent that from happening. - /// Limitations: - /// - Base object needs to be serializable. - /// - Inline collection declarations with values do not behave correctly. - /// - Your class needs to have a void constructor. - /// - Probably more. File a bug! + /// See for limitations of this approach. /// - public class Abstract : Base + /// + public Abstract(object _original) { - public string assemblyQualifiedName { get; set; } + @base = _original; + assemblyQualifiedName = @base.GetType().AssemblyQualifiedName; + } - private object _base; + public string assemblyQualifiedName { get; set; } - /// - /// The original object. - /// - public object @base + /// + /// The original object. + /// + public object @base + { + get => _base; + set { - get => _base; set - { - _base = value; - assemblyQualifiedName = value.GetType().AssemblyQualifiedName; - } + _base = value; + assemblyQualifiedName = value.GetType().AssemblyQualifiedName; } + } +} - /// - /// See for limitations of this approach. - /// - public Abstract() { } +/// +/// In short, this helps you chunk big things into smaller things. +/// See the following reference. +/// +/// +public class DataChunk : Base +{ + public DataChunk() { } - /// - /// See for limitations of this approach. - /// - /// - public Abstract(object _original) - { - @base = _original; - assemblyQualifiedName = @base.GetType().AssemblyQualifiedName; - } + public List data { get; set; } = new(); +} - } +public class ObjectReference +{ + public string speckle_type = "reference"; - /// - /// In short, this helps you chunk big things into smaller things. - /// See the following reference. 
- /// - /// - public class DataChunk : Base + public ObjectReference() { } + + public string referencedId { get; set; } +} + +public class ProgressEventArgs : EventArgs +{ + public ProgressEventArgs(int current, int total, string scope) { - public List data { get; set; } = new List(); - public DataChunk() { } + this.current = current; + this.total = total; + this.scope = scope; } - public class ObjectReference - { - public string referencedId { get; set; } - public string speckle_type = "reference"; + public int current { get; set; } + public int total { get; set; } + public string scope { get; set; } +} - public ObjectReference() { } +/// +/// A simple wrapper to keep track of the relationship between speckle objects and their host-application siblings in cases where the +/// cannot correspond with the (ie, on receiving operations). +/// +public class ApplicationObject +{ + public enum State + { + Created, // Speckle object created on send, or native objects created on receive + Skipped, // Speckle or Application object is not going to be sent or received + Updated, // Application object is replacing an existing object in the application + Failed, // Tried to convert & send or convert & bake but something went wrong + Removed, //Removed object from application + Unknown } - public class ProgressEventArgs : EventArgs + public ApplicationObject(string id, string type) { - public int current { get; set; } - public int total { get; set; } - public string scope { get; set; } - public ProgressEventArgs(int current, int total, string scope) - { - this.current = current; this.total = total; this.scope = scope; - } + OriginalId = id; + Descriptor = type; + Status = State.Unknown; } /// - /// A simple wrapper to keep track of the relationship between speckle objects and their host-application siblings in cases where the - /// cannot correspond with the (ie, on receiving operations). + /// ID of the object from host application that generated it. 
/// - public class ApplicationObject + public string applicationId { get; set; } + + /// + /// The container for the object in the native application + /// + public string Container { get; set; } + + /// + /// Indicates if conversion is supported by the converter + /// + public bool Convertible { get; set; } + + /// + /// The fallback values if direct conversion is not available, typically displayValue + /// + [JsonIgnore] + public List Fallback { get; set; } = new(); + + /// + /// The Speckle id (on receive) or native id (on send) + /// + /// + /// Used to retrieve this object in ProgressReport.GetReportObject(), typically to pass between connectors and converters + /// + public string OriginalId { get; set; } + + /// + /// A descriptive string to describe the object. Use the object type as default. + /// + public string Descriptor { get; set; } + + /// + /// The created object ids associated with this object + /// + /// + /// On send, this is currently left empty as generating Speckle ids would be performance expensive + /// + public List CreatedIds { get; set; } = new(); + + /// + /// Conversion status of object + /// + public State Status { get; set; } + + /// + /// Conversion notes or other important information to expose to the user + /// + public List Log { get; set; } = new(); + + /// + /// Converted objects corresponding to this object + /// + /// + /// Used during receive for convenience, corresponds to CreatedIds + /// + [JsonIgnore] + public List Converted { get; set; } = new(); + + public void Update( + string createdId = null, + List createdIds = null, + State? 
status = null, + string container = null, + List log = null, + string logItem = null, + List converted = null, + object convertedItem = null, + string descriptor = null + ) { - public enum State - { - Created, // Speckle object created on send, or native objects created on receive - Skipped, // Speckle or Application object is not going to be sent or received - Updated, // Application object is replacing an existing object in the application - Failed, // Tried to convert & send or convert & bake but something went wrong - Removed, //Removed object from application - Unknown - } + if (createdIds != null) + createdIds + .Where(o => !string.IsNullOrEmpty(o) && !CreatedIds.Contains(o)) + ?.ToList() + .ForEach(o => CreatedIds.Add(o)); + if (createdId != null && !CreatedIds.Contains(createdId)) + CreatedIds.Add(createdId); + if (status.HasValue) + Status = status.Value; + if (log != null) + log.Where(o => !string.IsNullOrEmpty(o) && !Log.Contains(o)) + ?.ToList() + .ForEach(o => Log.Add(o)); + if (!string.IsNullOrEmpty(logItem) && !Log.Contains(logItem)) + Log.Add(logItem); + if (convertedItem != null && !Converted.Contains(convertedItem)) + Converted.Add(convertedItem); + if (converted != null) + converted + .Where(o => o != null && !Converted.Contains(o)) + ?.ToList() + .ForEach(o => Converted.Add(o)); + if (!string.IsNullOrEmpty(container)) + Container = container; + if (!string.IsNullOrEmpty(descriptor)) + Descriptor = descriptor; + } +} + +public class ProgressReport +{ + public Dictionary ReportObjects { get; set; } = new(); - /// - /// ID of the object from host application that generated it. 
- /// - public string applicationId { get; set; } - - /// - /// The container for the object in the native application - /// - public string Container { get; set; } - - /// - /// Indicates if conversion is supported by the converter - /// - public bool Convertible { get; set; } - - /// - /// The fallback values if direct conversion is not available, typically displayValue - /// - [JsonIgnore] - public List Fallback { get; set; } = new List(); - - /// - /// The Speckle id (on receive) or native id (on send) - /// - /// - /// Used to retrieve this object in ProgressReport.GetReportObject(), typically to pass between connectors and converters - /// - public string OriginalId { get; set; } - - /// - /// A descriptive string to describe the object. Use the object type as default. - /// - public string Descriptor { get; set; } - - /// - /// The created object ids associated with this object - /// - /// - /// On send, this is currently left empty as generating Speckle ids would be performance expensive - /// - public List CreatedIds { get; set; } = new List(); - - /// - /// Conversion status of object - /// - public State Status { get; set; } - - /// - /// Conversion notes or other important information to expose to the user - /// - public List Log { get; set; } = new List(); - - /// - /// Converted objects corresponding to this object - /// - /// - /// Used during receive for convenience, corresponds to CreatedIds - /// - [JsonIgnore] - public List Converted { get; set; } = new List(); - - public ApplicationObject(string id, string type) + public List SelectedReportObjects { get; set; } = new(); + + public void Log(ApplicationObject obj) + { + var _reportObject = UpdateReportObject(obj); + if (_reportObject == null) + ReportObjects.Add(obj.OriginalId, obj); + } + + public ApplicationObject UpdateReportObject(ApplicationObject obj) + { + if (ReportObjects.TryGetValue(obj.OriginalId, out ApplicationObject reportObject)) { - OriginalId = id; - Descriptor = type; - Status = 
State.Unknown; + reportObject.Update( + createdIds: obj.CreatedIds, + container: obj.Container, + converted: obj.Converted, + log: obj.Log, + descriptor: obj.Descriptor + ); + + if (obj.Status != ApplicationObject.State.Unknown) + reportObject.Update(status: obj.Status); + return reportObject; } - - public void Update(string createdId = null, List createdIds = null, State? status = null, string container = null, List log = null, string logItem = null, List converted = null, object convertedItem = null, string descriptor = null) + else { - if (createdIds != null) createdIds.Where(o => !string.IsNullOrEmpty(o) && !CreatedIds.Contains(o))?.ToList().ForEach(o => CreatedIds.Add(o)); - if (createdId != null && !CreatedIds.Contains(createdId)) CreatedIds.Add(createdId); - if (status.HasValue) Status = status.Value; - if (log != null) log.Where(o => !string.IsNullOrEmpty(o) && !Log.Contains(o))?.ToList().ForEach(o => Log.Add(o)); - if (!string.IsNullOrEmpty(logItem) && !Log.Contains(logItem)) Log.Add(logItem); - if (convertedItem != null && !Converted.Contains(convertedItem)) Converted.Add(convertedItem); - if (converted != null) converted.Where(o => o != null && !Converted.Contains(o))?.ToList().ForEach(o => Converted.Add(o)); - if (!string.IsNullOrEmpty(container)) Container = container; - if (!string.IsNullOrEmpty(descriptor)) Descriptor = descriptor; + return null; } } - public class ProgressReport + [Obsolete("Use TryGetValue or Dictionary indexing", true)] + public bool GetReportObject(string id, out int index) { - public Dictionary ReportObjects { get; set; } = new Dictionary(); - public List SelectedReportObjects { get; set; } = new List(); - - public void Log(ApplicationObject obj) - { - var _reportObject = UpdateReportObject(obj); - if (_reportObject == null) - ReportObjects.Add(obj.OriginalId, obj); - } + throw new NotImplementedException(); + // var _reportObject = ReportObjects.Where(o => o.OriginalId == id)?.FirstOrDefault(); + // index = _reportObject != 
null ? ReportObjects.IndexOf(_reportObject) : -1; + // return index == -1 ? false : true; + } - public ApplicationObject UpdateReportObject(ApplicationObject obj) - { - if (ReportObjects.TryGetValue(obj.OriginalId, out ApplicationObject reportObject)) - { - reportObject.Update(createdIds: obj.CreatedIds, container: obj.Container, converted: obj.Converted, log: obj.Log, descriptor: obj.Descriptor); + public void Merge(ProgressReport report) + { + lock (OperationErrorsLock) + OperationErrors.AddRange(report.OperationErrors); - if (obj.Status != ApplicationObject.State.Unknown) - reportObject.Update(status: obj.Status); - return reportObject; - } - else return null; - } + lock (ConversionLogLock) + ConversionLog.AddRange(report.ConversionLog); - [Obsolete("Use TryGetValue or Dictionary indexing", true)] - public bool GetReportObject(string id, out int index) + // update report object notes + foreach (var item in ReportObjects.Values) { - throw new NotImplementedException(); - // var _reportObject = ReportObjects.Where(o => o.OriginalId == id)?.FirstOrDefault(); - // index = _reportObject != null ? ReportObjects.IndexOf(_reportObject) : -1; - // return index == -1 ? 
false : true; - } + var ids = new List { item.OriginalId }; + if (item.Fallback.Count > 0) + ids.AddRange(item.Fallback.Select(o => o.OriginalId)); - #region Conversion - /// - /// Keeps track of the conversion process - /// - public List ConversionLog { get; } = new List(); + if (item.Status == ApplicationObject.State.Unknown) + if (report.ReportObjects.TryGetValue(item.OriginalId, out var reportObject)) + item.Status = reportObject.Status; - private readonly object ConversionLogLock = new object(); - public string ConversionLogString - { - get - { - var summary = ""; - lock (ConversionLogLock) + foreach (var id in ids) + //if (report.GetReportObject(id, out int index)) + if (report.ReportObjects.TryGetValue(id, out var reportObject)) { - var converted = ConversionLog.Count(x => x.ToLowerInvariant().Contains("converted")); - var created = ConversionLog.Count(x => x.ToLowerInvariant().Contains("created")); - var skipped = ConversionLog.Count(x => x.ToLowerInvariant().Contains("skipped")); - var failed = ConversionLog.Count(x => x.ToLowerInvariant().Contains("failed")); - var updated = ConversionLog.Count(x => x.ToLowerInvariant().Contains("updated")); - - summary += converted > 0 ? $"CONVERTED: {converted}\n" : ""; - summary += created > 0 ? $"CREATED: {created}\n" : ""; - summary += updated > 0 ? $"UPDATED: {updated}\n" : ""; - summary += skipped > 0 ? $"SKIPPED: {skipped}\n" : ""; - summary += failed > 0 ? $"FAILED: {failed}\n" : ""; - summary = !string.IsNullOrEmpty(summary) ? 
$"SUMMARY\n\n{summary}\n\n" : ""; - - return summary + string.Join("\n", ConversionLog); + foreach (var logItem in reportObject.Log) + if (!item.Log.Contains(logItem)) + item.Log.Add(logItem); + foreach (var createdId in reportObject.CreatedIds) + if (!item.CreatedIds.Contains(createdId)) + item.CreatedIds.Add(createdId); + foreach (var convertedItem in reportObject.Converted) + if (!item.Converted.Contains(convertedItem)) + item.Converted.Add(convertedItem); } - } } + } - public void Log(string text) - { - var time = DateTime.Now.ToLocalTime().ToString("dd/MM/yy HH:mm:ss"); - lock (ConversionLogLock) - ConversionLog.Add(time + " " + text); - } + #region Conversion - /// - /// Keeps track of errors in the conversions. - /// - public List ConversionErrors { get; } = new List(); - private readonly object ConversionErrorsLock = new object(); - public string ConversionErrorsString + /// + /// Keeps track of the conversion process + /// + public List ConversionLog { get; } = new(); + + private readonly object ConversionLogLock = new(); + + public string ConversionLogString + { + get { - get + var summary = ""; + lock (ConversionLogLock) { - lock (ConversionErrorsLock) - return string.Join("\n", ConversionErrors.Select(x => x.Message).Distinct()); + var converted = ConversionLog.Count(x => x.ToLowerInvariant().Contains("converted")); + var created = ConversionLog.Count(x => x.ToLowerInvariant().Contains("created")); + var skipped = ConversionLog.Count(x => x.ToLowerInvariant().Contains("skipped")); + var failed = ConversionLog.Count(x => x.ToLowerInvariant().Contains("failed")); + var updated = ConversionLog.Count(x => x.ToLowerInvariant().Contains("updated")); + + summary += converted > 0 ? $"CONVERTED: {converted}\n" : ""; + summary += created > 0 ? $"CREATED: {created}\n" : ""; + summary += updated > 0 ? $"UPDATED: {updated}\n" : ""; + summary += skipped > 0 ? $"SKIPPED: {skipped}\n" : ""; + summary += failed > 0 ? 
$"FAILED: {failed}\n" : ""; + summary = !string.IsNullOrEmpty(summary) ? $"SUMMARY\n\n{summary}\n\n" : ""; + + return summary + string.Join("\n", ConversionLog); } } + } + + public void Log(string text) + { + var time = DateTime.Now.ToLocalTime().ToString("dd/MM/yy HH:mm:ss"); + lock (ConversionLogLock) + ConversionLog.Add(time + " " + text); + } + + /// + /// Keeps track of errors in the conversions. + /// + public List ConversionErrors { get; } = new(); - public int ConversionErrorsCount => ConversionErrors.Count; + private readonly object ConversionErrorsLock = new(); - public void LogConversionError(Exception exception) + public string ConversionErrorsString + { + get { lock (ConversionErrorsLock) - ConversionErrors.Add(exception); - Log(exception.Message); - } - #endregion - - #region Operation - /// - /// Keeps track of HANDLED errors that occur during send/recieve commands. - /// - /// - /// Handled errors specific to the conversion, should be added to ConversionErrors - /// Unhandleable errors (i.e. that lead to the entire send/receive failing) should be Thrown instead. 
- /// - public List OperationErrors { get; } = new List(); - private readonly object OperationErrorsLock = new object(); - public string OperationErrorsString - { - get - { - lock (OperationErrorsLock) - return string.Join("\n", OperationErrors.Select(x => x.ToFormattedString()).Distinct()); - } + return string.Join("\n", ConversionErrors.Select(x => x.Message).Distinct()); } + } - public int OperationErrorsCount => OperationErrors.Count; + public int ConversionErrorsCount => ConversionErrors.Count; - public void LogOperationError(Exception exception) - { - lock (OperationErrorsLock) - OperationErrors.Add(exception); - } - #endregion + public void LogConversionError(Exception exception) + { + lock (ConversionErrorsLock) + ConversionErrors.Add(exception); + Log(exception.Message); + } - public void Merge(ProgressReport report) + #endregion + + #region Operation + + /// + /// Keeps track of HANDLED errors that occur during send/recieve commands. + /// + /// + /// Handled errors specific to the conversion, should be added to ConversionErrors + /// Unhandleable errors (i.e. that lead to the entire send/receive failing) should be Thrown instead. 
+ /// + public List OperationErrors { get; } = new(); + + private readonly object OperationErrorsLock = new(); + + public string OperationErrorsString + { + get { lock (OperationErrorsLock) - OperationErrors.AddRange(report.OperationErrors); + return string.Join("\n", OperationErrors.Select(x => x.ToFormattedString()).Distinct()); + } + } - lock (ConversionLogLock) - ConversionLog.AddRange(report.ConversionLog); + public int OperationErrorsCount => OperationErrors.Count; - // update report object notes - foreach (var item in ReportObjects.Values) - { - var ids = new List { item.OriginalId }; - if (item.Fallback.Count > 0) ids.AddRange(item.Fallback.Select(o => o.OriginalId)); - - if (item.Status == ApplicationObject.State.Unknown) - if (report.ReportObjects.TryGetValue(item.OriginalId, out var reportObject)) - item.Status = reportObject.Status; - - foreach (var id in ids) - //if (report.GetReportObject(id, out int index)) - if (report.ReportObjects.TryGetValue(id, out var reportObject)) - { - foreach (var logItem in reportObject.Log) - if (!item.Log.Contains(logItem)) - item.Log.Add(logItem); - foreach (var createdId in reportObject.CreatedIds) - if (!item.CreatedIds.Contains(createdId)) - item.CreatedIds.Add(createdId); - foreach (var convertedItem in reportObject.Converted) - if (!item.Converted.Contains(convertedItem)) - item.Converted.Add(convertedItem); - } - } - } + public void LogOperationError(Exception exception) + { + lock (OperationErrorsLock) + OperationErrors.Add(exception); } + + #endregion } diff --git a/Core/Core/Models/GraphTraversal/DefaultTraversal.cs b/Core/Core/Models/GraphTraversal/DefaultTraversal.cs index ab62a704fd..df274de385 100644 --- a/Core/Core/Models/GraphTraversal/DefaultTraversal.cs +++ b/Core/Core/Models/GraphTraversal/DefaultTraversal.cs @@ -1,118 +1,165 @@ -using System.Collections.Generic; +#nullable enable +using System.Collections.Generic; using System.Linq; -using System.Runtime.Serialization; using Speckle.Core.Kits; 
-#nullable enable -namespace Speckle.Core.Models.GraphTraversal +namespace Speckle.Core.Models.GraphTraversal; + +public static class DefaultTraversal { - public static class DefaultTraversal + /// + /// Traverses until finds a convertable object (or fallback) then traverses members + /// + /// + /// + public static GraphTraversal CreateTraverseFunc(ISpeckleConverter converter) { + var convertableRule = TraversalRule + .NewTraversalRule() + .When(converter.CanConvertToNative) + .When(HasDisplayValue) + .ContinueTraversing(b => + { + var membersToTraverse = b.GetDynamicMembers() + .Concat(displayValueAliases) + .Concat(elementsAliases) + .Except(ignoreProps); + return membersToTraverse; + }); + + var ignoreResultsRule = TraversalRule + .NewTraversalRule() + .When(o => o.speckle_type.Contains("Objects.Structural.Results")) + .ContinueTraversing(None); + + var defaultRule = TraversalRule + .NewTraversalRule() + .When(_ => true) + .ContinueTraversing(Members()); + + return new GraphTraversal(convertableRule, ignoreResultsRule, defaultRule); + } - /// - /// Traverses until finds a convertable object (or fallback) then traverses members - /// - /// - /// - public static GraphTraversal CreateTraverseFunc(ISpeckleConverter converter) - { - - var convertableRule = TraversalRule.NewTraversalRule() - .When(converter.CanConvertToNative) - .When(HasDisplayValue) - .ContinueTraversing(b => - { - var membersToTraverse = b.GetDynamicMembers() + /// + /// Traverses until finds a convertable object then HALTS deeper traversal + /// + /// + /// + public static GraphTraversal CreateRevitTraversalFunc(ISpeckleConverter converter) + { + var convertableRule = TraversalRule + .NewTraversalRule() + .When(converter.CanConvertToNative) + .ContinueTraversing(None); + + var displayValueRule = TraversalRule + .NewTraversalRule() + .When(HasDisplayValue) + .ContinueTraversing( + b => + b.GetDynamicMembers() .Concat(displayValueAliases) - .Concat(elementsAliases) - .Except(ignoreProps); - return 
membersToTraverse; - }); - - var ignoreResultsRule = TraversalRule.NewTraversalRule() - .When(o => o.speckle_type.Contains("Objects.Structural.Results")) - .ContinueTraversing(None); - - var defaultRule = TraversalRule.NewTraversalRule() - .When(_ => true) - .ContinueTraversing(Members()); - - return new GraphTraversal(convertableRule, ignoreResultsRule, defaultRule); - } - - /// - /// Traverses until finds a convertable object then HALTS deeper traversal - /// - /// - /// - public static GraphTraversal CreateRevitTraversalFunc(ISpeckleConverter converter) - { - var convertableRule = TraversalRule.NewTraversalRule() - .When(converter.CanConvertToNative) - .ContinueTraversing(None); - - var displayValueRule = TraversalRule.NewTraversalRule() - .When(HasDisplayValue) - .ContinueTraversing(b => b.GetDynamicMembers() - .Concat(displayValueAliases) - .Except(elementsAliases) - .Except(ignoreProps) - ); - - //WORKAROUND: ideally, traversal rules would not have Objects specific rules. - var ignoreResultsRule = TraversalRule.NewTraversalRule() - .When(o => o.speckle_type.Contains("Objects.Structural.Results")) - .ContinueTraversing(None); - - var defaultRule = TraversalRule.NewTraversalRule() - .When(_ => true) - .ContinueTraversing(Members()); - - return new GraphTraversal(convertableRule, displayValueRule, ignoreResultsRule, defaultRule); - } - - - /// - /// Traverses until finds a convertable object (or fallback) then traverses members - /// - /// - /// - public static GraphTraversal CreateBIMTraverseFunc(ISpeckleConverter converter) - { - var bimElementRule = TraversalRule.NewTraversalRule() - .When(converter.CanConvertToNative) - .ContinueTraversing(ElementsAliases); - - //WORKAROUND: ideally, traversal rules would not have Objects specific rules. 
- var ignoreResultsRule = TraversalRule.NewTraversalRule() - .When(o => o.speckle_type.Contains("Objects.Structural.Results")) - .ContinueTraversing(None); - - var defaultRule = TraversalRule.NewTraversalRule() - .When(_ => true) - .ContinueTraversing(Members()); - - return new GraphTraversal(bimElementRule, ignoreResultsRule, defaultRule); - } - - - //These functions are just meant to make the syntax of defining rules less verbose, they are likely to change frequently/be restructured - #region Helper Functions - - internal static readonly string[] elementsAliases = { "elements", "@elements" }; - internal static IEnumerable ElementsAliases(Base _) => elementsAliases; - - internal static readonly string[] displayValueAliases = { "displayValue", "@displayValue" }; - internal static readonly string[] ignoreProps = new[] { "@blockDefinition" }.Concat(displayValueAliases).ToArray(); - internal static IEnumerable DisplayValueAliases(Base _) => displayValueAliases; - internal static IEnumerable None(Base _) => Enumerable.Empty(); - internal static SelectMembers Members(DynamicBaseMemberType includeMembers = DynamicBase.DefaultIncludeMembers) => x => x.GetMembers(includeMembers).Keys; - internal static SelectMembers DynamicMembers() => x => x.GetDynamicMembers(); - internal static SelectMembers Concat(params SelectMembers[] selectProps) => x => selectProps.SelectMany(i => i.Invoke(x)); - internal static SelectMembers Except(SelectMembers selectProps, IEnumerable excludeProps) => x => selectProps.Invoke(x).Except(excludeProps); - internal static bool HasElements(Base x) => elementsAliases.Any(m => x[m] != null); - internal static bool HasDisplayValue(Base x) => displayValueAliases.Any(m => x[m] != null); - - #endregion + .Except(elementsAliases) + .Except(ignoreProps) + ); + + //WORKAROUND: ideally, traversal rules would not have Objects specific rules. 
+ var ignoreResultsRule = TraversalRule + .NewTraversalRule() + .When(o => o.speckle_type.Contains("Objects.Structural.Results")) + .ContinueTraversing(None); + + var defaultRule = TraversalRule + .NewTraversalRule() + .When(_ => true) + .ContinueTraversing(Members()); + + return new GraphTraversal(convertableRule, displayValueRule, ignoreResultsRule, defaultRule); + } + + /// + /// Traverses until finds a convertable object (or fallback) then traverses members + /// + /// + /// + public static GraphTraversal CreateBIMTraverseFunc(ISpeckleConverter converter) + { + var bimElementRule = TraversalRule + .NewTraversalRule() + .When(converter.CanConvertToNative) + .ContinueTraversing(ElementsAliases); + + //WORKAROUND: ideally, traversal rules would not have Objects specific rules. + var ignoreResultsRule = TraversalRule + .NewTraversalRule() + .When(o => o.speckle_type.Contains("Objects.Structural.Results")) + .ContinueTraversing(None); + + var defaultRule = TraversalRule + .NewTraversalRule() + .When(_ => true) + .ContinueTraversing(Members()); + + return new GraphTraversal(bimElementRule, ignoreResultsRule, defaultRule); + } + + //These functions are just meant to make the syntax of defining rules less verbose, they are likely to change frequently/be restructured + + #region Helper Functions + + internal static readonly string[] elementsAliases = { "elements", "@elements" }; + + internal static IEnumerable ElementsAliases(Base _) + { + return elementsAliases; + } + + internal static readonly string[] displayValueAliases = { "displayValue", "@displayValue" }; + internal static readonly string[] ignoreProps = new[] { "@blockDefinition" } + .Concat(displayValueAliases) + .ToArray(); + + internal static IEnumerable DisplayValueAliases(Base _) + { + return displayValueAliases; + } + + internal static IEnumerable None(Base _) + { + return Enumerable.Empty(); + } + + internal static SelectMembers Members( + DynamicBaseMemberType includeMembers = 
DynamicBase.DefaultIncludeMembers + ) + { + return x => x.GetMembers(includeMembers).Keys; + } + + internal static SelectMembers DynamicMembers() + { + return x => x.GetDynamicMembers(); + } + + internal static SelectMembers Concat(params SelectMembers[] selectProps) + { + return x => selectProps.SelectMany(i => i.Invoke(x)); } + + internal static SelectMembers Except(SelectMembers selectProps, IEnumerable excludeProps) + { + return x => selectProps.Invoke(x).Except(excludeProps); + } + + internal static bool HasElements(Base x) + { + return elementsAliases.Any(m => x[m] != null); + } + + internal static bool HasDisplayValue(Base x) + { + return displayValueAliases.Any(m => x[m] != null); + } + + #endregion } diff --git a/Core/Core/Models/GraphTraversal/GraphTraversal.cs b/Core/Core/Models/GraphTraversal/GraphTraversal.cs index 58ea4ed85f..077cfb2353 100644 --- a/Core/Core/Models/GraphTraversal/GraphTraversal.cs +++ b/Core/Core/Models/GraphTraversal/GraphTraversal.cs @@ -1,137 +1,130 @@ -using System.Collections; +#nullable enable +using System.Collections; using System.Collections.Generic; -using System.Linq; -#nullable enable -namespace Speckle.Core.Models.GraphTraversal +namespace Speckle.Core.Models.GraphTraversal; + +public sealed class TraversalContext { + public readonly Base current; + public readonly TraversalContext? parent; + public readonly string? propName; - public sealed class TraversalContext + public TraversalContext(Base current, string? propName = null, TraversalContext? parent = null) { - public readonly string? propName; - public readonly TraversalContext? parent; - public readonly Base current; - - public TraversalContext(Base current, string? propName = null, TraversalContext? 
parent = null) - { - this.current = current; - this.parent = parent; - this.propName = propName; - } + this.current = current; + this.parent = parent; + this.propName = propName; } +} + +public sealed class GraphTraversal +{ + private readonly ITraversalRule[] rules; - public sealed class GraphTraversal + public GraphTraversal(params ITraversalRule[] traversalRule) { - private readonly ITraversalRule[] rules; + rules = traversalRule; + } - public GraphTraversal(params ITraversalRule[] traversalRule) - { - rules = traversalRule; - } + /// + /// Given object, will recursively traverse members according to the provided traversal rules. + /// + /// The object to traverse members + /// Lazily returns objects found during traversal (including ), wrapped within a + public IEnumerable Traverse(Base root) + { + var stack = new List(); + stack.Add(new TraversalContext(root)); - /// - /// Given object, will recursively traverse members according to the provided traversal rules. - /// - /// The object to traverse members - /// Lazily returns objects found during traversal (including ), wrapped within a - public IEnumerable Traverse(Base root) + while (stack.Count > 0) { - var stack = new List(); - stack.Add(new TraversalContext(root)); + int headIndex = stack.Count - 1; + TraversalContext head = stack[headIndex]; + stack.RemoveAt(headIndex); + yield return head; - while (stack.Count > 0) - { - int headIndex = stack.Count - 1; - TraversalContext head = stack[headIndex]; - stack.RemoveAt(headIndex); - yield return head; + Base current = head.current; + var activeRule = GetActiveRuleOrDefault(current); - Base current = head.current; - var activeRule = GetActiveRuleOrDefault(current); - - foreach (string childProp in activeRule.MembersToTraverse(current)) - { - TraverseMemberToStack(stack, current[childProp], childProp, head); - } - } + foreach (string childProp in activeRule.MembersToTraverse(current)) + TraverseMemberToStack(stack, current[childProp], childProp, head); } + } - 
private static void TraverseMemberToStack(ICollection stack, object? value, string? memberName = null, TraversalContext? parent = null) + private static void TraverseMemberToStack( + ICollection stack, + object? value, + string? memberName = null, + TraversalContext? parent = null + ) + { + //test + switch (value) { - //test - switch (value) + case Base o: + stack.Add(new TraversalContext(o, memberName, parent)); + break; + case IList list: + { + foreach (object? obj in list) + TraverseMemberToStack(stack, obj, memberName, parent); + break; + } + case IDictionary dictionary: { - case Base o: - stack.Add(new TraversalContext(o, memberName, parent)); - break; - case IList list: - { - foreach (object? obj in list) - { - TraverseMemberToStack(stack, obj, memberName, parent); - } - break; - } - case IDictionary dictionary: - { - foreach (object? obj in dictionary.Values) - { - TraverseMemberToStack(stack, obj, memberName, parent); - } - break; - } + foreach (object? obj in dictionary.Values) + TraverseMemberToStack(stack, obj, memberName, parent); + break; } } + } - - - /// - /// Traverses supported Collections yielding objects. - /// Does not traverse , only (potentially nested) collections. - /// - /// The value to traverse - public static IEnumerable TraverseMember(object? value) + /// + /// Traverses supported Collections yielding objects. + /// Does not traverse , only (potentially nested) collections. + /// + /// The value to traverse + public static IEnumerable TraverseMember(object? value) + { + //TODO we should benchmark this, as yield returning like this could be suboptimal + switch (value) { - //TODO we should benchmark this, as yield returning like this could be suboptimal - switch (value) + case Base o: + yield return o; + break; + case IList list: { - case Base o: - yield return o; - break; - case IList list: - { - foreach (object? 
obj in list) - { - foreach (Base o in TraverseMember(obj)) - yield return o; - } - break; - } - case IDictionary dictionary: - { - foreach (object? obj in dictionary.Values) - { - foreach (Base o in TraverseMember(obj)) - yield return o; - } - break; - } + foreach (object? obj in list) + { + foreach (Base o in TraverseMember(obj)) + yield return o; + } + break; + } + case IDictionary dictionary: + { + foreach (object? obj in dictionary.Values) + { + foreach (Base o in TraverseMember(obj)) + yield return o; + } + break; } } + } - private ITraversalRule GetActiveRuleOrDefault(Base o) - { - return GetActiveRule(o) ?? DefaultRule.Instance; - } + private ITraversalRule GetActiveRuleOrDefault(Base o) + { + return GetActiveRule(o) ?? DefaultRule.Instance; + } - private ITraversalRule? GetActiveRule(Base o) - { - foreach (var rule in rules) - { - if (rule.DoesRuleHold(o)) return rule; - } + private ITraversalRule? GetActiveRule(Base o) + { + foreach (var rule in rules) + if (rule.DoesRuleHold(o)) + return rule; - return null; - } + return null; } } diff --git a/Core/Core/Models/GraphTraversal/ITraversalRule.cs b/Core/Core/Models/GraphTraversal/ITraversalRule.cs index ac7ca5aaa3..8e67b007a8 100644 --- a/Core/Core/Models/GraphTraversal/ITraversalRule.cs +++ b/Core/Core/Models/GraphTraversal/ITraversalRule.cs @@ -1,38 +1,45 @@ -using System.Collections.Generic; +#nullable enable +using System.Collections.Generic; using System.Linq; -#nullable enable -namespace Speckle.Core.Models.GraphTraversal +namespace Speckle.Core.Models.GraphTraversal; + +/// +/// Interface for a definition of conditional traversal of objects. +/// +public interface ITraversalRule { + /// + /// The member names to traverse + /// Return may include member names doesn't have + public IEnumerable MembersToTraverse(Base b); + /// - /// Interface for a definition of conditional traversal of objects. 
+ /// Evaluates the traversal rule given /// - public interface ITraversalRule + /// + /// + public bool DoesRuleHold(Base o); +} + +/// +/// The "traverse none" rule that always holds true +/// +public sealed class DefaultRule : ITraversalRule +{ + private static DefaultRule? instance; + + private DefaultRule() { } + + public static DefaultRule Instance => instance ??= new DefaultRule(); + + public IEnumerable MembersToTraverse(Base b) { - /// - /// The member names to traverse - /// Return may include member names doesn't have - public IEnumerable MembersToTraverse(Base b); - - /// - /// Evaluates the traversal rule given - /// - /// - /// - public bool DoesRuleHold(Base o); + return Enumerable.Empty(); } - /// - /// The "traverse none" rule that always holds true - /// - public sealed class DefaultRule : ITraversalRule + public bool DoesRuleHold(Base o) { - private static DefaultRule? instance; - public static DefaultRule Instance => instance ??= new DefaultRule(); - - private DefaultRule() { } - public IEnumerable MembersToTraverse(Base b) => Enumerable.Empty(); - - public bool DoesRuleHold(Base o) => true; + return true; } } diff --git a/Core/Core/Models/GraphTraversal/RuleBuilder.cs b/Core/Core/Models/GraphTraversal/RuleBuilder.cs index 52ae183b56..612c4b88a9 100644 --- a/Core/Core/Models/GraphTraversal/RuleBuilder.cs +++ b/Core/Core/Models/GraphTraversal/RuleBuilder.cs @@ -1,92 +1,85 @@ -using System; +#nullable enable using System.Collections.Generic; using System.Linq; -#nullable enable -namespace Speckle.Core.Models.GraphTraversal -{ - /// - /// A traversal rule defines the conditional traversal behaviour when traversing a given objects. - /// Specifies what members to traverse if any provided are met. 
- /// - /// Follows the builder pattern to ensure that a rule is complete before usable, see usages - public sealed class TraversalRule - : ITraversalRule, - ITraversalBuilderWhen, - ITraversalBuilderTraverse - { - private List conditions; - private SelectMembers membersToTraverse; - - private TraversalRule() - { - conditions = new List(); - } - - /// a new Traversal Rule to be initialised using the Builder Pattern interfaces - public static ITraversalBuilderWhen NewTraversalRule() - { - return new TraversalRule(); - } +namespace Speckle.Core.Models.GraphTraversal; - bool ITraversalRule.DoesRuleHold(Base o) - { - foreach (var condition in conditions) - { - if (condition.Invoke(o)) return true; - } - return false; - } +/// +/// A traversal rule defines the conditional traversal behaviour when traversing a given objects. +/// Specifies what members to traverse if any provided are met. +/// +/// Follows the builder pattern to ensure that a rule is complete before usable, see usages +public sealed class TraversalRule : ITraversalRule, ITraversalBuilderWhen, ITraversalBuilderTraverse +{ + private List conditions; + private SelectMembers membersToTraverse; - IEnumerable ITraversalRule.MembersToTraverse(Base o) - { - return membersToTraverse(o).Distinct(); //TODO distinct is expensive, there may be a better way for us to avoid duplicates - } + private TraversalRule() + { + conditions = new List(); + } - public ITraversalBuilderTraverse When(WhenCondition condition) - { - conditions.Add(condition); - return this; - } + public ITraversalRule ContinueTraversing(SelectMembers membersToTraverse) + { + this.membersToTraverse = membersToTraverse; + return this; + } - public ITraversalRule ContinueTraversing(SelectMembers membersToTraverse) - { - this.membersToTraverse = membersToTraverse; - return this; - } + public ITraversalBuilderTraverse When(WhenCondition condition) + { + conditions.Add(condition); + return this; } + bool ITraversalRule.DoesRuleHold(Base o) + { + foreach 
(var condition in conditions) + if (condition.Invoke(o)) + return true; + return false; + } - #region Builder interfaces/delegates - public delegate bool WhenCondition(Base o); + IEnumerable ITraversalRule.MembersToTraverse(Base o) + { + return membersToTraverse(o).Distinct(); //TODO distinct is expensive, there may be a better way for us to avoid duplicates + } - /// - /// Interface for traversal rule in a building (unusable) state - /// - public interface ITraversalBuilderWhen + /// a new Traversal Rule to be initialised using the Builder Pattern interfaces + public static ITraversalBuilderWhen NewTraversalRule() { - /// - /// Adds a condition to this rule. This rule will hold true when ANY of its conditions holds true. - /// - /// - /// Traversal rule in a building (unusable) state - ITraversalBuilderTraverse When(WhenCondition condition); + return new TraversalRule(); } +} - /// - /// Delegate for selecting members (by member name) of an given object - /// - public delegate IEnumerable SelectMembers(Base o); +#region Builder interfaces/delegates +public delegate bool WhenCondition(Base o); +/// +/// Interface for traversal rule in a building (unusable) state +/// +public interface ITraversalBuilderWhen +{ /// - /// Interface for traversal rule in a building (unusable) state + /// Adds a condition to this rule. This rule will hold true when ANY of its conditions holds true. 
/// - public interface ITraversalBuilderTraverse : ITraversalBuilderWhen - { - /// - /// Function returning the members that should be traversed for objects where this rule holds - /// Traversal rule in a usable state - ITraversalRule ContinueTraversing(SelectMembers membersToTraverse); - } - #endregion + /// + /// Traversal rule in a building (unusable) state + ITraversalBuilderTraverse When(WhenCondition condition); +} + +/// +/// Delegate for selecting members (by member name) of an given object +/// +public delegate IEnumerable SelectMembers(Base o); + +/// +/// Interface for traversal rule in a building (unusable) state +/// +public interface ITraversalBuilderTraverse : ITraversalBuilderWhen +{ + /// + /// Function returning the members that should be traversed for objects where this rule holds + /// Traversal rule in a usable state + ITraversalRule ContinueTraversing(SelectMembers membersToTraverse); } +#endregion diff --git a/Core/Core/Models/InvalidPropNameException.cs b/Core/Core/Models/InvalidPropNameException.cs index 741083a15d..a4ab2cae0c 100644 --- a/Core/Core/Models/InvalidPropNameException.cs +++ b/Core/Core/Models/InvalidPropNameException.cs @@ -1,13 +1,14 @@ -using System; using Speckle.Core.Logging; -namespace Speckle.Core.Models +namespace Speckle.Core.Models; + +public class InvalidPropNameException : SpeckleException { - public class InvalidPropNameException : SpeckleException - { + public InvalidPropNameException(string propName, string reason) + : base($"Property '{propName}' is invalid: {reason}") { } + + public InvalidPropNameException() { } - public InvalidPropNameException(string propName, string reason) : base($"Property '{propName}' is invalid: {reason}") - { - } - } + public InvalidPropNameException(string message) + : base(message) { } } diff --git a/Core/Core/Models/Utilities.cs b/Core/Core/Models/Utilities.cs index 94f8479344..55d800fd7d 100644 --- a/Core/Core/Models/Utilities.cs +++ b/Core/Core/Models/Utilities.cs @@ -1,210 +1,218 
@@ -using System; +using System; using System.Collections.Generic; using System.IO; using System.Linq; using System.Reflection; +using System.Runtime.Serialization.Formatters.Binary; using System.Security.Cryptography; using System.Text; -namespace Speckle.Core.Models +namespace Speckle.Core.Models; + +public static class Utilities { - public static class Utilities + public enum HashingFuctions { + SHA256, + MD5 + } - public static int HashLength { get; } = 32; - - public enum HashingFuctions - { - SHA256, MD5 - } + public static int HashLength { get; } = 32; - /// - /// Wrapper method around hashing functions. Defaults to md5. - /// - /// - /// - public static string hashString(string input, HashingFuctions func = HashingFuctions.SHA256) + /// + /// Wrapper method around hashing functions. Defaults to md5. + /// + /// + /// + public static string hashString(string input, HashingFuctions func = HashingFuctions.SHA256) + { + switch (func) { - switch (func) - { - case HashingFuctions.SHA256: - return Utilities.sha256(input).Substring(0, HashLength); + case HashingFuctions.SHA256: + return sha256(input).Substring(0, HashLength); - case HashingFuctions.MD5: - default: - return Utilities.md5(input).Substring(0, HashLength); - - } + case HashingFuctions.MD5: + default: + return md5(input).Substring(0, HashLength); } + } - public static string hashFile(string filePath, HashingFuctions func = HashingFuctions.SHA256) - { - HashAlgorithm hashAlgorithm = SHA256.Create(); - if (func == HashingFuctions.MD5) - hashAlgorithm = MD5.Create(); + public static string hashFile(string filePath, HashingFuctions func = HashingFuctions.SHA256) + { + HashAlgorithm hashAlgorithm = SHA256.Create(); + if (func == HashingFuctions.MD5) + hashAlgorithm = MD5.Create(); - using (var stream = File.OpenRead(filePath)) - { - var hash = hashAlgorithm.ComputeHash(stream); - return BitConverter.ToString(hash).Replace("-", "").ToLowerInvariant().Substring(0, HashLength); - } + using (var stream = 
File.OpenRead(filePath)) + { + var hash = hashAlgorithm.ComputeHash(stream); + return BitConverter + .ToString(hash) + .Replace("-", "") + .ToLowerInvariant() + .Substring(0, HashLength); } + } - static string sha256(string input) + private static string sha256(string input) + { + using (MemoryStream ms = new()) { - using (System.IO.MemoryStream ms = new System.IO.MemoryStream()) + new BinaryFormatter().Serialize(ms, input); + using (SHA256 sha = SHA256.Create()) { - new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter().Serialize(ms, input); - using (SHA256 sha = SHA256.Create()) - { - var hash = sha.ComputeHash(ms.ToArray()); - StringBuilder sb = new StringBuilder(); - foreach (byte b in hash) - { - sb.Append(b.ToString("X2")); - } + var hash = sha.ComputeHash(ms.ToArray()); + StringBuilder sb = new(); + foreach (byte b in hash) + sb.Append(b.ToString("X2")); - return sb.ToString().ToLower(); - } + return sb.ToString().ToLower(); } } + } - static string md5(string input) + private static string md5(string input) + { + using (MD5 md5 = MD5.Create()) { - using (System.Security.Cryptography.MD5 md5 = System.Security.Cryptography.MD5.Create()) - { - byte[] inputBytes = System.Text.Encoding.ASCII.GetBytes(input.ToLowerInvariant()); - byte[] hashBytes = md5.ComputeHash(inputBytes); + byte[] inputBytes = Encoding.ASCII.GetBytes(input.ToLowerInvariant()); + byte[] hashBytes = md5.ComputeHash(inputBytes); - StringBuilder sb = new StringBuilder(); - for (int i = 0; i < hashBytes.Length; i++) - { - sb.Append(hashBytes[i].ToString("X2")); - } - return sb.ToString().ToLower(); - } + StringBuilder sb = new(); + for (int i = 0; i < hashBytes.Length; i++) + sb.Append(hashBytes[i].ToString("X2")); + return sb.ToString().ToLower(); } + } - public static bool IsSimpleType(this Type type) - { - return - type.IsPrimitive || - new Type[] { + public static bool IsSimpleType(this Type type) + { + return type.IsPrimitive + || new Type[] + { typeof(string), typeof(decimal), 
typeof(DateTime), typeof(DateTimeOffset), typeof(TimeSpan), typeof(Guid) - }.Contains(type) || - Convert.GetTypeCode(type) != TypeCode.Object; - } + }.Contains(type) + || Convert.GetTypeCode(type) != TypeCode.Object; + } - /// - /// Retrieves the simple type properties of an object - /// - /// - /// - /// Set to true to also retrieve simple props of direct parent type - /// Names of props to ignore - /// - public static Base GetApplicationProps(object o, Type t, bool getParentProps = false, List ignore = null) - { - var appProps = new Base(); - appProps["class"] = t.Name; + /// + /// Retrieves the simple type properties of an object + /// + /// + /// + /// Set to true to also retrieve simple props of direct parent type + /// Names of props to ignore + /// + public static Base GetApplicationProps( + object o, + Type t, + bool getParentProps = false, + List ignore = null + ) + { + var appProps = new Base(); + appProps["class"] = t.Name; - try + try + { + // set primitive writeable props + foreach ( + var propInfo in t.GetProperties( + BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public + ) + ) { - // set primitive writeable props - foreach (var propInfo in t.GetProperties(BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public)) + if (ignore != null && ignore.Contains(propInfo.Name)) + continue; + if (IsMeaningfulProp(propInfo, o, out object propValue)) + appProps[propInfo.Name] = propValue; + } + if (getParentProps) + foreach ( + var propInfo in t.BaseType.GetProperties( + BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public + ) + ) { - if (ignore != null && ignore.Contains(propInfo.Name)) continue; + if (ignore != null && ignore.Contains(propInfo.Name)) + continue; if (IsMeaningfulProp(propInfo, o, out object propValue)) appProps[propInfo.Name] = propValue; } - if (getParentProps) - { - foreach (var propInfo in t.BaseType.GetProperties(BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public)) - 
{ - if (ignore != null && ignore.Contains(propInfo.Name)) continue; - if (IsMeaningfulProp(propInfo, o, out object propValue)) - appProps[propInfo.Name] = propValue; - } - } - } - catch (Exception e) - { + } + catch (Exception e) { } - } + return appProps; + } - return appProps; - } - private static bool IsMeaningfulProp(PropertyInfo propInfo, object o, out object value) + private static bool IsMeaningfulProp(PropertyInfo propInfo, object o, out object value) + { + value = propInfo.GetValue(o); + if (propInfo.GetSetMethod() != null && value != null) { - value = propInfo.GetValue(o); - if (propInfo.GetSetMethod() != null && value != null) + if (propInfo.PropertyType.IsPrimitive || propInfo.PropertyType == typeof(decimal)) + return true; + if (propInfo.PropertyType == typeof(string) && !string.IsNullOrEmpty((string)value)) + return true; + if (propInfo.PropertyType.BaseType.Name == "Enum") // for some reason "IsEnum" prop returns false { - if (propInfo.PropertyType.IsPrimitive || propInfo.PropertyType == typeof(decimal)) return true; - if (propInfo.PropertyType == typeof(string) && !string.IsNullOrEmpty((string)value)) return true; - if (propInfo.PropertyType.BaseType.Name == "Enum") // for some reason "IsEnum" prop returns false - { - value = value.ToString(); - return true; - } + value = value.ToString(); + return true; } - return false; } + return false; + } - /// - /// Sets the properties of an object with the properties of a base object - /// - /// - /// - /// The base class object representing application props - public static void SetApplicationProps(object o, Type t, Base props) - { - var propNames = props.GetDynamicMembers(); - if (o == null || propNames.Count() == 0) - return; - - var typeProperties = t.GetProperties().ToList(); - typeProperties.AddRange(t.BaseType.GetProperties().ToList()); - foreach (var propInfo in typeProperties) + /// + /// Sets the properties of an object with the properties of a base object + /// + /// + /// + /// The base class 
object representing application props + public static void SetApplicationProps(object o, Type t, Base props) + { + var propNames = props.GetDynamicMembers(); + if (o == null || propNames.Count() == 0) + return; + + var typeProperties = t.GetProperties().ToList(); + typeProperties.AddRange(t.BaseType.GetProperties().ToList()); + foreach (var propInfo in typeProperties) + if (propInfo.CanWrite && propNames.Contains(propInfo.Name)) { - if (propInfo.CanWrite && propNames.Contains(propInfo.Name)) - { - var value = props[propInfo.Name]; - if (propInfo.PropertyType.BaseType.Name == "Enum") - value = Enum.Parse(propInfo.PropertyType, (string)value); - if (value != null) + var value = props[propInfo.Name]; + if (propInfo.PropertyType.BaseType.Name == "Enum") + value = Enum.Parse(propInfo.PropertyType, (string)value); + if (value != null) + try { - try - { - t.InvokeMember(propInfo.Name, + t.InvokeMember( + propInfo.Name, BindingFlags.Instance | BindingFlags.Public | BindingFlags.SetProperty, - Type.DefaultBinder, o, new object[] { value }); - } - catch { } + Type.DefaultBinder, + o, + new object[] { value } + ); } - } - } - } - - /// - /// Chunks a list into pieces. - /// - /// - /// - /// - /// - public static IEnumerable> SplitList(List list, int chunkSize = 50) - { - for (int i = 0; i < list.Count; i += chunkSize) - { - yield return list.GetRange(i, Math.Min(chunkSize, list.Count - i)); + catch { } } - } - } + /// + /// Chunks a list into pieces. 
+ /// + /// + /// + /// + /// + public static IEnumerable> SplitList(List list, int chunkSize = 50) + { + for (int i = 0; i < list.Count; i += chunkSize) + yield return list.GetRange(i, Math.Min(chunkSize, list.Count - i)); + } } diff --git a/Core/Core/Serialisation/BaseObjectDeserializerV2.cs b/Core/Core/Serialisation/BaseObjectDeserializerV2.cs index 56b14d6ea9..92325eef93 100644 --- a/Core/Core/Serialisation/BaseObjectDeserializerV2.cs +++ b/Core/Core/Serialisation/BaseObjectDeserializerV2.cs @@ -1,6 +1,7 @@ -using System; +using System; using System.Collections.Generic; using System.Diagnostics; +using System.IO; using System.Reflection; using System.Text.RegularExpressions; using System.Threading; @@ -11,317 +12,322 @@ using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Linq; -namespace Speckle.Core.Serialisation +namespace Speckle.Core.Serialisation; + +public class BaseObjectDeserializerV2 { - public class BaseObjectDeserializerV2 - { - /// - /// Property that describes the type of the object. - /// - public string TypeDiscriminator = "speckle_type"; + private bool Busy = false; + private object CallbackLock = new(); - public CancellationToken CancellationToken { get; set; } + // id -> Base if already deserialized or id -> Task if was handled by a bg thread + private Dictionary DeserializedObjects; - /// - /// The sync transport. This transport will be used synchronously. - /// - public ITransport ReadTransport { get; set; } + public int TotalProcessedCount = 0; - public int TotalProcessedCount = 0; + /// + /// Property that describes the type of the object. 
+ /// + public string TypeDiscriminator = "speckle_type"; - public Action OnProgressAction { get; set; } + private DeserializationWorkerThreads WorkerThreads; - public Action OnErrorAction { get; set; } + public BaseObjectDeserializerV2() { } - private DeserializationWorkerThreads WorkerThreads; - private bool Busy = false; - // id -> Base if already deserialized or id -> Task if was handled by a bg thread - private Dictionary DeserializedObjects; - private object CallbackLock = new object(); + public CancellationToken CancellationToken { get; set; } - private Regex ChunkPropertyNameRegex = new Regex(@"^@\((\d*)\)"); + /// + /// The sync transport. This transport will be used synchronously. + /// + public ITransport ReadTransport { get; set; } - public string BlobStorageFolder { get; set; } - public TimeSpan Elapsed { get; private set; } + public Action OnProgressAction { get; set; } - public BaseObjectDeserializerV2() - { + public Action OnErrorAction { get; set; } - } + public string BlobStorageFolder { get; set; } + public TimeSpan Elapsed { get; private set; } - /// The JSON string of the object to be deserialized - /// A typed object deserialized from the - /// Thrown when - /// Thrown when deserializes to a type other than - public Base Deserialize(string rootObjectJson) - { - if (Busy) - throw new InvalidOperationException("A deserializer instance can deserialize only 1 object at a time. Consider creating multiple deserializer instances"); + /// The JSON string of the object to be deserialized + /// A typed object deserialized from the + /// Thrown when + /// Thrown when deserializes to a type other than + public Base Deserialize(string rootObjectJson) + { + if (Busy) + throw new InvalidOperationException( + "A deserializer instance can deserialize only 1 object at a time. 
Consider creating multiple deserializer instances" + ); - try + try + { + Busy = true; + var stopwatch = Stopwatch.StartNew(); + DeserializedObjects = new Dictionary(); + WorkerThreads = new DeserializationWorkerThreads(this); + WorkerThreads.Start(); + + List<(string, int)> closures = GetClosures(rootObjectJson); + closures.Sort((a, b) => b.Item2.CompareTo(a.Item2)); + foreach (var closure in closures) { - Busy = true; - var stopwatch = Stopwatch.StartNew(); - DeserializedObjects = new Dictionary(); - WorkerThreads = new DeserializationWorkerThreads(this); - WorkerThreads.Start(); - - List<(string, int)> closures = GetClosures(rootObjectJson); - closures.Sort((a, b) => b.Item2.CompareTo(a.Item2)); - foreach (var closure in closures) - { - string objId = closure.Item1; - // pausing for getting object from the transport - stopwatch.Stop(); - string objJson = ReadTransport.GetObject(objId); - stopwatch.Start(); - object deserializedOrPromise = DeserializeTransportObjectProxy(objJson); - lock (DeserializedObjects) - { - DeserializedObjects[objId] = deserializedOrPromise; - } - } - - object ret = DeserializeTransportObject(rootObjectJson); - + string objId = closure.Item1; + // pausing for getting object from the transport stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; - if (ret is Base b) return b; + string objJson = ReadTransport.GetObject(objId); + stopwatch.Start(); + object deserializedOrPromise = DeserializeTransportObjectProxy(objJson); + lock (DeserializedObjects) + DeserializedObjects[objId] = deserializedOrPromise; + } - else throw new Exception( + object ret = DeserializeTransportObject(rootObjectJson); + + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + if (ret is Base b) + return b; + else + throw new Exception( $"Expected {nameof(rootObjectJson)} to be deserialized to type {nameof(Base)} but was {ret}" ); - } - finally - { - DeserializedObjects = null; - WorkerThreads.Dispose(); - WorkerThreads = null; - Busy = false; - } } + finally + { + 
DeserializedObjects = null; + WorkerThreads.Dispose(); + WorkerThreads = null; + Busy = false; + } + } - private List<(string, int)> GetClosures(string rootObjectJson) + private List<(string, int)> GetClosures(string rootObjectJson) + { + try { - try - { - List<(string, int)> closureList = new List<(string, int)>(); - JObject doc1 = JObject.Parse(rootObjectJson); + List<(string, int)> closureList = new(); + JObject doc1 = JObject.Parse(rootObjectJson); - if (!doc1.ContainsKey("__closure")) - return new List<(string, int)>(); - foreach (JToken prop in doc1["__closure"]) - { - string childId = ((JProperty)prop).Name; - int childMinDepth = (int)((JProperty)prop).Value; - closureList.Add((childId, childMinDepth)); - } - return closureList; - } - catch - { + if (!doc1.ContainsKey("__closure")) return new List<(string, int)>(); + foreach (JToken prop in doc1["__closure"]) + { + string childId = ((JProperty)prop).Name; + int childMinDepth = (int)((JProperty)prop).Value; + closureList.Add((childId, childMinDepth)); } + return closureList; } - - private object DeserializeTransportObjectProxy(String objectJson) + catch { - // Try background work - Task bgResult = WorkerThreads.TryStartTask(WorkerThreadTaskType.Deserialize, objectJson); - if (bgResult != null) - return bgResult; - - // Sync - return DeserializeTransportObject(objectJson); + return new List<(string, int)>(); } + } - public object DeserializeTransportObject(String objectJson) - { - // Apparently this automatically parses DateTimes in strings if it matches the format: - // JObject doc1 = JObject.Parse(objectJson); - - // This is equivalent code that doesn't parse datetimes: - JObject doc1; - using (JsonReader reader = new JsonTextReader(new System.IO.StringReader(objectJson))) - { - reader.DateParseHandling = DateParseHandling.None; - doc1 = JObject.Load(reader); - } + private object DeserializeTransportObjectProxy(string objectJson) + { + // Try background work + Task bgResult = WorkerThreads.TryStartTask( + 
WorkerThreadTaskType.Deserialize, + objectJson + ); + if (bgResult != null) + return bgResult; + + // Sync + return DeserializeTransportObject(objectJson); + } + public object DeserializeTransportObject(string objectJson) + { + // Apparently this automatically parses DateTimes in strings if it matches the format: + // JObject doc1 = JObject.Parse(objectJson); - object converted = ConvertJsonElement(doc1); - lock (CallbackLock) - { - OnProgressAction?.Invoke("DS", 1); - } - return converted; + // This is equivalent code that doesn't parse datetimes: + JObject doc1; + using (JsonReader reader = new JsonTextReader(new StringReader(objectJson))) + { + reader.DateParseHandling = DateParseHandling.None; + doc1 = JObject.Load(reader); } - public object ConvertJsonElement(JToken doc) - { - CancellationToken.ThrowIfCancellationRequested(); + object converted = ConvertJsonElement(doc1); + lock (CallbackLock) + OnProgressAction?.Invoke("DS", 1); + return converted; + } - switch (doc.Type) - { - case JTokenType.Undefined: - case JTokenType.Null: - case JTokenType.None: - return null; - case JTokenType.Boolean: - return (bool)doc; - case JTokenType.Integer: - try - { - return (long)doc; - } - catch (OverflowException ex) - { - var v = (object)(double)doc; - SpeckleLog.Logger.Debug(ex, "Json property {tokenType} failed to deserialize {value} to {targetType}, will be deserialized as {fallbackType}", doc.Type, v, typeof(long), typeof(double)); - return v; - } - case JTokenType.Float: - return (double)doc; - case JTokenType.String: - return (string)doc; - case JTokenType.Date: - return (DateTime)doc; - case JTokenType.Array: - JArray docAsArray = (JArray)doc; - List jsonList = new List(docAsArray.Count); - int retListCount = 0; - foreach (JToken value in docAsArray) - { - object convertedValue = ConvertJsonElement(value); - retListCount += (convertedValue is DataChunk) ? 
((DataChunk)convertedValue).data.Count : 1; - jsonList.Add(convertedValue); - } + public object ConvertJsonElement(JToken doc) + { + CancellationToken.ThrowIfCancellationRequested(); - List retList = new List(retListCount); - foreach (object jsonObj in jsonList) - { - if (jsonObj is DataChunk) - retList.AddRange(((DataChunk)jsonObj).data); - else - retList.Add(jsonObj); - } + switch (doc.Type) + { + case JTokenType.Undefined: + case JTokenType.Null: + case JTokenType.None: + return null; + case JTokenType.Boolean: + return (bool)doc; + case JTokenType.Integer: + try + { + return (long)doc; + } + catch (OverflowException ex) + { + var v = (object)(double)doc; + SpeckleLog.Logger.Debug( + ex, + "Json property {tokenType} failed to deserialize {value} to {targetType}, will be deserialized as {fallbackType}", + doc.Type, + v, + typeof(long), + typeof(double) + ); + return v; + } + case JTokenType.Float: + return (double)doc; + case JTokenType.String: + return (string)doc; + case JTokenType.Date: + return (DateTime)doc; + case JTokenType.Array: + JArray docAsArray = (JArray)doc; + List jsonList = new(docAsArray.Count); + int retListCount = 0; + foreach (JToken value in docAsArray) + { + object convertedValue = ConvertJsonElement(value); + retListCount += convertedValue is DataChunk ? 
((DataChunk)convertedValue).data.Count : 1; + jsonList.Add(convertedValue); + } + + List retList = new(retListCount); + foreach (object jsonObj in jsonList) + if (jsonObj is DataChunk) + retList.AddRange(((DataChunk)jsonObj).data); + else + retList.Add(jsonObj); - return retList; - case JTokenType.Object: - Dictionary dict = new Dictionary(); + return retList; + case JTokenType.Object: + Dictionary dict = new(); - foreach (JToken propJToken in doc) - { - JProperty prop = (JProperty)propJToken; - if (prop.Name == "__closure") - continue; - dict[prop.Name] = ConvertJsonElement(prop.Value); - } + foreach (JToken propJToken in doc) + { + JProperty prop = (JProperty)propJToken; + if (prop.Name == "__closure") + continue; + dict[prop.Name] = ConvertJsonElement(prop.Value); + } - if (!dict.ContainsKey(TypeDiscriminator)) - return dict; + if (!dict.ContainsKey(TypeDiscriminator)) + return dict; - if ((dict[TypeDiscriminator] as String) == "reference" && dict.ContainsKey("referencedId")) + if (dict[TypeDiscriminator] as string == "reference" && dict.ContainsKey("referencedId")) + { + string objId = dict["referencedId"] as string; + object deserialized = null; + lock (DeserializedObjects) + if (DeserializedObjects.ContainsKey(objId)) + deserialized = DeserializedObjects[objId]; + if (deserialized != null && deserialized is Task) { - string objId = dict["referencedId"] as String; - object deserialized = null; - lock (DeserializedObjects) + try { - if (DeserializedObjects.ContainsKey(objId)) - deserialized = DeserializedObjects[objId]; + deserialized = ((Task)deserialized).Result; } - if (deserialized != null && deserialized is Task) + catch (AggregateException aggregateEx) { - try - { - deserialized = ((Task)deserialized).Result; - } - catch (AggregateException aggregateEx) - { - throw aggregateEx.InnerException; - } - lock (DeserializedObjects) - { - DeserializedObjects[objId] = deserialized; - } + throw aggregateEx.InnerException; } - - if (deserialized != null) - return 
deserialized; - - // This reference was not already deserialized. Do it now in sync mode - string objectJson = ReadTransport.GetObject(objId); - deserialized = DeserializeTransportObject(objectJson); lock (DeserializedObjects) - { DeserializedObjects[objId] = deserialized; - } - return deserialized; } - return Dict2Base(dict); - default: - throw new Exception("Json value not supported: " + doc.Type.ToString()); - } + if (deserialized != null) + return deserialized; + + // This reference was not already deserialized. Do it now in sync mode + string objectJson = ReadTransport.GetObject(objId); + deserialized = DeserializeTransportObject(objectJson); + lock (DeserializedObjects) + DeserializedObjects[objId] = deserialized; + return deserialized; + } + + return Dict2Base(dict); + default: + throw new Exception("Json value not supported: " + doc.Type.ToString()); } + } - private Base Dict2Base(Dictionary dictObj) - { - String typeName = dictObj[TypeDiscriminator] as String; - Type type = SerializationUtilities.GetType(typeName); - Base baseObj = Activator.CreateInstance(type) as Base; + private Base Dict2Base(Dictionary dictObj) + { + string typeName = dictObj[TypeDiscriminator] as string; + Type type = SerializationUtilities.GetType(typeName); + Base baseObj = Activator.CreateInstance(type) as Base; - dictObj.Remove(TypeDiscriminator); - dictObj.Remove("__closure"); + dictObj.Remove(TypeDiscriminator); + dictObj.Remove("__closure"); - Dictionary staticProperties = SerializationUtilities.GetTypePropeties(typeName); - List onDeserializedCallbacks = SerializationUtilities.GetOnDeserializedCallbacks(typeName); + Dictionary staticProperties = SerializationUtilities.GetTypePropeties( + typeName + ); + List onDeserializedCallbacks = SerializationUtilities.GetOnDeserializedCallbacks( + typeName + ); - foreach (KeyValuePair entry in dictObj) + foreach (KeyValuePair entry in dictObj) + { + string lowerPropertyName = entry.Key.ToLower(); + if ( + 
staticProperties.ContainsKey(lowerPropertyName) + && staticProperties[lowerPropertyName].CanWrite + ) { - string lowerPropertyName = entry.Key.ToLower(); - if (staticProperties.ContainsKey(lowerPropertyName) && staticProperties[lowerPropertyName].CanWrite) + PropertyInfo property = staticProperties[lowerPropertyName]; + if (entry.Value == null) { - PropertyInfo property = staticProperties[lowerPropertyName]; - if (entry.Value == null) - { - // Check for JsonProperty(NullValueHandling = NullValueHandling.Ignore) attribute - JsonPropertyAttribute attr = property.GetCustomAttribute(true); - if (attr != null && attr.NullValueHandling == NullValueHandling.Ignore) - continue; - } - - Type targetValueType = property.PropertyType; - object convertedValue; - bool conversionOk = ValueConverter.ConvertValue(targetValueType, entry.Value, out convertedValue); - if (conversionOk) - { - property.SetValue(baseObj, convertedValue); - } - else - { - // Cannot convert the value in the json to the static property type - throw new Exception(String.Format("Cannot deserialize {0} to {1}", entry.Value.GetType().FullName, targetValueType.FullName)); - } + // Check for JsonProperty(NullValueHandling = NullValueHandling.Ignore) attribute + JsonPropertyAttribute attr = property.GetCustomAttribute(true); + if (attr != null && attr.NullValueHandling == NullValueHandling.Ignore) + continue; } + + Type targetValueType = property.PropertyType; + object convertedValue; + bool conversionOk = ValueConverter.ConvertValue( + targetValueType, + entry.Value, + out convertedValue + ); + if (conversionOk) + property.SetValue(baseObj, convertedValue); else - { - // No writable property with this name - CallSiteCache.SetValue(entry.Key, baseObj, entry.Value); - } + // Cannot convert the value in the json to the static property type + throw new Exception( + string.Format( + "Cannot deserialize {0} to {1}", + entry.Value.GetType().FullName, + targetValueType.FullName + ) + ); } - - if (baseObj is Blob b && 
BlobStorageFolder != null) + else { - b.filePath = b.getLocalDestinationPath(BlobStorageFolder); + // No writable property with this name + CallSiteCache.SetValue(entry.Key, baseObj, entry.Value); } + } - foreach (MethodInfo onDeserialized in onDeserializedCallbacks) - { - onDeserialized.Invoke(baseObj, new object[] { null }); - } + if (baseObj is Blob b && BlobStorageFolder != null) + b.filePath = b.getLocalDestinationPath(BlobStorageFolder); - return baseObj; - } + foreach (MethodInfo onDeserialized in onDeserializedCallbacks) + onDeserialized.Invoke(baseObj, new object[] { null }); + + return baseObj; } } diff --git a/Core/Core/Serialisation/BaseObjectSerializer.cs b/Core/Core/Serialisation/BaseObjectSerializer.cs index 6e462d8e8c..66c0884f56 100644 --- a/Core/Core/Serialisation/BaseObjectSerializer.cs +++ b/Core/Core/Serialisation/BaseObjectSerializer.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections; using System.Collections.Generic; using System.Runtime.Serialization; @@ -10,667 +10,636 @@ using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Linq; using Speckle.Newtonsoft.Json.Serialization; +using Utilities = Speckle.Core.Models.Utilities; -namespace Speckle.Core.Serialisation +namespace Speckle.Core.Serialisation; + +/// +/// Json converter that handles base speckle objects. Enables detachment & +/// simultaneous transport (persistence) of objects. +/// +public class BaseObjectSerializer : JsonConverter { /// - /// Json converter that handles base speckle objects. Enables detachment & - /// simultaneous transport (persistence) of objects. + /// Property that describes the type of the object. /// - public class BaseObjectSerializer : JsonConverter - { + public string TypeDiscriminator = "speckle_type"; - /// - /// Property that describes the type of the object. 
- /// - public string TypeDiscriminator = "speckle_type"; + public BaseObjectSerializer() + { + ResetAndInitialize(); + } - public CancellationToken CancellationToken { get; set; } + public CancellationToken CancellationToken { get; set; } - /// - /// The sync transport. This transport will be used synchronously. - /// - public ITransport ReadTransport { get; set; } + /// + /// The sync transport. This transport will be used synchronously. + /// + public ITransport ReadTransport { get; set; } - /// - /// List of transports to write to. - /// - public List WriteTransports { get; set; } = new List(); + /// + /// List of transports to write to. + /// + public List WriteTransports { get; set; } = new(); - #region Write Json Helper Properties + public override bool CanWrite => true; - /// - /// Keeps track of wether current property pointer is marked for detachment. - /// - List DetachLineage { get; set; } + public override bool CanRead => true; - /// - /// Keeps track of the hash chain through the object tree. - /// - List Lineage { get; set; } + public Action OnProgressAction { get; set; } - /// - /// Dictionary of object if and its subsequent closure table (a dictionary of hashes and min depth at which they are found). - /// - Dictionary> RefMinDepthTracker { get; set; } + public Action OnErrorAction { get; set; } - public int TotalProcessedCount = 0; - #endregion + /// + /// Reinitializes the lineage, and other variables that get used during the + /// json writing process. 
+ /// + public void ResetAndInitialize() + { + DetachLineage = new List(); + Lineage = new List(); + RefMinDepthTracker = new Dictionary>(); + OnProgressAction = null; + TotalProcessedCount = 0; + } - public override bool CanWrite => true; + public override bool CanConvert(Type objectType) + { + return true; + } - public override bool CanRead => true; + #region Read Json - public Action OnProgressAction { get; set; } + public override object ReadJson( + JsonReader reader, + Type objectType, + object existingValue, + JsonSerializer serializer + ) + { + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - public Action OnErrorAction { get; set; } + if (reader.TokenType == JsonToken.Null) + return null; - public BaseObjectSerializer() + // Check if we passed in an array, rather than an object. + // TODO: Test the following branch. It's not used anywhere at the moment, and the default serializer prevents it from + // ever being used (only allows single object serialization) + if (reader.TokenType == JsonToken.StartArray) { - ResetAndInitialize(); - } + var list = new List(); + var jarr = JArray.Load(reader); - /// - /// Reinitializes the lineage, and other variables that get used during the - /// json writing process. 
- /// - public void ResetAndInitialize() - { - DetachLineage = new List(); - Lineage = new List(); - RefMinDepthTracker = new Dictionary>(); - OnProgressAction = null; - TotalProcessedCount = 0; + foreach (var val in jarr) + { + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation + + var whatever = SerializationUtilities.HandleValue(val, serializer, CancellationToken); + list.Add(whatever as Base); + } + return list; } - public override bool CanConvert(Type objectType) => true; + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation + + var jObject = JObject.Load(reader); - #region Read Json + if (jObject == null) + return null; - public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + var objType = jObject.GetValue(TypeDiscriminator); + + // Assume dictionary! + if (objType == null) { + var dict = new Dictionary(); - if (CancellationToken.IsCancellationRequested) + foreach (var val in jObject) { - return null; // Check for cancellation - } + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - if (reader.TokenType == JsonToken.Null) - { - return null; + dict[val.Key] = SerializationUtilities.HandleValue( + val.Value, + serializer, + CancellationToken + ); } + return dict; + } - // Check if we passed in an array, rather than an object. - // TODO: Test the following branch. 
It's not used anywhere at the moment, and the default serializer prevents it from - // ever being used (only allows single object serialization) - if (reader.TokenType == JsonToken.StartArray) - { - var list = new List(); - var jarr = JArray.Load(reader); + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - foreach (var val in jarr) - { - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + var discriminator = Extensions.Value(objType); - var whatever = SerializationUtilities.HandleValue(val, serializer, CancellationToken); - list.Add(whatever as Base); - } - return list; - } + // Check for references. + if (discriminator == "reference") + { + var id = Extensions.Value(jObject.GetValue("referencedId")); + string str = ""; - if (CancellationToken.IsCancellationRequested) + if (ReadTransport != null) + str = ReadTransport.GetObject(id); + else + throw new SpeckleException("Cannot resolve reference, no transport is defined."); + + if (str != null && !string.IsNullOrEmpty(str)) { - return null; // Check for cancellation + jObject = JObject.Parse(str); + discriminator = Extensions.Value(jObject.GetValue(TypeDiscriminator)); } - - var jObject = JObject.Load(reader); - - if (jObject == null) + else { - return null; + throw new SpeckleException( + "Cannot resolve reference. The provided transport could not find it." + ); } + } - var objType = jObject.GetValue(TypeDiscriminator); + var type = SerializationUtilities.GetType(discriminator); + var obj = existingValue ?? Activator.CreateInstance(type); - // Assume dictionary! 
- if (objType == null) - { - var dict = new Dictionary(); + var contract = (JsonDynamicContract)serializer.ContractResolver.ResolveContract(type); + var used = new HashSet(); - foreach (var val in jObject) - { - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + // remove unsettable properties + jObject.Remove(TypeDiscriminator); + jObject.Remove("__closure"); - dict[val.Key] = SerializationUtilities.HandleValue(val.Value, serializer, CancellationToken); - } - return dict; - } + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation + foreach (var jProperty in jObject.Properties()) + { if (CancellationToken.IsCancellationRequested) - { return null; // Check for cancellation - } - var discriminator = Extensions.Value(objType); + if (used.Contains(jProperty.Name)) + continue; - // Check for references. - if (discriminator == "reference") - { - var id = Extensions.Value(jObject.GetValue("referencedId")); - string str = ""; + used.Add(jProperty.Name); - if (ReadTransport != null) - { - str = ReadTransport.GetObject(id); - } - else - { - throw new SpeckleException("Cannot resolve reference, no transport is defined."); - } + // first attempt to find a settable property, otherwise fall back to a dynamic set without type + JsonProperty property = contract.Properties.GetClosestMatchProperty(jProperty.Name); - if (str != null && str != "") + if (property != null && property.Writable) + { + if (type == typeof(Abstract) && property.PropertyName == "base") { - jObject = JObject.Parse(str); - discriminator = Extensions.Value(jObject.GetValue(TypeDiscriminator)); + var propertyValue = SerializationUtilities.HandleAbstractOriginalValue( + jProperty.Value, + ((JValue)jObject.GetValue("assemblyQualifiedName")).Value as string, + serializer + ); + property.ValueProvider.SetValue(obj, propertyValue); } else { - throw new SpeckleException("Cannot resolve reference. 
The provided transport could not find it."); + var val = SerializationUtilities.HandleValue( + jProperty.Value, + serializer, + CancellationToken, + property + ); + property.ValueProvider.SetValue(obj, val); } } + else + { + // dynamic properties + CallSiteCache.SetValue( + jProperty.Name, + obj, + SerializationUtilities.HandleValue(jProperty.Value, serializer, CancellationToken) + ); + } + } - var type = SerializationUtilities.GetType(discriminator); - var obj = existingValue ?? Activator.CreateInstance(type); + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - var contract = (JsonDynamicContract)serializer.ContractResolver.ResolveContract(type); - var used = new HashSet(); + TotalProcessedCount++; + OnProgressAction?.Invoke("DS", 1); - // remove unsettable properties - jObject.Remove(TypeDiscriminator); - jObject.Remove("__closure"); + foreach (var callback in contract.OnDeserializedCallbacks) + callback(obj, serializer.Context); - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + return obj; + } - foreach (var jProperty in jObject.Properties()) - { - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + #endregion - if (used.Contains(jProperty.Name)) - { - continue; - } + #region Write Json Helper Properties - used.Add(jProperty.Name); + /// + /// Keeps track of wether current property pointer is marked for detachment. + /// + private List DetachLineage { get; set; } - // first attempt to find a settable property, otherwise fall back to a dynamic set without type - JsonProperty property = contract.Properties.GetClosestMatchProperty(jProperty.Name); + /// + /// Keeps track of the hash chain through the object tree. 
+ /// + private List Lineage { get; set; } - if (property != null && property.Writable) - { + /// + /// Dictionary of object if and its subsequent closure table (a dictionary of hashes and min depth at which they are found). + /// + private Dictionary> RefMinDepthTracker { get; set; } - if (type == typeof(Abstract) && property.PropertyName == "base") - { - var propertyValue = SerializationUtilities.HandleAbstractOriginalValue(jProperty.Value, ((JValue)jObject.GetValue("assemblyQualifiedName")).Value as string, serializer); - property.ValueProvider.SetValue(obj, propertyValue); - } - else - { - var val = SerializationUtilities.HandleValue(jProperty.Value, serializer, CancellationToken, property); - property.ValueProvider.SetValue(obj, val); - } - } - else - { - // dynamic properties - CallSiteCache.SetValue(jProperty.Name, obj, SerializationUtilities.HandleValue(jProperty.Value, serializer, CancellationToken)); - } - } + public int TotalProcessedCount = 0; - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + #endregion - TotalProcessedCount++; - OnProgressAction?.Invoke("DS", 1); + #region Write Json - foreach (var callback in contract.OnDeserializedCallbacks) - { - callback(obj, serializer.Context); - } + // Keeps track of the actual tree structure of the objects being serialised. + // These tree references will thereafter be stored in the __tree prop. + private void TrackReferenceInTree(string refId) + { + // Help with creating closure table entries. 
+ for (int i = 0; i < Lineage.Count; i++) + { + var parent = Lineage[i]; + + if (!RefMinDepthTracker.ContainsKey(parent)) + RefMinDepthTracker[parent] = new Dictionary(); - return obj; + if (!RefMinDepthTracker[parent].ContainsKey(refId)) + RefMinDepthTracker[parent][refId] = Lineage.Count - i; + else if (RefMinDepthTracker[parent][refId] > Lineage.Count - i) + RefMinDepthTracker[parent][refId] = Lineage.Count - i; } + } - #endregion + private bool FirstEntry = true, + FirstEntryWasListOrDict = false; - #region Write Json + // While this function looks complicated, it's actually quite smooth: + // The important things to remember is that serialization goes depth first: + // The first object to get fully serialised is the first nested one, with + // the parent object being last. + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + writer.Formatting = serializer.Formatting; + if (CancellationToken.IsCancellationRequested) + return; // Check for cancellation - // Keeps track of the actual tree structure of the objects being serialised. - // These tree references will thereafter be stored in the __tree prop. - private void TrackReferenceInTree(string refId) - { - // Help with creating closure table entries. 
- for (int i = 0; i < Lineage.Count; i++) - { - var parent = Lineage[i]; + ///////////////////////////////////// + // Path one: nulls + ///////////////////////////////////// - if (!RefMinDepthTracker.ContainsKey(parent)) - { - RefMinDepthTracker[parent] = new Dictionary(); - } + if (value == null) + return; - if (!RefMinDepthTracker[parent].ContainsKey(refId)) - { - RefMinDepthTracker[parent][refId] = Lineage.Count - i; - } - else if (RefMinDepthTracker[parent][refId] > Lineage.Count - i) - { - RefMinDepthTracker[parent][refId] = Lineage.Count - i; - } - } + ///////////////////////////////////// + // Path two: primitives (string, bool, int, etc) + ///////////////////////////////////// + + if (value.GetType().IsPrimitive || value is string) + { + FirstEntry = false; + writer.WriteValue(value); + //var t = JToken.FromObject(value); // bypasses this converter as we do not pass in the serializer + //t.WriteTo(writer); + return; } - private bool FirstEntry = true, FirstEntryWasListOrDict = false; + ///////////////////////////////////// + // Path three: Bases + ///////////////////////////////////// - // While this function looks complicated, it's actually quite smooth: - // The important things to remember is that serialization goes depth first: - // The first object to get fully serialised is the first nested one, with - // the parent object being last. 
- public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + if (value is Base && !(value is ObjectReference)) { - writer.Formatting = serializer.Formatting; if (CancellationToken.IsCancellationRequested) - { return; // Check for cancellation - } - ///////////////////////////////////// - // Path one: nulls - ///////////////////////////////////// + var obj = value as Base; - if (value == null) - { - return; - } + FirstEntry = false; + //TotalProcessedCount++; - ///////////////////////////////////// - // Path two: primitives (string, bool, int, etc) - ///////////////////////////////////// + // Append to lineage tracker + Lineage.Add(Guid.NewGuid().ToString()); - if (value.GetType().IsPrimitive || value is string) - { - FirstEntry = false; - writer.WriteValue(value); - //var t = JToken.FromObject(value); // bypasses this converter as we do not pass in the serializer - //t.WriteTo(writer); - return; - } + var jo = new JObject(); + var propertyNames = obj.GetDynamicMemberNames(); - ///////////////////////////////////// - // Path three: Bases - ///////////////////////////////////// + var contract = (JsonDynamicContract) + serializer.ContractResolver.ResolveContract(value.GetType()); - if (value is Base && !(value is ObjectReference)) + // Iterate through the object's properties, one by one, checking for ignored ones + foreach (var prop in propertyNames) { if (CancellationToken.IsCancellationRequested) - { return; // Check for cancellation - } - - var obj = value as Base; + // Ignore properties starting with a double underscore. + if (prop.StartsWith("__")) + continue; - FirstEntry = false; - //TotalProcessedCount++; + if (prop == "id") + continue; - // Append to lineage tracker - Lineage.Add(Guid.NewGuid().ToString()); + var property = contract.Properties.GetClosestMatchProperty(prop); - var jo = new JObject(); - var propertyNames = obj.GetDynamicMemberNames(); + // Ignore properties decorated with [JsonIgnore]. 
+ if (property != null && property.Ignored) + continue; - var contract = (JsonDynamicContract)serializer.ContractResolver.ResolveContract(value.GetType()); + // Ignore nulls + object propValue = obj[prop]; + if (propValue == null) + continue; - // Iterate through the object's properties, one by one, checking for ignored ones - foreach (var prop in propertyNames) + // Check if this property is marked for detachment: either by the presence of "@" at the beginning of the name, or by the presence of a DetachProperty attribute on a typed property. + if (property != null) { - if (CancellationToken.IsCancellationRequested) - { - return; // Check for cancellation - } - // Ignore properties starting with a double underscore. - if (prop.StartsWith("__")) - { - continue; - } - - if (prop == "id") - { - continue; - } - - var property = contract.Properties.GetClosestMatchProperty(prop); - - // Ignore properties decorated with [JsonIgnore]. - if (property != null && property.Ignored) - { - continue; - } - - // Ignore nulls - object propValue = obj[prop]; - if (propValue == null) - { - continue; - } - - // Check if this property is marked for detachment: either by the presence of "@" at the beginning of the name, or by the presence of a DetachProperty attribute on a typed property. 
- if (property != null) - { - var detachableAttributes = property.AttributeProvider.GetAttributes(typeof(DetachProperty), true); - if (detachableAttributes.Count > 0) - { - DetachLineage.Add(((DetachProperty)detachableAttributes[0]).Detachable); - } - else - { - DetachLineage.Add(false); - } - - var chunkableAttributes = property.AttributeProvider.GetAttributes(typeof(Chunkable), true); - if (chunkableAttributes.Count > 0) - { - //DetachLineage.Add(true); // NOOPE - serializer.Context = new StreamingContext(StreamingContextStates.Other, chunkableAttributes[0]); - } - else - { - //DetachLineage.Add(false); - serializer.Context = new StreamingContext(); - } - } - else if (prop.StartsWith("@")) // Convention check for dynamically added properties. - { - DetachLineage.Add(true); - - var chunkSyntax = new Regex(@"^@\((\d*)\)"); - - if (chunkSyntax.IsMatch(prop)) - { - int chunkSize; - var match = chunkSyntax.Match(prop); - int.TryParse(match.Groups[match.Groups.Count - 1].Value, out chunkSize); - serializer.Context = new StreamingContext(StreamingContextStates.Other, - chunkSize > 0 ? new Chunkable(chunkSize) : new Chunkable()); - } - else - { - serializer.Context = new StreamingContext(); - } - } + var detachableAttributes = property.AttributeProvider.GetAttributes( + typeof(DetachProperty), + true + ); + if (detachableAttributes.Count > 0) + DetachLineage.Add(((DetachProperty)detachableAttributes[0]).Detachable); else - { DetachLineage.Add(false); - } - - // Set and store a reference, if it is marked as detachable and the transport is not null. - if (WriteTransports != null && WriteTransports.Count != 0 && propValue is Base && DetachLineage[DetachLineage.Count - 1]) - { - var what = JToken.FromObject(propValue, serializer); // Trigger next. - if (CancellationToken.IsCancellationRequested) - { - return; // Check for cancellation - } - - if (what == null) - { - return; // HACK: Prevent nulls from borking our serialization on nested schema object refs. (i.e. 
Line has @SchemaObject, that has ref to line) - } + var chunkableAttributes = property.AttributeProvider.GetAttributes( + typeof(Chunkable), + true + ); + if (chunkableAttributes.Count > 0) + //DetachLineage.Add(true); // NOOPE + serializer.Context = new StreamingContext( + StreamingContextStates.Other, + chunkableAttributes[0] + ); + else + //DetachLineage.Add(false); + serializer.Context = new StreamingContext(); + } + else if (prop.StartsWith("@")) // Convention check for dynamically added properties. + { + DetachLineage.Add(true); - var refHash = ((JObject)what).GetValue("id").ToString(); + var chunkSyntax = new Regex(@"^@\((\d*)\)"); - var reference = new ObjectReference() { referencedId = refHash }; - TrackReferenceInTree(refHash); - jo.Add(prop, JToken.FromObject(reference)); + if (chunkSyntax.IsMatch(prop)) + { + int chunkSize; + var match = chunkSyntax.Match(prop); + int.TryParse(match.Groups[match.Groups.Count - 1].Value, out chunkSize); + serializer.Context = new StreamingContext( + StreamingContextStates.Other, + chunkSize > 0 ? new Chunkable(chunkSize) : new Chunkable() + ); } else { - jo.Add(prop, JToken.FromObject(propValue, serializer)); // Default route + serializer.Context = new StreamingContext(); } - - // Pop detach lineage. 
If you don't get this, remember this thing moves ONLY FORWARD, DEPTH FIRST - DetachLineage.RemoveAt(DetachLineage.Count - 1); - // Refresh the streaming context to remove chunking flag - serializer.Context = new StreamingContext(); } - - // Check if we actually have any transports present that would warrant a - if ((WriteTransports != null && WriteTransports.Count != 0) && RefMinDepthTracker.ContainsKey(Lineage[Lineage.Count - 1])) + else { - jo.Add("__closure", JToken.FromObject(RefMinDepthTracker[Lineage[Lineage.Count - 1]])); + DetachLineage.Add(false); } - var hash = Models.Utilities.hashString(jo.ToString()); - if (!jo.ContainsKey("id")) + // Set and store a reference, if it is marked as detachable and the transport is not null. + if ( + WriteTransports != null + && WriteTransports.Count != 0 + && propValue is Base + && DetachLineage[DetachLineage.Count - 1] + ) { - jo.Add("id", JToken.FromObject(hash)); - } - jo.WriteTo(writer); + var what = JToken.FromObject(propValue, serializer); // Trigger next. - if ((DetachLineage.Count == 0 || DetachLineage[DetachLineage.Count - 1]) && WriteTransports != null && WriteTransports.Count != 0) - { - var objString = jo.ToString(writer.Formatting); - var objId = jo["id"].Value(); + if (CancellationToken.IsCancellationRequested) + return; // Check for cancellation - OnProgressAction?.Invoke("S", 1); + if (what == null) + return; // HACK: Prevent nulls from borking our serialization on nested schema object refs. (i.e. 
Line has @SchemaObject, that has ref to line) - foreach (var transport in WriteTransports) - { - if (CancellationToken.IsCancellationRequested) - { - continue; // Check for cancellation - } + var refHash = ((JObject)what).GetValue("id").ToString(); - transport.SaveObject(objId, objString); - } + var reference = new ObjectReference() { referencedId = refHash }; + TrackReferenceInTree(refHash); + jo.Add(prop, JToken.FromObject(reference)); + } + else + { + jo.Add(prop, JToken.FromObject(propValue, serializer)); // Default route } - // Pop lineage tracker - Lineage.RemoveAt(Lineage.Count - 1); - return; + // Pop detach lineage. If you don't get this, remember this thing moves ONLY FORWARD, DEPTH FIRST + DetachLineage.RemoveAt(DetachLineage.Count - 1); + // Refresh the streaming context to remove chunking flag + serializer.Context = new StreamingContext(); } - ///////////////////////////////////// - // Path four: lists/arrays & dicts - ///////////////////////////////////// - - if (CancellationToken.IsCancellationRequested) + // Check if we actually have any transports present that would warrant a + if ( + WriteTransports != null + && WriteTransports.Count != 0 + && RefMinDepthTracker.ContainsKey(Lineage[Lineage.Count - 1]) + ) + jo.Add("__closure", JToken.FromObject(RefMinDepthTracker[Lineage[Lineage.Count - 1]])); + + var hash = Utilities.hashString(jo.ToString()); + if (!jo.ContainsKey("id")) + jo.Add("id", JToken.FromObject(hash)); + jo.WriteTo(writer); + + if ( + (DetachLineage.Count == 0 || DetachLineage[DetachLineage.Count - 1]) + && WriteTransports != null + && WriteTransports.Count != 0 + ) { - return; // Check for cancellation - } - - var type = value.GetType(); + var objString = jo.ToString(writer.Formatting); + var objId = jo["id"].Value(); - // TODO: List handling and dictionary serialisation handling can be sped up significantly if we first check by their inner type. 
- // This handles a broader case in which we are, essentially, checking only for object[] or List / Dictionary cases. - // A much faster approach is to check for List, where primitive = string, number, etc. and directly serialize it in full. - // Same goes for dictionaries. - if (typeof(IEnumerable).IsAssignableFrom(type) && !typeof(IDictionary).IsAssignableFrom(type) && type != typeof(string)) - { + OnProgressAction?.Invoke("S", 1); - if (TotalProcessedCount == 0 && FirstEntry) + foreach (var transport in WriteTransports) { - FirstEntry = false; - FirstEntryWasListOrDict = true; - TotalProcessedCount += 1; - DetachLineage.Add(WriteTransports != null && WriteTransports.Count != 0 ? true : false); + if (CancellationToken.IsCancellationRequested) + continue; // Check for cancellation + + transport.SaveObject(objId, objString); } + } - JArray arr = new JArray(); + // Pop lineage tracker + Lineage.RemoveAt(Lineage.Count - 1); + return; + } - // Chunking large lists into manageable parts. - if (DetachLineage[DetachLineage.Count - 1] && serializer.Context.Context is Chunkable chunkInfo) - { - var maxCount = chunkInfo.MaxObjCountPerChunk; - var i = 0; - var chunkList = new List(); - var currChunk = new DataChunk(); + ///////////////////////////////////// + // Path four: lists/arrays & dicts + ///////////////////////////////////// - foreach (var arrValue in ((IEnumerable)value)) - { - if (i == maxCount) - { - if (currChunk.data.Count != 0) - { - chunkList.Add(currChunk); - } - - currChunk = new DataChunk(); - i = 0; - } - currChunk.data.Add(arrValue); - i++; - } + if (CancellationToken.IsCancellationRequested) + return; // Check for cancellation - if (currChunk.data.Count != 0) - { - chunkList.Add(currChunk); - } - value = chunkList; + var type = value.GetType(); - } + // TODO: List handling and dictionary serialisation handling can be sped up significantly if we first check by their inner type. 
+ // This handles a broader case in which we are, essentially, checking only for object[] or List / Dictionary cases. + // A much faster approach is to check for List, where primitive = string, number, etc. and directly serialize it in full. + // Same goes for dictionaries. + if ( + typeof(IEnumerable).IsAssignableFrom(type) + && !typeof(IDictionary).IsAssignableFrom(type) + && type != typeof(string) + ) + { + if (TotalProcessedCount == 0 && FirstEntry) + { + FirstEntry = false; + FirstEntryWasListOrDict = true; + TotalProcessedCount += 1; + DetachLineage.Add(WriteTransports != null && WriteTransports.Count != 0 ? true : false); + } - foreach (var arrValue in ((IEnumerable)value)) - { - if (CancellationToken.IsCancellationRequested) - { - return; // Check for cancellation - } + JArray arr = new(); - if (arrValue == null) - { - continue; - } + // Chunking large lists into manageable parts. + if ( + DetachLineage[DetachLineage.Count - 1] && serializer.Context.Context is Chunkable chunkInfo + ) + { + var maxCount = chunkInfo.MaxObjCountPerChunk; + var i = 0; + var chunkList = new List(); + var currChunk = new DataChunk(); - if (WriteTransports != null && WriteTransports.Count != 0 && arrValue is Base && DetachLineage[DetachLineage.Count - 1]) + foreach (var arrValue in (IEnumerable)value) + { + if (i == maxCount) { - var what = JToken.FromObject(arrValue, serializer); // Trigger next - - var refHash = ((JObject)what).GetValue("id").ToString(); + if (currChunk.data.Count != 0) + chunkList.Add(currChunk); - var reference = new ObjectReference() { referencedId = refHash }; - TrackReferenceInTree(refHash); - arr.Add(JToken.FromObject(reference)); - } - else - { - arr.Add(JToken.FromObject(arrValue, serializer)); // Default route + currChunk = new DataChunk(); + i = 0; } + currChunk.data.Add(arrValue); + i++; } + if (currChunk.data.Count != 0) + chunkList.Add(currChunk); + value = chunkList; + } + + foreach (var arrValue in (IEnumerable)value) + { if 
(CancellationToken.IsCancellationRequested) - { return; // Check for cancellation - } - arr.WriteTo(writer); + if (arrValue == null) + continue; - if (DetachLineage.Count == 1 && FirstEntryWasListOrDict) // are we in a list entry point case? + if ( + WriteTransports != null + && WriteTransports.Count != 0 + && arrValue is Base + && DetachLineage[DetachLineage.Count - 1] + ) { - DetachLineage.RemoveAt(0); - } + var what = JToken.FromObject(arrValue, serializer); // Trigger next - return; + var refHash = ((JObject)what).GetValue("id").ToString(); + + var reference = new ObjectReference() { referencedId = refHash }; + TrackReferenceInTree(refHash); + arr.Add(JToken.FromObject(reference)); + } + else + { + arr.Add(JToken.FromObject(arrValue, serializer)); // Default route + } } if (CancellationToken.IsCancellationRequested) - { return; // Check for cancellation - } - if (typeof(IDictionary).IsAssignableFrom(type)) - { - if (TotalProcessedCount == 0 && FirstEntry) - { - FirstEntry = false; - FirstEntryWasListOrDict = true; - TotalProcessedCount += 1; - DetachLineage.Add(WriteTransports != null && WriteTransports.Count != 0 ? true : false); - } - var dict = value as IDictionary; - var dictJo = new JObject(); - foreach (DictionaryEntry kvp in dict) - { - if (CancellationToken.IsCancellationRequested) - { - return; // Check for cancellation - } + arr.WriteTo(writer); - if (kvp.Value == null) - { - continue; - } + if (DetachLineage.Count == 1 && FirstEntryWasListOrDict) // are we in a list entry point case? 
+ DetachLineage.RemoveAt(0); - JToken jToken; - if (WriteTransports != null && WriteTransports.Count != 0 && kvp.Value is Base && DetachLineage[DetachLineage.Count - 1]) - { - var what = JToken.FromObject(kvp.Value, serializer); // Trigger next - var refHash = ((JObject)what).GetValue("id").ToString(); + return; + } - var reference = new ObjectReference() { referencedId = refHash }; - TrackReferenceInTree(refHash); - jToken = JToken.FromObject(reference); - } - else - { - jToken = JToken.FromObject(kvp.Value, serializer); // Default route - } - dictJo.Add(kvp.Key.ToString(), jToken); - } - dictJo.WriteTo(writer); + if (CancellationToken.IsCancellationRequested) + return; // Check for cancellation + if (typeof(IDictionary).IsAssignableFrom(type)) + { + if (TotalProcessedCount == 0 && FirstEntry) + { + FirstEntry = false; + FirstEntryWasListOrDict = true; + TotalProcessedCount += 1; + DetachLineage.Add(WriteTransports != null && WriteTransports.Count != 0 ? true : false); + } + var dict = value as IDictionary; + var dictJo = new JObject(); + foreach (DictionaryEntry kvp in dict) + { if (CancellationToken.IsCancellationRequested) - { return; // Check for cancellation - } - if (DetachLineage.Count == 1 && FirstEntryWasListOrDict) // are we in a dictionary entry point case? 
+ if (kvp.Value == null) + continue; + + JToken jToken; + if ( + WriteTransports != null + && WriteTransports.Count != 0 + && kvp.Value is Base + && DetachLineage[DetachLineage.Count - 1] + ) { - DetachLineage.RemoveAt(0); - } + var what = JToken.FromObject(kvp.Value, serializer); // Trigger next + var refHash = ((JObject)what).GetValue("id").ToString(); - return; + var reference = new ObjectReference() { referencedId = refHash }; + TrackReferenceInTree(refHash); + jToken = JToken.FromObject(reference); + } + else + { + jToken = JToken.FromObject(kvp.Value, serializer); // Default route + } + dictJo.Add(kvp.Key.ToString(), jToken); } - - ///////////////////////////////////// - // Path five: everything else (enums?) - ///////////////////////////////////// + dictJo.WriteTo(writer); if (CancellationToken.IsCancellationRequested) - { return; // Check for cancellation - } - FirstEntry = false; - var lastCall = JToken.FromObject(value); // bypasses this converter as we do not pass in the serializer - lastCall.WriteTo(writer); + if (DetachLineage.Count == 1 && FirstEntryWasListOrDict) // are we in a dictionary entry point case? + DetachLineage.RemoveAt(0); + + return; } - #endregion + ///////////////////////////////////// + // Path five: everything else (enums?) 
+ ///////////////////////////////////// + + if (CancellationToken.IsCancellationRequested) + return; // Check for cancellation + FirstEntry = false; + var lastCall = JToken.FromObject(value); // bypasses this converter as we do not pass in the serializer + lastCall.WriteTo(writer); } + #endregion } diff --git a/Core/Core/Serialisation/BaseObjectSerializerV2.cs b/Core/Core/Serialisation/BaseObjectSerializerV2.cs index 2a6ed72426..ef9136c1d6 100644 --- a/Core/Core/Serialisation/BaseObjectSerializerV2.cs +++ b/Core/Core/Serialisation/BaseObjectSerializerV2.cs @@ -1,7 +1,9 @@ -using System; +using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics; +using System.Drawing; +using System.Globalization; using System.Linq; using System.Numerics; using System.Reflection; @@ -10,387 +12,448 @@ using Speckle.Core.Models; using Speckle.Core.Transports; using Speckle.Newtonsoft.Json; +using Utilities = Speckle.Core.Models.Utilities; -namespace Speckle.Core.Serialisation -{ +namespace Speckle.Core.Serialisation; - public class BaseObjectSerializerV2 - { - public struct PropertyAttributeInfo - { - public PropertyAttributeInfo(bool isDetachable, bool isChunkable, int chunkSize, JsonPropertyAttribute jsonPropertyAttribute) - { - IsDetachable = isDetachable || isChunkable; - IsChunkable = isChunkable; - ChunkSize = chunkSize; - JsonPropertyInfo = jsonPropertyAttribute; - } +public class BaseObjectSerializerV2 +{ + private Stopwatch _stopwatch = new(); + private bool Busy = false; - public bool IsDetachable; - public bool IsChunkable; - public int ChunkSize; - public JsonPropertyAttribute JsonPropertyInfo; - } + private Regex ChunkPropertyNameRegex = new(@"^@\((\d*)\)"); + private List> ParentClosures = new(); - /// - /// Property that describes the type of the object. 
- /// - public string TypeDiscriminator = "speckle_type"; + private HashSet ParentObjects = new(); - public CancellationToken CancellationToken { get; set; } + /// + /// Property that describes the type of the object. + /// + public string TypeDiscriminator = "speckle_type"; - /// - /// The sync transport. This transport will be used synchronously. - /// - public List WriteTransports { get; set; } = new List(); + private Dictionary> TypedPropertiesCache = + new(); - public Action OnProgressAction { get; set; } + public BaseObjectSerializerV2() { } - public Action OnErrorAction { get; set; } + public CancellationToken CancellationToken { get; set; } - private Regex ChunkPropertyNameRegex = new Regex(@"^@\((\d*)\)"); + /// + /// The sync transport. This transport will be used synchronously. + /// + public List WriteTransports { get; set; } = new(); - private Dictionary> TypedPropertiesCache = new Dictionary>(); - private List> ParentClosures = new List>(); - private bool Busy = false; + public Action OnProgressAction { get; set; } - private HashSet ParentObjects = new HashSet(); + public Action OnErrorAction { get; set; } - // duration diagnostic stuff - public TimeSpan Elapsed => _stopwatch.Elapsed; - private Stopwatch _stopwatch = new Stopwatch(); + // duration diagnostic stuff + public TimeSpan Elapsed => _stopwatch.Elapsed; - public BaseObjectSerializerV2() + public string Serialize(Base baseObj) + { + if (Busy) + throw new Exception( + "A serializer instance can serialize only 1 object at a time. Consider creating multiple serializer instances" + ); + try { - + _stopwatch.Start(); + Busy = true; + Dictionary converted = + PreserializeObject(baseObj, true) as Dictionary; + string serialized = Dict2Json(converted); + StoreObject(converted["id"] as string, serialized); + return serialized; } - - public string Serialize(Base baseObj) + finally { - if (Busy) - throw new Exception("A serializer instance can serialize only 1 object at a time. 
Consider creating multiple serializer instances"); - try - { - _stopwatch.Start(); - Busy = true; - Dictionary converted = PreserializeObject(baseObj, true) as Dictionary; - String serialized = Dict2Json(converted); - StoreObject(converted["id"] as string, serialized); - return serialized; - } - finally - { - ParentClosures = new List>(); // cleanup in case of exceptions - ParentObjects = new HashSet(); - Busy = false; - _stopwatch.Stop(); - } + ParentClosures = new List>(); // cleanup in case of exceptions + ParentObjects = new HashSet(); + Busy = false; + _stopwatch.Stop(); } + } - // `Preserialize` means transforming all objects into the final form that will appear in json, with basic .net objects - // (primitives, lists and dictionaries with string keys) - public object PreserializeObject(object obj, bool computeClosures = false, PropertyAttributeInfo inheritedDetachInfo = default(PropertyAttributeInfo)) - { - // handle null objects and also check for cancelation - if (obj == null || CancellationToken.IsCancellationRequested) - return null; + // `Preserialize` means transforming all objects into the final form that will appear in json, with basic .net objects + // (primitives, lists and dictionaries with string keys) + public object PreserializeObject( + object obj, + bool computeClosures = false, + PropertyAttributeInfo inheritedDetachInfo = default + ) + { + // handle null objects and also check for cancelation + if (obj == null || CancellationToken.IsCancellationRequested) + return null; - Type type = obj.GetType(); + Type type = obj.GetType(); - if (type.IsPrimitive || obj is string) - return obj; + if (type.IsPrimitive || obj is string) + return obj; - if (obj is Base b) - { - // Complex enough to deserve its own function - return PreserializeBase(b, computeClosures, inheritedDetachInfo); - } + if (obj is Base b) + // Complex enough to deserve its own function + return PreserializeBase(b, computeClosures, inheritedDetachInfo); - if (obj is IDictionary d) + 
if (obj is IDictionary d) + { + Dictionary ret = new(d.Count); + foreach (DictionaryEntry kvp in d) { - Dictionary ret = new Dictionary(d.Count); - foreach (DictionaryEntry kvp in d) - { - object converted = PreserializeObject(kvp.Value, inheritedDetachInfo: inheritedDetachInfo); - if (converted != null) - ret[kvp.Key.ToString()] = converted; - } - return ret; + object converted = PreserializeObject(kvp.Value, inheritedDetachInfo: inheritedDetachInfo); + if (converted != null) + ret[kvp.Key.ToString()] = converted; } + return ret; + } - if (obj is IEnumerable e) - { - List ret; - if (e is IList list) - ret = new List(list.Count); - else - ret = new List(); - foreach (object element in e) - ret.Add(PreserializeObject(element, inheritedDetachInfo: inheritedDetachInfo)); - return ret; - } + if (obj is IEnumerable e) + { + List ret; + if (e is IList list) + ret = new List(list.Count); + else + ret = new List(); + foreach (object element in e) + ret.Add(PreserializeObject(element, inheritedDetachInfo: inheritedDetachInfo)); + return ret; + } - if (obj is ObjectReference r) - { - Dictionary ret = new Dictionary(); - ret["speckle_type"] = r.speckle_type; - ret["referencedId"] = r.referencedId; - return ret; - } + if (obj is ObjectReference r) + { + Dictionary ret = new(); + ret["speckle_type"] = r.speckle_type; + ret["referencedId"] = r.referencedId; + return ret; + } - if (obj is Enum) + if (obj is Enum) + return (int)obj; + + // Support for simple types + if (obj is Guid g) + return g.ToString(); + if (obj is Color c) + return c.ToArgb(); + if (obj is DateTime t) + return t.ToString("o", CultureInfo.InvariantCulture); + if (obj is Matrix4x4 m) + return new List() { - return (int)obj; - } + m.M11, + m.M12, + m.M13, + m.M14, + m.M21, + m.M22, + m.M23, + m.M24, + m.M31, + m.M32, + m.M33, + m.M34, + m.M41, + m.M42, + m.M43, + m.M44 + }; + + throw new Exception("Unsupported value in serialization: " + type.ToString()); + } - // Support for simple types - if (obj is Guid g) - 
{ - return g.ToString(); - } - if (obj is System.Drawing.Color c) - { - return c.ToArgb(); - } - if (obj is DateTime t) - { - return t.ToString("o", System.Globalization.CultureInfo.InvariantCulture); - } - if (obj is Matrix4x4 m) + public object PreserializeBase( + Base baseObj, + bool computeClosures = false, + PropertyAttributeInfo inheritedDetachInfo = default + ) + { + // handle circular references + if (ParentObjects.Contains(baseObj)) + return null; + ParentObjects.Add(baseObj); + + Dictionary convertedBase = new(); + Dictionary closure = new(); + if (computeClosures || inheritedDetachInfo.IsDetachable || baseObj is Blob) + ParentClosures.Add(closure); + + List<(PropertyInfo, PropertyAttributeInfo)> typedProperties = GetTypedPropertiesWithCache( + baseObj + ); + IEnumerable dynamicProperties = baseObj.GetDynamicMembers(); + + // propertyName -> (originalValue, isDetachable, isChunkable, chunkSize) + Dictionary allProperties = new(); + + // Construct `allProperties`: Add typed properties + foreach ((PropertyInfo propertyInfo, PropertyAttributeInfo detachInfo) in typedProperties) + { + object baseValue = propertyInfo.GetValue(baseObj); + allProperties[propertyInfo.Name] = (baseValue, detachInfo); + } + + // Construct `allProperties`: Add dynamic properties + foreach (string propName in dynamicProperties) + { + if (propName.StartsWith("__")) + continue; + object baseValue = baseObj[propName]; + bool isDetachable = propName.StartsWith("@"); + bool isChunkable = false; + int chunkSize = 1000; + + if (ChunkPropertyNameRegex.IsMatch(propName)) { - return new List() - { - m.M11, m.M12, m.M13, m.M14, - m.M21, m.M22, m.M23, m.M24, - m.M31, m.M32, m.M33, m.M34, - m.M41, m.M42, m.M43, m.M44 - }; + var match = ChunkPropertyNameRegex.Match(propName); + isChunkable = int.TryParse(match.Groups[match.Groups.Count - 1].Value, out chunkSize); } + allProperties[propName] = ( + baseValue, + new PropertyAttributeInfo(isDetachable, isChunkable, chunkSize, null) + ); + } + + // 
Convert all properties + foreach (var prop in allProperties) + { + object convertedValue = PreserializeBasePropertyValue(prop.Value.Item1, prop.Value.Item2); + + if ( + convertedValue == null + && prop.Value.Item2.JsonPropertyInfo != null + && prop.Value.Item2.JsonPropertyInfo.NullValueHandling == NullValueHandling.Ignore + ) + continue; - throw new Exception("Unsupported value in serialization: " + type.ToString()); + convertedBase[prop.Key] = convertedValue; } - public object PreserializeBase(Base baseObj, bool computeClosures = false, PropertyAttributeInfo inheritedDetachInfo = default(PropertyAttributeInfo)) + if (baseObj is Blob blob) + convertedBase["id"] = blob.id; + else + convertedBase["id"] = ComputeId(convertedBase); + + if (closure.Count > 0) + convertedBase["__closure"] = closure; + if (computeClosures || inheritedDetachInfo.IsDetachable || baseObj is Blob) + ParentClosures.RemoveAt(ParentClosures.Count - 1); + + ParentObjects.Remove(baseObj); + + if (baseObj is Blob myBlob) { - // handle circular references - if (ParentObjects.Contains(baseObj)) - return null; - ParentObjects.Add(baseObj); + StoreBlob(myBlob); + UpdateParentClosures($"blob:{convertedBase["id"]}"); + return convertedBase; + } - Dictionary convertedBase = new Dictionary(); - Dictionary closure = new Dictionary(); - if (computeClosures || inheritedDetachInfo.IsDetachable || baseObj is Blob) - ParentClosures.Add(closure); + if (inheritedDetachInfo.IsDetachable && WriteTransports != null && WriteTransports.Count > 0) + { + string json = Dict2Json(convertedBase); + StoreObject(convertedBase["id"] as string, json); + ObjectReference objRef = new() { referencedId = convertedBase["id"] as string }; + object objRefConverted = PreserializeObject(objRef); + UpdateParentClosures(convertedBase["id"] as string); + OnProgressAction?.Invoke("S", 1); + return objRefConverted; + } - List<(PropertyInfo, PropertyAttributeInfo)> typedProperties = GetTypedPropertiesWithCache(baseObj); - IEnumerable 
dynamicProperties = baseObj.GetDynamicMembers(); + return convertedBase; + } - // propertyName -> (originalValue, isDetachable, isChunkable, chunkSize) - Dictionary allProperties = new Dictionary(); + private object PreserializeBasePropertyValue(object baseValue, PropertyAttributeInfo detachInfo) + { + bool computeClosuresForChild = + (detachInfo.IsDetachable || detachInfo.IsChunkable) + && WriteTransports != null + && WriteTransports.Count > 0; - // Construct `allProperties`: Add typed properties - foreach ((PropertyInfo propertyInfo, PropertyAttributeInfo detachInfo) in typedProperties) - { - object baseValue = propertyInfo.GetValue(baseObj); - allProperties[propertyInfo.Name] = (baseValue, detachInfo); - } + // If there are no WriteTransports, keep everything attached. + if (WriteTransports == null || WriteTransports.Count == 0) + return PreserializeObject(baseValue, inheritedDetachInfo: detachInfo); - // Construct `allProperties`: Add dynamic properties - foreach (string propName in dynamicProperties) + if (baseValue is IEnumerable && detachInfo.IsChunkable) + { + List chunks = new(); + DataChunk crtChunk = new(); + crtChunk.data = new List(detachInfo.ChunkSize); + foreach (object element in (IEnumerable)baseValue) { - if (propName.StartsWith("__")) - continue; - object baseValue = baseObj[propName]; - bool isDetachable = propName.StartsWith("@"); - bool isChunkable = false; - int chunkSize = 1000; - - if (ChunkPropertyNameRegex.IsMatch(propName)) + crtChunk.data.Add(element); + if (crtChunk.data.Count >= detachInfo.ChunkSize) { - var match = ChunkPropertyNameRegex.Match(propName); - isChunkable = int.TryParse(match.Groups[match.Groups.Count - 1].Value, out chunkSize); + chunks.Add(crtChunk); + crtChunk = new DataChunk(); + crtChunk.data = new List(detachInfo.ChunkSize); } - allProperties[propName] = (baseValue, new PropertyAttributeInfo(isDetachable, isChunkable, chunkSize, null)); } + if (crtChunk.data.Count > 0) + chunks.Add(crtChunk); + return 
PreserializeObject( + chunks, + inheritedDetachInfo: new PropertyAttributeInfo(true, false, 0, null) + ); + } - // Convert all properties - foreach (var prop in allProperties) - { - object convertedValue = PreserializeBasePropertyValue(prop.Value.Item1, prop.Value.Item2); + return PreserializeObject(baseValue, inheritedDetachInfo: detachInfo); + } - if (convertedValue == null && prop.Value.Item2.JsonPropertyInfo != null && prop.Value.Item2.JsonPropertyInfo.NullValueHandling == NullValueHandling.Ignore) - continue; + private void UpdateParentClosures(string objectId) + { + for (int parentLevel = 0; parentLevel < ParentClosures.Count; parentLevel++) + { + int childDepth = ParentClosures.Count - parentLevel; + if (!ParentClosures[parentLevel].ContainsKey(objectId)) + ParentClosures[parentLevel][objectId] = childDepth; + ParentClosures[parentLevel][objectId] = Math.Min( + ParentClosures[parentLevel][objectId], + childDepth + ); + } + } - convertedBase[prop.Key] = convertedValue; - } + private string ComputeId(Dictionary obj) + { + string serialized = JsonConvert.SerializeObject(obj); + string hash = Utilities.hashString(serialized); + return hash; + } - if (baseObj is Blob blob) - { - convertedBase["id"] = blob.id; - } - else - { - convertedBase["id"] = ComputeId(convertedBase); - } + private string Dict2Json(Dictionary obj) + { + string serialized = JsonConvert.SerializeObject(obj); + return serialized; + } - if (closure.Count > 0) - convertedBase["__closure"] = closure; - if (computeClosures || inheritedDetachInfo.IsDetachable || baseObj is Blob) - ParentClosures.RemoveAt(ParentClosures.Count - 1); + private void StoreObject(string objectId, string objectJson) + { + if (WriteTransports == null) + return; + _stopwatch.Stop(); + foreach (var transport in WriteTransports) + transport.SaveObject(objectId, objectJson); + _stopwatch.Start(); + } - ParentObjects.Remove(baseObj); + private void StoreBlob(Blob obj) + { + if (WriteTransports == null) + return; + bool 
hasBlobTransport = false; - if (baseObj is Blob myBlob) - { - StoreBlob(myBlob); - UpdateParentClosures($"blob:{convertedBase["id"]}"); - return convertedBase; - } + _stopwatch.Stop(); - if (inheritedDetachInfo.IsDetachable && WriteTransports != null && WriteTransports.Count > 0) + foreach (var transport in WriteTransports) + if (transport is IBlobCapableTransport blobTransport) { - string json = Dict2Json(convertedBase); - StoreObject(convertedBase["id"] as string, json); - ObjectReference objRef = new ObjectReference() { referencedId = convertedBase["id"] as string }; - object objRefConverted = PreserializeObject(objRef); - UpdateParentClosures(convertedBase["id"] as string); - OnProgressAction?.Invoke("S", 1); - return objRefConverted; + hasBlobTransport = true; + blobTransport.SaveBlob(obj); } - return convertedBase; - } + _stopwatch.Start(); + if (!hasBlobTransport) + throw new Exception( + "Object tree contains a Blob (file), but the serialiser has no blob saving capable transports." + ); + } - private object PreserializeBasePropertyValue(object baseValue, PropertyAttributeInfo detachInfo) - { - bool computeClosuresForChild = (detachInfo.IsDetachable || detachInfo.IsChunkable) && WriteTransports != null && WriteTransports.Count > 0; + // (propertyInfo, isDetachable, isChunkable, chunkSize, JsonPropertyAttribute) + private List<(PropertyInfo, PropertyAttributeInfo)> GetTypedPropertiesWithCache(Base baseObj) + { + Type type = baseObj.GetType(); + IEnumerable typedProperties = baseObj.GetInstanceMembers(); - // If there are no WriteTransports, keep everything attached. 
- if (WriteTransports == null || WriteTransports.Count == 0) - return PreserializeObject(baseValue, inheritedDetachInfo: detachInfo); + if (TypedPropertiesCache.ContainsKey(type.FullName)) + return TypedPropertiesCache[type.FullName]; - if (baseValue is IEnumerable && detachInfo.IsChunkable) - { - List chunks = new List(); - DataChunk crtChunk = new DataChunk(); - crtChunk.data = new List(detachInfo.ChunkSize); - foreach (object element in (IEnumerable)baseValue) + List<(PropertyInfo, PropertyAttributeInfo)> ret = new(); + + foreach (PropertyInfo typedProperty in typedProperties) + { + if (typedProperty.Name.StartsWith("__") || typedProperty.Name == "id") + continue; + + // Check JsonIgnore like this to cover both Newtonsoft JsonIgnore and System.Text.Json JsonIgnore + // TODO: replace JsonIgnore from newtonsoft with JsonIgnore from Sys, and check this more properly. + bool jsonIgnore = false; + foreach (object attr in typedProperty.GetCustomAttributes(true)) + if (attr.GetType().Name.Contains("JsonIgnore")) { - crtChunk.data.Add(element); - if (crtChunk.data.Count >= detachInfo.ChunkSize) - { - chunks.Add(crtChunk); - crtChunk = new DataChunk(); - crtChunk.data = new List(detachInfo.ChunkSize); - } + jsonIgnore = true; + break; } - if (crtChunk.data.Count > 0) - chunks.Add(crtChunk); - return PreserializeObject(chunks, inheritedDetachInfo: new PropertyAttributeInfo(true, false, 0, null)); - } - - return PreserializeObject(baseValue, inheritedDetachInfo: detachInfo); + if (jsonIgnore) + continue; + + object baseValue = typedProperty.GetValue(baseObj); + + List detachableAttributes = typedProperty + .GetCustomAttributes(true) + .ToList(); + List chunkableAttributes = typedProperty + .GetCustomAttributes(true) + .ToList(); + bool isDetachable = detachableAttributes.Count > 0 && detachableAttributes[0].Detachable; + bool isChunkable = chunkableAttributes.Count > 0; + int chunkSize = isChunkable ? 
chunkableAttributes[0].MaxObjCountPerChunk : 1000; + JsonPropertyAttribute jsonPropertyAttribute = + typedProperty.GetCustomAttribute(); + ret.Add( + ( + typedProperty, + new PropertyAttributeInfo(isDetachable, isChunkable, chunkSize, jsonPropertyAttribute) + ) + ); } - private void UpdateParentClosures(string objectId) + TypedPropertiesCache[type.FullName] = ret; + return ret; + } + + public struct PropertyAttributeInfo : IEquatable + { + public PropertyAttributeInfo( + bool isDetachable, + bool isChunkable, + int chunkSize, + JsonPropertyAttribute jsonPropertyAttribute + ) { - for (int parentLevel = 0; parentLevel < ParentClosures.Count; parentLevel++) - { - int childDepth = ParentClosures.Count - parentLevel; - if (!ParentClosures[parentLevel].ContainsKey(objectId)) - ParentClosures[parentLevel][objectId] = childDepth; - ParentClosures[parentLevel][objectId] = Math.Min(ParentClosures[parentLevel][objectId], childDepth); - } + IsDetachable = isDetachable || isChunkable; + IsChunkable = isChunkable; + ChunkSize = chunkSize; + JsonPropertyInfo = jsonPropertyAttribute; } - private string ComputeId(Dictionary obj) + public bool IsDetachable; + public bool IsChunkable; + public int ChunkSize; + public JsonPropertyAttribute JsonPropertyInfo; + + public override bool Equals(object obj) { - string serialized = JsonConvert.SerializeObject(obj); - string hash = Models.Utilities.hashString(serialized); - return hash; + throw new NotImplementedException(); } - private string Dict2Json(Dictionary obj) + public override int GetHashCode() { - string serialized = JsonConvert.SerializeObject(obj); - return serialized; + throw new NotImplementedException(); } - private void StoreObject(string objectId, string objectJson) + public static bool operator ==(PropertyAttributeInfo left, PropertyAttributeInfo right) { - if (WriteTransports == null) - return; - _stopwatch.Stop(); - foreach (var transport in WriteTransports) - { - transport.SaveObject(objectId, objectJson); - } - 
_stopwatch.Start(); + return left.Equals(right); } - private void StoreBlob(Blob obj) + public static bool operator !=(PropertyAttributeInfo left, PropertyAttributeInfo right) { - if (WriteTransports == null) - return; - bool hasBlobTransport = false; - - _stopwatch.Stop(); - - foreach (var transport in WriteTransports) - { - if (transport is IBlobCapableTransport blobTransport) - { - hasBlobTransport = true; - blobTransport.SaveBlob(obj); - } - } - - _stopwatch.Start(); - if (!hasBlobTransport) - throw new Exception("Object tree contains a Blob (file), but the serialiser has no blob saving capable transports."); + return !(left == right); } - // (propertyInfo, isDetachable, isChunkable, chunkSize, JsonPropertyAttribute) - private List<(PropertyInfo, PropertyAttributeInfo)> GetTypedPropertiesWithCache(Base baseObj) + public bool Equals(PropertyAttributeInfo other) { - Type type = baseObj.GetType(); - IEnumerable typedProperties = baseObj.GetInstanceMembers(); - - if (TypedPropertiesCache.ContainsKey(type.FullName)) - return TypedPropertiesCache[type.FullName]; - - List<(PropertyInfo, PropertyAttributeInfo)> ret = new List<(PropertyInfo, PropertyAttributeInfo)>(); - - foreach (PropertyInfo typedProperty in typedProperties) - { - if (typedProperty.Name.StartsWith("__") || typedProperty.Name == "id") - continue; - - // Check JsonIgnore like this to cover both Newtonsoft JsonIgnore and System.Text.Json JsonIgnore - // TODO: replace JsonIgnore from newtonsoft with JsonIgnore from Sys, and check this more properly. 
- bool jsonIgnore = false; - foreach (object attr in typedProperty.GetCustomAttributes(true)) - if (attr.GetType().Name.Contains("JsonIgnore")) - { - jsonIgnore = true; - break; - } - if (jsonIgnore) - continue; - - object baseValue = typedProperty.GetValue(baseObj); - - List detachableAttributes = typedProperty.GetCustomAttributes(true).ToList(); - List chunkableAttributes = typedProperty.GetCustomAttributes(true).ToList(); - bool isDetachable = detachableAttributes.Count > 0 && detachableAttributes[0].Detachable; - bool isChunkable = chunkableAttributes.Count > 0; - int chunkSize = isChunkable ? chunkableAttributes[0].MaxObjCountPerChunk : 1000; - JsonPropertyAttribute jsonPropertyAttribute = typedProperty.GetCustomAttribute(); - ret.Add((typedProperty, new PropertyAttributeInfo(isDetachable, isChunkable, chunkSize, jsonPropertyAttribute))); - } - - TypedPropertiesCache[type.FullName] = ret; - return ret; + throw new NotImplementedException(); } } } diff --git a/Core/Core/Serialisation/BaseObjectSerialzerUtilities.cs b/Core/Core/Serialisation/BaseObjectSerialzerUtilities.cs index 2db1e2884f..a14fd0b14a 100644 --- a/Core/Core/Serialisation/BaseObjectSerialzerUtilities.cs +++ b/Core/Core/Serialisation/BaseObjectSerialzerUtilities.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections; using System.Collections.Generic; using System.Linq; @@ -13,386 +13,373 @@ using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Linq; using Speckle.Newtonsoft.Json.Serialization; +using Binder = Microsoft.CSharp.RuntimeBinder.Binder; -namespace Speckle.Core.Serialisation +namespace Speckle.Core.Serialisation; + +internal static class SerializationUtilities { - internal static class SerializationUtilities + #region Value handling + + internal static object HandleValue( + JToken value, + JsonSerializer serializer, + CancellationToken CancellationToken, + JsonProperty jsonProperty = null, + string TypeDiscriminator = "speckle_type" + ) { - #region Getting Types 
- - private static Dictionary cachedTypes = new Dictionary(); - private static Dictionary> typeProperties = new Dictionary>(); - private static Dictionary> onDeserializedCallbacks = new Dictionary>(); + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - internal static Type GetType(string objFullType) + if (value is JValue) { - lock (cachedTypes) - { - if (cachedTypes.ContainsKey(objFullType)) - { - return cachedTypes[objFullType]; - } - - var type = GetAtomicType(objFullType); - cachedTypes[objFullType] = type; - return type; - } + if (jsonProperty != null) + return value.ToObject(jsonProperty.PropertyType); + else + return ((JValue)value).Value; } - internal static Type GetAtomicType(string objFullType) - { - var objectTypes = objFullType.Split(':').Reverse(); - foreach (var typeName in objectTypes) - { - //TODO: rather than getting the type from the first loaded kit that has it, maybe - //we get it from a specific Kit - var type = KitManager.Types.FirstOrDefault(tp => tp.FullName == typeName); - if (type != null) - { - return type; - } - } - - return typeof(Base); - } - - internal static Dictionary GetTypePropeties(string objFullType) - { - lock (typeProperties) - { - if (!typeProperties.ContainsKey(objFullType)) - { - Dictionary ret = new Dictionary(); - Type type = GetType(objFullType); - PropertyInfo[] properties = type.GetProperties(); - foreach (PropertyInfo prop in properties) - ret[prop.Name.ToLower()] = prop; - typeProperties[objFullType] = ret; - } - return typeProperties[objFullType]; - } - } - - internal static List GetOnDeserializedCallbacks(string objFullType) - { - // return new List(); - lock (onDeserializedCallbacks) - { - // System.Runtime.Serialization.Ca - if (!onDeserializedCallbacks.ContainsKey(objFullType)) - { - List ret = new List(); - Type type = GetType(objFullType); - MethodInfo[] methods = type.GetMethods(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); - foreach (MethodInfo 
method in methods) - { - List onDeserializedAttributes = method.GetCustomAttributes(true).ToList(); - if (onDeserializedAttributes.Count > 0) - ret.Add(method); - } - onDeserializedCallbacks[objFullType] = ret; - } - return onDeserializedCallbacks[objFullType]; - } - } - - internal static Type GetSytemOrSpeckleType(string typeName) - { - var systemType = Type.GetType(typeName); - if (systemType != null) - { - return systemType; - } - return GetAtomicType(typeName); - } - - /// - /// Flushes kit's (discriminator, type) cache. Useful if you're dynamically loading more kits at runtime, that provide better coverage of what you're deserialising, and it's now somehow poisoned because the higher level types were not originally available. - /// - public static void FlushCachedTypes() - { - cachedTypes = new Dictionary(); - } - - #endregion - - #region Value handling - - internal static object HandleValue(JToken value, JsonSerializer serializer, CancellationToken CancellationToken, JsonProperty jsonProperty = null, string TypeDiscriminator = "speckle_type") + // Lists + if (value is JArray) { if (CancellationToken.IsCancellationRequested) - { return null; // Check for cancellation - } - if (value is JValue) + if (jsonProperty != null && jsonProperty.PropertyType.GetConstructor(Type.EmptyTypes) != null) { - if (jsonProperty != null) - { - return value.ToObject(jsonProperty.PropertyType); - } - else - { - return ((JValue)value).Value; - } - } + var arr = Activator.CreateInstance(jsonProperty.PropertyType); - // Lists - if (value is JArray) - { - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + var addMethod = arr.GetType().GetMethod("Add"); + var hasGenericType = jsonProperty.PropertyType.GenericTypeArguments.Count() != 0; - if (jsonProperty != null && jsonProperty.PropertyType.GetConstructor(Type.EmptyTypes) != null) + foreach (var val in (JArray)value) { - var arr = Activator.CreateInstance(jsonProperty.PropertyType); - - var 
addMethod = arr.GetType().GetMethod("Add"); - var hasGenericType = jsonProperty.PropertyType.GenericTypeArguments.Count() != 0; - - foreach (var val in ((JArray)value)) - { - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - if (val == null) - { - continue; - } + if (val == null) + continue; - var item = HandleValue(val, serializer, CancellationToken); + var item = HandleValue(val, serializer, CancellationToken); - if (item is DataChunk chunk) - { - foreach (var dataItem in chunk.data) + if (item is DataChunk chunk) + { + foreach (var dataItem in chunk.data) + if (hasGenericType && !jsonProperty.PropertyType.GenericTypeArguments[0].IsInterface) { - if (hasGenericType && !jsonProperty.PropertyType.GenericTypeArguments[0].IsInterface) - { - if (jsonProperty.PropertyType.GenericTypeArguments[0].IsAssignableFrom(dataItem.GetType())) - { - addMethod.Invoke(arr, new object[] { dataItem }); - } - else - { - addMethod.Invoke(arr, new object[] { Convert.ChangeType(dataItem, jsonProperty.PropertyType.GenericTypeArguments[0]) }); - } - } - else - { + if ( + jsonProperty.PropertyType.GenericTypeArguments[0].IsAssignableFrom( + dataItem.GetType() + ) + ) addMethod.Invoke(arr, new object[] { dataItem }); - } - } - } - else if (hasGenericType && !jsonProperty.PropertyType.GenericTypeArguments[0].IsInterface) - { - if (jsonProperty.PropertyType.GenericTypeArguments[0].IsAssignableFrom(item.GetType())) - { - addMethod.Invoke(arr, new object[] { item }); + else + addMethod.Invoke( + arr, + new object[] + { + Convert.ChangeType( + dataItem, + jsonProperty.PropertyType.GenericTypeArguments[0] + ) + } + ); } else { - addMethod.Invoke(arr, new object[] { Convert.ChangeType(item, jsonProperty.PropertyType.GenericTypeArguments[0]) }); + addMethod.Invoke(arr, new object[] { dataItem }); } - } - else - { + } + else if (hasGenericType && 
!jsonProperty.PropertyType.GenericTypeArguments[0].IsInterface) + { + if (jsonProperty.PropertyType.GenericTypeArguments[0].IsAssignableFrom(item.GetType())) addMethod.Invoke(arr, new object[] { item }); - } + else + addMethod.Invoke( + arr, + new object[] + { + Convert.ChangeType(item, jsonProperty.PropertyType.GenericTypeArguments[0]) + } + ); + } + else + { + addMethod.Invoke(arr, new object[] { item }); } - return arr; } - else if (jsonProperty != null) - { + return arr; + } + else if (jsonProperty != null) + { + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation + var arr = Activator.CreateInstance( + typeof(List<>).MakeGenericType(jsonProperty.PropertyType.GetElementType()) + ); + + foreach (var val in (JArray)value) + { if (CancellationToken.IsCancellationRequested) - { return null; // Check for cancellation - } - var arr = Activator.CreateInstance(typeof(List<>).MakeGenericType(jsonProperty.PropertyType.GetElementType())); + if (val == null) + continue; - foreach (var val in ((JArray)value)) + var item = HandleValue(val, serializer, CancellationToken); + if (item is DataChunk chunk) { - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } - - if (val == null) - { - continue; - } - - var item = HandleValue(val, serializer, CancellationToken); - if (item is DataChunk chunk) - { - foreach (var dataItem in chunk.data) - { - if (!jsonProperty.PropertyType.GetElementType().IsInterface) - { - ((IList)arr).Add(Convert.ChangeType(dataItem, jsonProperty.PropertyType.GetElementType())); - } - else - { - ((IList)arr).Add(dataItem); - } - } - } - else - { + foreach (var dataItem in chunk.data) if (!jsonProperty.PropertyType.GetElementType().IsInterface) - { - ((IList)arr).Add(Convert.ChangeType(item, jsonProperty.PropertyType.GetElementType())); - } + ((IList)arr).Add( + Convert.ChangeType(dataItem, jsonProperty.PropertyType.GetElementType()) + ); else - { - ((IList)arr).Add(item); - } - } + 
((IList)arr).Add(dataItem); + } + else + { + if (!jsonProperty.PropertyType.GetElementType().IsInterface) + ((IList)arr).Add( + Convert.ChangeType(item, jsonProperty.PropertyType.GetElementType()) + ); + else + ((IList)arr).Add(item); } - var actualArr = Array.CreateInstance(jsonProperty.PropertyType.GetElementType(), ((IList)arr).Count); - ((IList)arr).CopyTo(actualArr, 0); - return actualArr; } - else + var actualArr = Array.CreateInstance( + jsonProperty.PropertyType.GetElementType(), + ((IList)arr).Count + ); + ((IList)arr).CopyTo(actualArr, 0); + return actualArr; + } + else + { + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation + + var arr = new List(); + foreach (var val in (JArray)value) { if (CancellationToken.IsCancellationRequested) - { return null; // Check for cancellation - } - var arr = new List(); - foreach (var val in ((JArray)value)) - { - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + if (val == null) + continue; - if (val == null) - { - continue; - } - - var item = HandleValue(val, serializer, CancellationToken); + var item = HandleValue(val, serializer, CancellationToken); - if (item is DataChunk chunk) - { - arr.AddRange(chunk.data); - } - else - { - arr.Add(item); - } - } - return arr; + if (item is DataChunk chunk) + arr.AddRange(chunk.data); + else + arr.Add(item); } + return arr; } + } - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - if (value is JObject) + if (value is JObject) + { + if (((JObject)value).Property(TypeDiscriminator) != null) + return value.ToObject(serializer); + + var dict = + jsonProperty != null + ? 
Activator.CreateInstance(jsonProperty.PropertyType) + : new Dictionary(); + foreach (var prop in (JObject)value) { - if (((JObject)value).Property(TypeDiscriminator) != null) - { - return value.ToObject(serializer); - } - - var dict = jsonProperty != null ? Activator.CreateInstance(jsonProperty.PropertyType) : new Dictionary(); - foreach (var prop in ((JObject)value)) - { - if (CancellationToken.IsCancellationRequested) - { - return null; // Check for cancellation - } + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - object key = prop.Key; - if (jsonProperty != null) - { - key = Convert.ChangeType(prop.Key, jsonProperty.PropertyType.GetGenericArguments()[0]); - } ((IDictionary)dict)[key] = HandleValue(prop.Value, serializer, CancellationToken); - } - return dict; + object key = prop.Key; + if (jsonProperty != null) + key = Convert.ChangeType(prop.Key, jsonProperty.PropertyType.GetGenericArguments()[0]); + ((IDictionary)dict)[key] = HandleValue(prop.Value, serializer, CancellationToken); } - return null; + return dict; } + return null; + } + + #endregion + + #region Getting Types - #endregion + private static Dictionary cachedTypes = new(); - #region Abstract Handling + private static Dictionary> typeProperties = new(); - private static Dictionary cachedAbstractTypes = new Dictionary(); + private static Dictionary> onDeserializedCallbacks = new(); - internal static object HandleAbstractOriginalValue(JToken jToken, string assemblyQualifiedName, JsonSerializer serializer) + internal static Type GetType(string objFullType) + { + lock (cachedTypes) { - if (cachedAbstractTypes.ContainsKey(assemblyQualifiedName)) - { - return jToken.ToObject(cachedAbstractTypes[assemblyQualifiedName]); - } + if (cachedTypes.ContainsKey(objFullType)) + return cachedTypes[objFullType]; + + var type = GetAtomicType(objFullType); + cachedTypes[objFullType] = type; + return type; + } + } + + internal static Type GetAtomicType(string objFullType) + { + 
var objectTypes = objFullType.Split(':').Reverse(); + foreach (var typeName in objectTypes) + { + //TODO: rather than getting the type from the first loaded kit that has it, maybe + //we get it from a specific Kit + var type = KitManager.Types.FirstOrDefault(tp => tp.FullName == typeName); + if (type != null) + return type; + } - var pieces = assemblyQualifiedName.Split(',').Select(s => s.Trim()).ToArray(); + return typeof(Base); + } - var myAssembly = AppDomain.CurrentDomain.GetAssemblies().FirstOrDefault(ass => ass.GetName().Name == pieces[1]); - if (myAssembly == null) + internal static Dictionary GetTypePropeties(string objFullType) + { + lock (typeProperties) + { + if (!typeProperties.ContainsKey(objFullType)) { - throw new SpeckleException("Could not load abstract object's assembly."); + Dictionary ret = new(); + Type type = GetType(objFullType); + PropertyInfo[] properties = type.GetProperties(); + foreach (PropertyInfo prop in properties) + ret[prop.Name.ToLower()] = prop; + typeProperties[objFullType] = ret; } + return typeProperties[objFullType]; + } + } - var myType = myAssembly.GetType(pieces[0]); - if (myType == null) + internal static List GetOnDeserializedCallbacks(string objFullType) + { + // return new List(); + lock (onDeserializedCallbacks) + { + // System.Runtime.Serialization.Ca + if (!onDeserializedCallbacks.ContainsKey(objFullType)) { - throw new SpeckleException("Could not load abstract object's assembly."); + List ret = new(); + Type type = GetType(objFullType); + MethodInfo[] methods = type.GetMethods( + BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic + ); + foreach (MethodInfo method in methods) + { + List onDeserializedAttributes = method + .GetCustomAttributes(true) + .ToList(); + if (onDeserializedAttributes.Count > 0) + ret.Add(method); + } + onDeserializedCallbacks[objFullType] = ret; } - - cachedAbstractTypes[assemblyQualifiedName] = myType; - - return jToken.ToObject(myType); + return 
onDeserializedCallbacks[objFullType]; } + } - #endregion + internal static Type GetSytemOrSpeckleType(string typeName) + { + var systemType = Type.GetType(typeName); + if (systemType != null) + return systemType; + return GetAtomicType(typeName); } - internal static class CallSiteCache + /// + /// Flushes kit's (discriminator, type) cache. Useful if you're dynamically loading more kits at runtime, that provide better coverage of what you're deserialising, and it's now somehow poisoned because the higher level types were not originally available. + /// + public static void FlushCachedTypes() { - // Adapted from the answer to - // https://stackoverflow.com/questions/12057516/c-sharp-dynamicobject-dynamic-properties - // by jbtule, https://stackoverflow.com/users/637783/jbtule - // And also - // https://github.com/mgravell/fast-member/blob/master/FastMember/CallSiteCache.cs - // by Marc Gravell, https://github.com/mgravell + cachedTypes = new Dictionary(); + } + + #endregion + + #region Abstract Handling + + private static Dictionary cachedAbstractTypes = new(); + + internal static object HandleAbstractOriginalValue( + JToken jToken, + string assemblyQualifiedName, + JsonSerializer serializer + ) + { + if (cachedAbstractTypes.ContainsKey(assemblyQualifiedName)) + return jToken.ToObject(cachedAbstractTypes[assemblyQualifiedName]); + + var pieces = assemblyQualifiedName.Split(',').Select(s => s.Trim()).ToArray(); + + var myAssembly = AppDomain.CurrentDomain + .GetAssemblies() + .FirstOrDefault(ass => ass.GetName().Name == pieces[1]); + if (myAssembly == null) + throw new SpeckleException("Could not load abstract object's assembly."); + + var myType = myAssembly.GetType(pieces[0]); + if (myType == null) + throw new SpeckleException("Could not load abstract object's assembly."); + + cachedAbstractTypes[assemblyQualifiedName] = myType; + + return jToken.ToObject(myType); + } - private static readonly Dictionary>> setters = new Dictionary>>(); + #endregion +} - public 
static void SetValue(string propertyName, object target, object value) +internal static class CallSiteCache +{ + // Adapted from the answer to + // https://stackoverflow.com/questions/12057516/c-sharp-dynamicobject-dynamic-properties + // by jbtule, https://stackoverflow.com/users/637783/jbtule + // And also + // https://github.com/mgravell/fast-member/blob/master/FastMember/CallSiteCache.cs + // by Marc Gravell, https://github.com/mgravell + + private static readonly Dictionary< + string, + CallSite> + > setters = new(); + + public static void SetValue(string propertyName, object target, object value) + { + lock (setters) { - lock (setters) - { - CallSite> site; + CallSite> site; - lock (setters) + lock (setters) + if (!setters.TryGetValue(propertyName, out site)) { - if (!setters.TryGetValue(propertyName, out site)) - { - var binder = Microsoft.CSharp.RuntimeBinder.Binder.SetMember(CSharpBinderFlags.None, - propertyName, typeof(CallSiteCache), - new List - { + var binder = Binder.SetMember( + CSharpBinderFlags.None, + propertyName, + typeof(CallSiteCache), + new List + { CSharpArgumentInfo.Create(CSharpArgumentInfoFlags.None, null), CSharpArgumentInfo.Create(CSharpArgumentInfoFlags.None, null) - }); - setters[propertyName] = site = CallSite>.Create(binder); - } + } + ); + setters[propertyName] = site = CallSite>.Create( + binder + ); } - site.Target(site, target, value); - } + site.Target(site, target, value); } } } diff --git a/Core/Core/Serialisation/DeserializationWorkerThreads.cs b/Core/Core/Serialisation/DeserializationWorkerThreads.cs index b987fda136..ac7c09973e 100644 --- a/Core/Core/Serialisation/DeserializationWorkerThreads.cs +++ b/Core/Core/Serialisation/DeserializationWorkerThreads.cs @@ -1,109 +1,102 @@ -using System; +using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; -namespace Speckle.Core.Serialisation -{ - internal enum WorkerThreadTaskType - { - 
Deserialize, - } +namespace Speckle.Core.Serialisation; - internal class DeserializationWorkerThreads : IDisposable - { - public int ThreadCount { get; set; } = Environment.ProcessorCount; +internal enum WorkerThreadTaskType +{ + Deserialize +} - private List Threads = new List(); - private BaseObjectDeserializerV2 Serializer; +internal class DeserializationWorkerThreads : IDisposable +{ + private int FreeThreadCount = 0; - private object LockFreeThreads = new object(); - private int FreeThreadCount = 0; - private BlockingCollection<(WorkerThreadTaskType, object, TaskCompletionSource)> Tasks = new BlockingCollection<(WorkerThreadTaskType, object, TaskCompletionSource)>(); + private object LockFreeThreads = new(); + private BaseObjectDeserializerV2 Serializer; - public DeserializationWorkerThreads(BaseObjectDeserializerV2 serializer) - { - Serializer = serializer; - } + private BlockingCollection<(WorkerThreadTaskType, object, TaskCompletionSource)> Tasks = + new(); - public void Start() - { - for (int i = 0; i < ThreadCount; i++) - { - Thread t = new Thread(new ThreadStart(ThreadMain)); - t.IsBackground = true; - Threads.Add(t); - t.Start(); - } + private List Threads = new(); - } + public DeserializationWorkerThreads(BaseObjectDeserializerV2 serializer) + { + Serializer = serializer; + } - private void ThreadMain() - { - while (true) - { - lock (LockFreeThreads) - { - FreeThreadCount++; - } - (WorkerThreadTaskType taskType, object inputValue, TaskCompletionSource tcs) = Tasks.Take(); - if (tcs == null) - { - return; - } + public int ThreadCount { get; set; } = Environment.ProcessorCount; - try - { - object converted = null; - if (taskType == WorkerThreadTaskType.Deserialize) - converted = Serializer.DeserializeTransportObject(inputValue as String); - tcs.SetResult(converted); - } - catch (Exception e) - { - tcs.SetException(e); - } - } - } + public void Dispose() + { + lock (LockFreeThreads) + FreeThreadCount -= ThreadCount; + foreach (Thread t in Threads) + 
Tasks.Add((WorkerThreadTaskType.Deserialize, null, null)); + foreach (Thread t in Threads) + t.Join(); + Threads = null; + Tasks.Dispose(); + } - public void Dispose() + public void Start() + { + for (int i = 0; i < ThreadCount; i++) { - lock (LockFreeThreads) - { - FreeThreadCount -= ThreadCount; - } - foreach (Thread t in Threads) - Tasks.Add((WorkerThreadTaskType.Deserialize, null, null)); - foreach (Thread t in Threads) - t.Join(); - Threads = null; - Tasks.Dispose(); + Thread t = new(new ThreadStart(ThreadMain)); + t.IsBackground = true; + Threads.Add(t); + t.Start(); } + } - internal Task TryStartTask(WorkerThreadTaskType taskType, object inputValue) + private void ThreadMain() + { + while (true) { - bool canStartTask = false; lock (LockFreeThreads) + FreeThreadCount++; + (WorkerThreadTaskType taskType, object inputValue, TaskCompletionSource tcs) = + Tasks.Take(); + if (tcs == null) + return; + + try { - if (FreeThreadCount > 0) - { - canStartTask = true; - FreeThreadCount--; - } + object converted = null; + if (taskType == WorkerThreadTaskType.Deserialize) + converted = Serializer.DeserializeTransportObject(inputValue as string); + tcs.SetResult(converted); } - - if (canStartTask) + catch (Exception e) { - TaskCompletionSource tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); - Tasks.Add((taskType, inputValue, tcs)); - return tcs.Task; + tcs.SetException(e); } - else + } + } + + internal Task TryStartTask(WorkerThreadTaskType taskType, object inputValue) + { + bool canStartTask = false; + lock (LockFreeThreads) + if (FreeThreadCount > 0) { - return null; + canStartTask = true; + FreeThreadCount--; } + if (canStartTask) + { + TaskCompletionSource tcs = new(TaskCreationOptions.RunContinuationsAsynchronously); + Tasks.Add((taskType, inputValue, tcs)); + return tcs.Task; + } + else + { + return null; } } } diff --git a/Core/Core/Serialisation/ValueConverter.cs b/Core/Core/Serialisation/ValueConverter.cs index 
62a5dd10c6..d2d6229e31 100644 --- a/Core/Core/Serialisation/ValueConverter.cs +++ b/Core/Core/Serialisation/ValueConverter.cs @@ -1,173 +1,258 @@ -using System; +using System; using System.Collections; using System.Collections.Generic; +using System.Drawing; +using System.Globalization; using System.Linq; using System.Numerics; -namespace Speckle.Core.Serialisation + +namespace Speckle.Core.Serialisation; + +internal static class ValueConverter { - internal static class ValueConverter + public static bool ConvertValue(Type type, object value, out object convertedValue) { + // TODO: Document list of supported values in the SDK. (and grow it as needed) + + convertedValue = null; + if (value == null) + return true; + Type valueType = value.GetType(); - public static bool ConvertValue(Type type, object value, out object convertedValue) + if (type.IsAssignableFrom(valueType)) { - // TODO: Document list of supported values in the SDK. (and grow it as needed) + convertedValue = value; + return true; + } - convertedValue = null; - if (value == null) - return true; - Type valueType = value.GetType(); + bool isList = value is List; + List valueList = value as List; - if (type.IsAssignableFrom(valueType)) - { - convertedValue = value; - return true; - } + //strings + if (type == typeof(string)) + { + convertedValue = Convert.ToString(value); + return true; + } - bool isList = value is List; - List valueList = value as List; + #region Enum + if (type.IsEnum) + { + if (valueType != typeof(long)) + return false; + convertedValue = Enum.ToObject(type, (long)value); + return true; + } + #endregion - //strings - if (type == typeof(string)) - { - convertedValue = Convert.ToString(value); - return true; - } + switch (type.Name) + { + case "Nullable`1": + if (value == null) + { + convertedValue = null; + return true; + } + return ConvertValue(type.GenericTypeArguments[0], value, out convertedValue); + #region Numbers + case "Int64": + if (valueType == typeof(long)) + { + 
convertedValue = (long)value; + return true; + } + else + { + return false; + } + case "Int32": + if (valueType == typeof(long)) + { + convertedValue = (int)(long)value; + return true; + } + else + { + return false; + } + case "Int16": + if (valueType == typeof(long)) + { + convertedValue = (short)(long)value; + return true; + } + else + { + return false; + } + case "UInt64": + if (valueType == typeof(long)) + { + convertedValue = (ulong)(long)value; + return true; + } + else + { + return false; + } + case "UInt32": + if (valueType == typeof(long)) + { + convertedValue = (uint)(long)value; + return true; + } + else + { + return false; + } + case "UInt16": + if (valueType == typeof(long)) + { + convertedValue = (ushort)(long)value; + return true; + } + else + { + return false; + } + case "Double": + if (valueType == typeof(double)) + { + convertedValue = (double)(double)value; + return true; + } + if (valueType == typeof(long)) + { + convertedValue = (double)(long)value; + return true; + } + else + { + return false; + } + case "Single": + if (valueType == typeof(double)) + { + convertedValue = (float)(double)value; + return true; + } + if (valueType == typeof(long)) + { + convertedValue = (float)(long)value; + return true; + } + else + { + return false; + } - #region Enum - if (type.IsEnum) - { - if (valueType != typeof(long)) return false; - convertedValue = Enum.ToObject(type, (long)value); - return true; - } #endregion + } - switch (type.Name) - { - case "Nullable`1": - if (value == null) { convertedValue = null; return true; } - return ConvertValue(type.GenericTypeArguments[0], value, out convertedValue); - #region Numbers - case "Int64": - if (valueType == typeof(long)) { convertedValue = (long)value; return true; } - else return false; - case "Int32": - if (valueType == typeof(long)) { convertedValue = (Int32)(long)value; return true; } - else return false; - case "Int16": - if (valueType == typeof(long)) { convertedValue = (Int16)(long)value; return true; } - 
else return false; - case "UInt64": - if (valueType == typeof(long)) { convertedValue = (UInt64)(long)value; return true; } - else return false; - case "UInt32": - if (valueType == typeof(long)) { convertedValue = (UInt32)(long)value; return true; } - else return false; - case "UInt16": - if (valueType == typeof(long)) { convertedValue = (UInt16)(long)value; return true; } - else return false; - case "Double": - if (valueType == typeof(double)) { convertedValue = (Double)(double)value; return true; } - if (valueType == typeof(long)) { convertedValue = (Double)(long)value; return true; } - else return false; - case "Single": - if (valueType == typeof(double)) { convertedValue = (Single)(double)value; return true; } - if (valueType == typeof(long)) { convertedValue = (Single)(long)value; return true; } - else return false; - #endregion - } - - // Handle List - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(List<>)) - { - if (!isList) return false; - Type listElementType = type.GenericTypeArguments[0]; - IList ret = Activator.CreateInstance(type, new object[] { valueList.Count }) as IList; - foreach (object inputListElement in valueList) - { - object convertedListElement; - if (!ConvertValue(listElementType, inputListElement, out convertedListElement)) - return false; - ret.Add(convertedListElement); - } - convertedValue = ret; - return true; - } - - // Handle Dictionary - if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Dictionary<,>)) + // Handle List + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(List<>)) + { + if (!isList) + return false; + Type listElementType = type.GenericTypeArguments[0]; + IList ret = Activator.CreateInstance(type, new object[] { valueList.Count }) as IList; + foreach (object inputListElement in valueList) { - if (!(value is Dictionary)) + object convertedListElement; + if (!ConvertValue(listElementType, inputListElement, out convertedListElement)) return false; - Dictionary 
valueDict = (Dictionary)value; + ret.Add(convertedListElement); + } + convertedValue = ret; + return true; + } - if (type.GenericTypeArguments[0] != typeof(string)) - throw new Exception("Dictionaries with non-string keys are not supported"); - Type dictValueType = type.GenericTypeArguments[1]; - IDictionary ret = Activator.CreateInstance(type) as IDictionary; + // Handle Dictionary + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Dictionary<,>)) + { + if (!(value is Dictionary)) + return false; + Dictionary valueDict = (Dictionary)value; - foreach (KeyValuePair kv in valueDict) - { - object convertedDictValue; - if (!ConvertValue(dictValueType, kv.Value, out convertedDictValue)) - return false; - ret[kv.Key] = convertedDictValue; - } - convertedValue = ret; - return true; - } + if (type.GenericTypeArguments[0] != typeof(string)) + throw new Exception("Dictionaries with non-string keys are not supported"); + Type dictValueType = type.GenericTypeArguments[1]; + IDictionary ret = Activator.CreateInstance(type) as IDictionary; - // Handle arrays - if (type.IsArray) + foreach (KeyValuePair kv in valueDict) { - if (!isList) return false; - Type arrayElementType = type.GetElementType(); - Array ret = Activator.CreateInstance(type, new object[] { valueList.Count }) as Array; - for (int i = 0; i < valueList.Count; i++) - { - object inputListElement = valueList[i]; - object convertedListElement; - if (!ConvertValue(arrayElementType, inputListElement, out convertedListElement)) - return false; - ret.SetValue(convertedListElement, i); - } - convertedValue = ret; - return true; + object convertedDictValue; + if (!ConvertValue(dictValueType, kv.Value, out convertedDictValue)) + return false; + ret[kv.Key] = convertedDictValue; } + convertedValue = ret; + return true; + } - // Handle simple classes/structs - if (type == typeof(Guid) && valueType == typeof(string)) + // Handle arrays + if (type.IsArray) + { + if (!isList) + return false; + Type 
arrayElementType = type.GetElementType(); + Array ret = Activator.CreateInstance(type, new object[] { valueList.Count }) as Array; + for (int i = 0; i < valueList.Count; i++) { - convertedValue = Guid.Parse(value as string); - return true; + object inputListElement = valueList[i]; + object convertedListElement; + if (!ConvertValue(arrayElementType, inputListElement, out convertedListElement)) + return false; + ret.SetValue(convertedListElement, i); } + convertedValue = ret; + return true; + } - if (type == typeof(System.Drawing.Color) && valueType == typeof(long)) - { - convertedValue = System.Drawing.Color.FromArgb((int)(long)value); - return true; - } + // Handle simple classes/structs + if (type == typeof(Guid) && valueType == typeof(string)) + { + convertedValue = Guid.Parse(value as string); + return true; + } - if (type == typeof(DateTime) && valueType == typeof(string)) - { - convertedValue = DateTime.ParseExact((string)value, "o", System.Globalization.CultureInfo.InvariantCulture); - return true; - } + if (type == typeof(Color) && valueType == typeof(long)) + { + convertedValue = Color.FromArgb((int)(long)value); + return true; + } - if (type == typeof(Matrix4x4) && valueType == typeof(List)) - { - var l = (value as List).ToList(); - float I(int index) => Convert.ToSingle(l[index]); - convertedValue = new Matrix4x4( - I(0), I(1), I(2), I(3), - I(4), I(5), I(6), I(7), - I(8), I(9), I(10), I(11), - I(12), I(13), I(14), I(15) - ); - return true; - } + if (type == typeof(DateTime) && valueType == typeof(string)) + { + convertedValue = DateTime.ParseExact((string)value, "o", CultureInfo.InvariantCulture); + return true; + } - return false; + if (type == typeof(Matrix4x4) && valueType == typeof(List)) + { + var l = (value as List).ToList(); + float I(int index) => Convert.ToSingle(l[index]); + convertedValue = new Matrix4x4( + I(0), + I(1), + I(2), + I(3), + I(4), + I(5), + I(6), + I(7), + I(8), + I(9), + I(10), + I(11), + I(12), + I(13), + I(14), + I(15) + ); + 
return true; } + + return false; } } diff --git a/Core/Core/Transports/ITransport.cs b/Core/Core/Transports/ITransport.cs index 6d6c1e5eae..4b40c39154 100644 --- a/Core/Core/Transports/ITransport.cs +++ b/Core/Core/Transports/ITransport.cs @@ -1,116 +1,115 @@ -using System; +using System; using System.Collections.Generic; using System.Threading; using System.Threading.Tasks; using Speckle.Core.Models; -namespace Speckle.Core.Transports +namespace Speckle.Core.Transports; + +/// +/// Interface defining the contract for transport implementations. +/// +public interface ITransport { /// - /// Interface defining the contract for transport implementations. + /// Human readable name for the transport + /// + public string TransportName { get; set; } + + /// + /// Extra descriptor properties of the given transport. + /// + public Dictionary TransportContext { get; } + + /// + /// Show how much time the transport was busy for. + /// + public TimeSpan Elapsed { get; } + + /// + /// Show how many objects the transport saved. + /// + public int SavedObjectCount { get; } + + /// + /// Should be checked often and gracefully stop all in progress sending if requested. + /// + public CancellationToken CancellationToken { get; set; } + + /// + /// Used to report progress during the transport's longer operations. + /// + public Action OnProgressAction { get; set; } + + /// + /// Used to report errors during the transport's longer operations. + /// + public Action OnErrorAction { get; set; } + + /// + /// Optional: signals to the transport that writes are about to begin. + /// + public void BeginWrite(); + + /// + /// Optional: signals to the transport that no more items will need to be written. + /// + public void EndWrite(); + + /// + /// Saves an object. + /// + /// The hash of the object. + /// The full string representation of the object. 
+ public void SaveObject(string id, string serializedObject); + + /// + /// Saves an object, retrieving its serialised version from the provided transport. + /// + /// The hash of the object. + /// The transport from where to retrieve it. + public void SaveObject(string id, ITransport sourceTransport); + + /// + /// Awaitable method to figure out whether writing is completed. + /// + /// + public Task WriteComplete(); + + /// + /// Gets an object. + /// + /// The object's hash. + /// + public string GetObject(string id); + + /// + /// Copies the parent object and all its children to the provided transport. + /// + /// The id of the object you want to copy. + /// The transport you want to copy the object to. + /// (Optional) an action that will be invoked once, when the amount of object children to be copied over is known. + /// The string representation of the root object. + public Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action onTotalChildrenCountKnown = null + ); + + /// + /// Checks if objects are present in the transport /// - public interface ITransport - { - /// - /// Human readable name for the transport - /// - public string TransportName { get; set; } - - /// - /// Extra descriptor properties of the given transport. - /// - public Dictionary TransportContext { get; } - - /// - /// Show how much time the transport was busy for. - /// - public TimeSpan Elapsed { get; } - - /// - /// Show how many objects the transport saved. - /// - public int SavedObjectCount { get; } - - /// - /// Should be checked often and gracefully stop all in progress sending if requested. - /// - public CancellationToken CancellationToken { get; set; } - - /// - /// Used to report progress during the transport's longer operations. - /// - public Action OnProgressAction { get; set; } - - /// - /// Used to report errors during the transport's longer operations. 
- /// - public Action OnErrorAction { get; set; } - - /// - /// Optional: signals to the transport that writes are about to begin. - /// - public void BeginWrite(); - - /// - /// Optional: signals to the transport that no more items will need to be written. - /// - public void EndWrite(); - - /// - /// Saves an object. - /// - /// The hash of the object. - /// The full string representation of the object. - public void SaveObject(string id, string serializedObject); - - /// - /// Saves an object, retrieving its serialised version from the provided transport. - /// - /// The hash of the object. - /// The transport from where to retrieve it. - public void SaveObject(string id, ITransport sourceTransport); - - /// - /// Awaitable method to figure out whether writing is completed. - /// - /// - public Task WriteComplete(); - - /// - /// Gets an object. - /// - /// The object's hash. - /// - public string GetObject(string id); - - /// - /// Copies the parent object and all its children to the provided transport. - /// - /// The id of the object you want to copy. - /// The transport you want to copy the object to. - /// (Optional) an action that will be invoked once, when the amount of object children to be copied over is known. - /// The string representation of the root object. 
- public Task CopyObjectAndChildren( - string id, - ITransport targetTransport, - Action onTotalChildrenCountKnown = null - ); - - /// - /// Checks if objects are present in the transport - /// - /// List of object ids to check - /// A dictionary with the specified object ids as keys and boolean values, whether each object is present in the transport or not - public Task> HasObjects(List objectIds); - } - - public interface IBlobCapableTransport - { - public string BlobStorageFolder { get; } - - public void SaveBlob(Blob obj); - - // NOTE: not needed, should be implemented in "CopyObjectsAndChildren" - //public void GetBlob(Blob obj); - } + /// List of object ids to check + /// A dictionary with the specified object ids as keys and boolean values, whether each object is present in the transport or not + public Task> HasObjects(List objectIds); +} + +public interface IBlobCapableTransport +{ + public string BlobStorageFolder { get; } + + public void SaveBlob(Blob obj); + + // NOTE: not needed, should be implemented in "CopyObjectsAndChildren" + //public void GetBlob(Blob obj); } diff --git a/Core/Core/Transports/Memory.cs b/Core/Core/Transports/Memory.cs index 711451ed35..d1cc8fc88a 100644 --- a/Core/Core/Transports/Memory.cs +++ b/Core/Core/Transports/Memory.cs @@ -1,134 +1,130 @@ -using System; +using System; using System.Collections.Generic; using System.Diagnostics; using System.Threading; using System.Threading.Tasks; -using Serilog; using Speckle.Core.Logging; -namespace Speckle.Core.Transports +namespace Speckle.Core.Transports; + +/// +/// An in memory storage of speckle objects. +/// +public class MemoryTransport : ITransport, IDisposable, ICloneable { - /// - /// An in memory storage of speckle objects. 
- /// - public class MemoryTransport : ITransport, IDisposable, ICloneable + public Dictionary Objects; + + public MemoryTransport() { - public Dictionary Objects; + SpeckleLog.Logger.Debug("Creating a new Memory Transport"); - public CancellationToken CancellationToken { get; set; } + Objects = new Dictionary(); + } - public string TransportName { get; set; } = "Memory"; + public object Clone() + { + return new MemoryTransport() + { + TransportName = TransportName, + OnErrorAction = OnErrorAction, + OnProgressAction = OnProgressAction, + CancellationToken = CancellationToken, + Objects = Objects, + SavedObjectCount = SavedObjectCount + }; + } - public Action OnProgressAction { get; set; } + public void Dispose() + { + Objects = null; + OnErrorAction = null; + OnProgressAction = null; + SavedObjectCount = 0; + } - public Action OnErrorAction { get; set; } + public CancellationToken CancellationToken { get; set; } - public int SavedObjectCount { get; set; } = 0; + public string TransportName { get; set; } = "Memory"; - public Dictionary TransportContext => - new Dictionary { { "name", TransportName }, { "type", this.GetType().Name } }; + public Action OnProgressAction { get; set; } - public TimeSpan Elapsed { get; set; } = TimeSpan.Zero; + public Action OnErrorAction { get; set; } - public MemoryTransport() - { - SpeckleLog.Logger.Debug("Creating a new Memory Transport"); + public int SavedObjectCount { get; set; } = 0; - Objects = new Dictionary(); - } + public Dictionary TransportContext => + new() { { "name", TransportName }, { "type", GetType().Name } }; - public void BeginWrite() - { - SavedObjectCount = 0; - } + public TimeSpan Elapsed { get; set; } = TimeSpan.Zero; - public void EndWrite() { } + public void BeginWrite() + { + SavedObjectCount = 0; + } - public void SaveObject(string hash, string serializedObject) - { - var stopwatch = Stopwatch.StartNew(); - if (CancellationToken.IsCancellationRequested) - return; // Check for cancellation + public void 
EndWrite() { } - Objects[hash] = serializedObject; + public void SaveObject(string hash, string serializedObject) + { + var stopwatch = Stopwatch.StartNew(); + if (CancellationToken.IsCancellationRequested) + return; // Check for cancellation - SavedObjectCount++; - OnProgressAction?.Invoke(TransportName, 1); - stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; - } + Objects[hash] = serializedObject; - public void SaveObject(string id, ITransport sourceTransport) - { - throw new NotImplementedException(); - } + SavedObjectCount++; + OnProgressAction?.Invoke(TransportName, 1); + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + } - public string GetObject(string hash) - { - if (CancellationToken.IsCancellationRequested) - return null; // Check for cancellation - - var stopwatch = Stopwatch.StartNew(); - var ret = Objects.ContainsKey(hash) ? Objects[hash] : null; - stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; - return ret; - } - - public Task CopyObjectAndChildren( - string id, - ITransport targetTransport, - Action onTotalChildrenCountKnown = null - ) - { - throw new NotImplementedException(); - } + public void SaveObject(string id, ITransport sourceTransport) + { + throw new NotImplementedException(); + } - public bool GetWriteCompletionStatus() - { - return true; // can safely assume it's always true, as ops are atomic? - } + public string GetObject(string hash) + { + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation + + var stopwatch = Stopwatch.StartNew(); + var ret = Objects.ContainsKey(hash) ? 
Objects[hash] : null; + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + return ret; + } - public Task WriteComplete() - { - return Utilities.WaitUntil(() => true); - } + public Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action onTotalChildrenCountKnown = null + ) + { + throw new NotImplementedException(); + } - public override string ToString() - { - return $"Memory Transport {TransportName}"; - } + public Task WriteComplete() + { + return Utilities.WaitUntil(() => true); + } - public async Task> HasObjects(List objectIds) - { - Dictionary ret = new Dictionary(); - foreach (string objectId in objectIds) - { - ret[objectId] = Objects.ContainsKey(objectId); - } + public async Task> HasObjects(List objectIds) + { + Dictionary ret = new(); + foreach (string objectId in objectIds) + ret[objectId] = Objects.ContainsKey(objectId); - return ret; - } + return ret; + } - public void Dispose() - { - Objects = null; - OnErrorAction = null; - OnProgressAction = null; - SavedObjectCount = 0; - } + public bool GetWriteCompletionStatus() + { + return true; // can safely assume it's always true, as ops are atomic? 
+ } - public object Clone() - { - return new MemoryTransport() - { - TransportName = TransportName, - OnErrorAction = OnErrorAction, - OnProgressAction = OnProgressAction, - CancellationToken = CancellationToken, - Objects = Objects, - SavedObjectCount = SavedObjectCount - }; - } + public override string ToString() + { + return $"Memory Transport {TransportName}"; } } diff --git a/Core/Core/Transports/SQLite.cs b/Core/Core/Transports/SQLite.cs index 20ef7b83d9..4fafe3cc06 100644 --- a/Core/Core/Transports/SQLite.cs +++ b/Core/Core/Transports/SQLite.cs @@ -1,436 +1,437 @@ -using System; +using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.IO; +using System.Text; using System.Threading; using System.Threading.Tasks; using System.Timers; using Microsoft.Data.Sqlite; using Speckle.Core.Helpers; +using Speckle.Core.Models; +using Timer = System.Timers.Timer; -namespace Speckle.Core.Transports +namespace Speckle.Core.Transports; + +public class SQLiteTransport : IDisposable, ICloneable, ITransport, IBlobCapableTransport { - public class SQLiteTransport : IDisposable, ICloneable, ITransport, IBlobCapableTransport + private bool IS_WRITING = false; + private int MAX_TRANSACTION_SIZE = 1000; + private int PollInterval = 500; + + private ConcurrentQueue<(string, string, int)> Queue = new(); + + /// + /// Timer that ensures queue is consumed if less than MAX_TRANSACTION_SIZE objects are being sent. 
+ /// + private Timer WriteTimer; + + public SQLiteTransport( + string basePath = null, + string applicationName = null, + string scope = "Data" + ) { - public string TransportName { get; set; } = "SQLite"; - public Dictionary TransportContext => - new Dictionary + if (basePath == null) + basePath = SpecklePathProvider.UserApplicationDataPath(); + _basePath = basePath; + + if (applicationName == null) + applicationName = "Speckle"; + _applicationName = applicationName; + + if (scope == null) + scope = "Data"; + _scope = scope; + + var dir = Path.Combine(basePath, applicationName); + try + { + Directory.CreateDirectory(dir); //ensure dir is there + } + catch (Exception ex) + { + throw new Exception($"Cound not create {dir}", ex); + } + + RootPath = Path.Combine(basePath, applicationName, $"{scope}.db"); + ConnectionString = string.Format("Data Source={0};", RootPath); + + try + { + Initialize(); + + WriteTimer = new Timer() { - { "name", TransportName }, - { "type", this.GetType().Name }, - { "basePath", _basePath }, - { "applicationName", _applicationName }, - { "scope", _scope }, - { "blobStorageFolder", BlobStorageFolder } + AutoReset = true, + Enabled = false, + Interval = PollInterval }; + WriteTimer.Elapsed += WriteTimerElapsed; + } + catch (Exception e) + { + OnErrorAction?.Invoke(TransportName, e); + } + } - public CancellationToken CancellationToken { get; set; } + public string RootPath { get; set; } - public string RootPath { get; set; } + private string _basePath { get; set; } + private string _applicationName { get; set; } + private string _scope { get; set; } - private string _basePath { get; set; } - private string _applicationName { get; set; } - private string _scope { get; set; } + public string ConnectionString { get; set; } - public string ConnectionString { get; set; } + private SqliteConnection Connection { get; set; } + private object ConnectionLock { get; set; } + public string BlobStorageFolder => + 
SpecklePathProvider.BlobStoragePath(Path.Combine(_basePath, _applicationName)); - private SqliteConnection Connection { get; set; } - private object ConnectionLock { get; set; } + public void SaveBlob(Blob obj) + { + var blobPath = obj.originalPath; + var targetPath = obj.getLocalDestinationPath(BlobStorageFolder); + File.Copy(blobPath, targetPath, true); + } - private ConcurrentQueue<(string, string, int)> Queue = - new ConcurrentQueue<(string, string, int)>(); + public object Clone() + { + return new SQLiteTransport(_basePath, _applicationName, _scope) + { + OnProgressAction = OnProgressAction, + OnErrorAction = OnErrorAction, + CancellationToken = CancellationToken + }; + } - public Action OnProgressAction { get; set; } + public void Dispose() + { + // TODO: Check if it's still writing? + Connection?.Close(); + Connection?.Dispose(); + WriteTimer.Dispose(); + } - public Action OnErrorAction { get; set; } - public int SavedObjectCount { get; private set; } - public string BlobStorageFolder => - SpecklePathProvider.BlobStoragePath(Path.Combine(_basePath, _applicationName)); + public string TransportName { get; set; } = "SQLite"; - public TimeSpan Elapsed { get; private set; } + public Dictionary TransportContext => + new() + { + { "name", TransportName }, + { "type", GetType().Name }, + { "basePath", _basePath }, + { "applicationName", _applicationName }, + { "scope", _scope }, + { "blobStorageFolder", BlobStorageFolder } + }; - /// - /// Timer that ensures queue is consumed if less than MAX_TRANSACTION_SIZE objects are being sent. 
- /// - private System.Timers.Timer WriteTimer; - private int PollInterval = 500; + public CancellationToken CancellationToken { get; set; } - private bool IS_WRITING = false; - private int MAX_TRANSACTION_SIZE = 1000; + public Action OnProgressAction { get; set; } - public SQLiteTransport( - string basePath = null, - string applicationName = null, - string scope = "Data" - ) - { - if (basePath == null) - basePath = SpecklePathProvider.UserApplicationDataPath(); - _basePath = basePath; + public Action OnErrorAction { get; set; } + public int SavedObjectCount { get; private set; } - if (applicationName == null) - applicationName = "Speckle"; - _applicationName = applicationName; + public TimeSpan Elapsed { get; private set; } - if (scope == null) - scope = "Data"; - _scope = scope; + public void BeginWrite() + { + Queue = new ConcurrentQueue<(string, string, int)>(); + SavedObjectCount = 0; + } - var dir = Path.Combine(basePath, applicationName); - try - { - Directory.CreateDirectory(dir); //ensure dir is there - } - catch (Exception ex) - { - throw new Exception($"Cound not create {dir}", ex); - } + public void EndWrite() { } - RootPath = Path.Combine(basePath, applicationName, $"{scope}.db"); - ConnectionString = string.Format("Data Source={0};", RootPath); + public async Task> HasObjects(List objectIds) + { + Dictionary ret = new(); + // Initialize with false so that canceled queries still return a dictionary item for every object id + foreach (string objectId in objectIds) + ret[objectId] = false; - try + using (var c = new SqliteConnection(ConnectionString)) + { + c.Open(); + foreach (string objectId in objectIds) { - Initialize(); - - WriteTimer = new System.Timers.Timer() + if (CancellationToken.IsCancellationRequested) + return ret; + var commandText = "SELECT 1 FROM objects WHERE hash = @hash LIMIT 1 "; + using (var command = new SqliteCommand(commandText, c)) { - AutoReset = true, - Enabled = false, - Interval = PollInterval - }; - WriteTimer.Elapsed += 
WriteTimerElapsed; - } - catch (Exception e) - { - OnErrorAction?.Invoke(TransportName, e); + command.Parameters.AddWithValue("@hash", objectId); + using (var reader = command.ExecuteReader()) + { + bool rowFound = reader.Read(); + ret[objectId] = rowFound; + } + } } } + return ret; + } - private void Initialize() + private void Initialize() + { + // NOTE: used for creating partioned object tables. + //string[] HexChars = new string[] { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f" }; + //var cart = new List(); + //foreach (var str in HexChars) + // foreach (var str2 in HexChars) + // cart.Add(str + str2); + if (CancellationToken.IsCancellationRequested) + return; + + using (var c = new SqliteConnection(ConnectionString)) { - // NOTE: used for creating partioned object tables. - //string[] HexChars = new string[] { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f" }; - //var cart = new List(); - //foreach (var str in HexChars) - // foreach (var str2 in HexChars) - // cart.Add(str + str2); - if (CancellationToken.IsCancellationRequested) - return; - - using (var c = new SqliteConnection(ConnectionString)) - { - c.Open(); - var commandText = - @" + c.Open(); + var commandText = + @" CREATE TABLE IF NOT EXISTS objects( hash TEXT PRIMARY KEY, content TEXT ) WITHOUT ROWID; "; - using (var command = new SqliteCommand(commandText, c)) - { - command.ExecuteNonQuery(); - } + using (var command = new SqliteCommand(commandText, c)) + command.ExecuteNonQuery(); - // Insert Optimisations + // Insert Optimisations - SqliteCommand cmd; - cmd = new SqliteCommand("PRAGMA journal_mode='wal';", c); - cmd.ExecuteNonQuery(); + SqliteCommand cmd; + cmd = new SqliteCommand("PRAGMA journal_mode='wal';", c); + cmd.ExecuteNonQuery(); - //Note / Hack: This setting has the potential to corrupt the db. 
- //cmd = new SqliteCommand("PRAGMA synchronous=OFF;", Connection); - //cmd.ExecuteNonQuery(); + //Note / Hack: This setting has the potential to corrupt the db. + //cmd = new SqliteCommand("PRAGMA synchronous=OFF;", Connection); + //cmd.ExecuteNonQuery(); - cmd = new SqliteCommand("PRAGMA count_changes=OFF;", c); - cmd.ExecuteNonQuery(); + cmd = new SqliteCommand("PRAGMA count_changes=OFF;", c); + cmd.ExecuteNonQuery(); - cmd = new SqliteCommand("PRAGMA temp_store=MEMORY;", c); - cmd.ExecuteNonQuery(); - } + cmd = new SqliteCommand("PRAGMA temp_store=MEMORY;", c); + cmd.ExecuteNonQuery(); + } - Connection = new SqliteConnection(ConnectionString); - Connection.Open(); - ConnectionLock = new object(); + Connection = new SqliteConnection(ConnectionString); + Connection.Open(); + ConnectionLock = new object(); - if (CancellationToken.IsCancellationRequested) - return; - } + if (CancellationToken.IsCancellationRequested) + return; + } - public void BeginWrite() - { - Queue = new ConcurrentQueue<(string, string, int)>(); - SavedObjectCount = 0; - } + /// + /// Returns all the objects in the store. Note: do not use for large collections. + /// + /// + internal IEnumerable GetAllObjects() + { + if (CancellationToken.IsCancellationRequested) + yield break; // Check for cancellation - public void EndWrite() { } + using var c = new SqliteConnection(ConnectionString); + c.Open(); - #region Writes + using var command = new SqliteCommand("SELECT * FROM objects", c); - /// - /// Awaits untill write completion (ie, the current queue is fully consumed). - /// - /// - public async Task WriteComplete() + using var reader = command.ExecuteReader(); + while (reader.Read()) { - await Utilities.WaitUntil( - () => - { - return GetWriteCompletionStatus(); - }, - 500 - ); + if (CancellationToken.IsCancellationRequested) + yield break; // Check for cancellation + yield return reader.GetString(1); } + } - /// - /// Returns true if the current write queue is empty and comitted. 
- /// - /// - public bool GetWriteCompletionStatus() - { - return Queue.Count == 0 && !IS_WRITING; - } + /// + /// Deletes an object. Note: do not use for any speckle object transport, as it will corrupt the database. + /// + /// + public void DeleteObject(string hash) + { + if (CancellationToken.IsCancellationRequested) + return; - private void WriteTimerElapsed(object sender, ElapsedEventArgs e) + using (var c = new SqliteConnection(ConnectionString)) { - WriteTimer.Enabled = false; - - if (CancellationToken.IsCancellationRequested) + c.Open(); + using (var command = new SqliteCommand("DELETE FROM objects WHERE hash = @hash", c)) { - Queue = new ConcurrentQueue<(string, string, int)>(); - return; + command.Parameters.AddWithValue("@hash", hash); + command.ExecuteNonQuery(); } - - if (!IS_WRITING && Queue.Count != 0) - ConsumeQueue(); } + } - private void ConsumeQueue() + /// + /// Updates an object. + /// + /// + /// + public void UpdateObject(string hash, string serializedObject) + { + if (CancellationToken.IsCancellationRequested) + return; + + using (var c = new SqliteConnection(ConnectionString)) { - if (CancellationToken.IsCancellationRequested) + c.Open(); + var commandText = $"REPLACE INTO objects(hash, content) VALUES(@hash, @content)"; + using (var command = new SqliteCommand(commandText, c)) { - Queue = new ConcurrentQueue<(string, string, int)>(); - return; + command.Parameters.AddWithValue("@hash", hash); + command.Parameters.AddWithValue("@content", serializedObject); + command.ExecuteNonQuery(); } + } + } - var stopwatch = Stopwatch.StartNew(); - IS_WRITING = true; - var i = 0; - ValueTuple result; + public override string ToString() + { + return $"Sqlite Transport @{RootPath}"; + } - var saved = 0; + #region Writes - using (var c = new SqliteConnection(ConnectionString)) - { - c.Open(); - using (var t = c.BeginTransaction()) + /// + /// Awaits untill write completion (ie, the current queue is fully consumed). 
+ /// + /// + public async Task WriteComplete() + { + await Utilities + .WaitUntil( + () => { - var commandText = $"INSERT OR IGNORE INTO objects(hash, content) VALUES(@hash, @content)"; - - while (i < MAX_TRANSACTION_SIZE && Queue.TryPeek(out result)) - { - using (var command = new SqliteCommand(commandText, c, t)) - { - Queue.TryDequeue(out result); - command.Parameters.AddWithValue("@hash", result.Item1); - command.Parameters.AddWithValue("@content", result.Item2); - command.ExecuteNonQuery(); - - saved++; - } - } - t.Commit(); - if (CancellationToken.IsCancellationRequested) - { - Queue = new ConcurrentQueue<(string, string, int)>(); - IS_WRITING = false; - stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; - return; - } - } - } - - if (OnProgressAction != null) - OnProgressAction(TransportName, saved); - - if (CancellationToken.IsCancellationRequested) - { - Queue = new ConcurrentQueue<(string, string, int)>(); - IS_WRITING = false; - stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; - return; - } + return GetWriteCompletionStatus(); + }, + 500 + ) + .ConfigureAwait(false); + } - if (Queue.Count > 0) - ConsumeQueue(); + /// + /// Returns true if the current write queue is empty and comitted. + /// + /// + public bool GetWriteCompletionStatus() + { + return Queue.Count == 0 && !IS_WRITING; + } - stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; - IS_WRITING = false; - } + private void WriteTimerElapsed(object sender, ElapsedEventArgs e) + { + WriteTimer.Enabled = false; - /// - /// Adds an object to the saving queue. 
- /// - /// - /// - public void SaveObject(string hash, string serializedObject) + if (CancellationToken.IsCancellationRequested) { - Queue.Enqueue( - (hash, serializedObject, System.Text.Encoding.UTF8.GetByteCount(serializedObject)) - ); - - WriteTimer.Enabled = true; - WriteTimer.Start(); + Queue = new ConcurrentQueue<(string, string, int)>(); + return; } - public void SaveObject(string hash, ITransport sourceTransport) - { - var serializedObject = sourceTransport.GetObject(hash); - Queue.Enqueue( - (hash, serializedObject, System.Text.Encoding.UTF8.GetByteCount(serializedObject)) - ); - } + if (!IS_WRITING && Queue.Count != 0) + ConsumeQueue(); + } - /// - /// Directly saves the object in the db. - /// - /// - /// - public void SaveObjectSync(string hash, string serializedObject) + private void ConsumeQueue() + { + if (CancellationToken.IsCancellationRequested) { - try - { - using (var c = new SqliteConnection(ConnectionString)) - { - c.Open(); - var commandText = $"INSERT OR IGNORE INTO objects(hash, content) VALUES(@hash, @content)"; - using (var command = new SqliteCommand(commandText, c)) - { - command.Parameters.AddWithValue("@hash", hash); - command.Parameters.AddWithValue("@content", serializedObject); - command.ExecuteNonQuery(); - } - } - } - catch (Exception e) - { - OnErrorAction?.Invoke(TransportName, e); - } + Queue = new ConcurrentQueue<(string, string, int)>(); + return; } - #endregion + var stopwatch = Stopwatch.StartNew(); + IS_WRITING = true; + var i = 0; + ValueTuple result; - #region Reads + var saved = 0; - /// - /// Gets an object. 
- /// - /// - /// - public string GetObject(string hash) + using (var c = new SqliteConnection(ConnectionString)) { - if (CancellationToken.IsCancellationRequested) - return null; - lock (ConnectionLock) + c.Open(); + using (var t = c.BeginTransaction()) { - var stopwatch = Stopwatch.StartNew(); - using ( - var command = new SqliteCommand( - "SELECT * FROM objects WHERE hash = @hash LIMIT 1 ", - Connection - ) - ) - { - command.Parameters.AddWithValue("@hash", hash); - using (var reader = command.ExecuteReader()) + var commandText = $"INSERT OR IGNORE INTO objects(hash, content) VALUES(@hash, @content)"; + + while (i < MAX_TRANSACTION_SIZE && Queue.TryPeek(out result)) + using (var command = new SqliteCommand(commandText, c, t)) { - while (reader.Read()) - { - if (CancellationToken.IsCancellationRequested) - return null; - return reader.GetString(1); - } + Queue.TryDequeue(out result); + command.Parameters.AddWithValue("@hash", result.Item1); + command.Parameters.AddWithValue("@content", result.Item2); + command.ExecuteNonQuery(); + + saved++; } + + t.Commit(); + if (CancellationToken.IsCancellationRequested) + { + Queue = new ConcurrentQueue<(string, string, int)>(); + IS_WRITING = false; + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + return; } - stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; } - return null; // pass on the duty of null checks to consumers } - public async Task CopyObjectAndChildren( - string hash, - ITransport targetTransport, - Action onTotalChildrenCountKnown = null - ) - { - throw new NotImplementedException(); - } - - #endregion + if (OnProgressAction != null) + OnProgressAction(TransportName, saved); - /// - /// Returns all the objects in the store. Note: do not use for large collections. 
- /// - /// - internal IEnumerable GetAllObjects() + if (CancellationToken.IsCancellationRequested) { - if (CancellationToken.IsCancellationRequested) - yield break; // Check for cancellation + Queue = new ConcurrentQueue<(string, string, int)>(); + IS_WRITING = false; + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + return; + } - using var c = new SqliteConnection(ConnectionString); - c.Open(); + if (Queue.Count > 0) + ConsumeQueue(); - using var command = new SqliteCommand("SELECT * FROM objects", c); + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + IS_WRITING = false; + } - using var reader = command.ExecuteReader(); - while (reader.Read()) - { - if (CancellationToken.IsCancellationRequested) - yield break; // Check for cancellation - yield return reader.GetString(1); - } - } + /// + /// Adds an object to the saving queue. + /// + /// + /// + public void SaveObject(string hash, string serializedObject) + { + Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); - /// - /// Deletes an object. Note: do not use for any speckle object transport, as it will corrupt the database. - /// - /// - public void DeleteObject(string hash) - { - if (CancellationToken.IsCancellationRequested) - return; + WriteTimer.Enabled = true; + WriteTimer.Start(); + } - using (var c = new SqliteConnection(ConnectionString)) - { - c.Open(); - using (var command = new SqliteCommand("DELETE FROM objects WHERE hash = @hash", c)) - { - command.Parameters.AddWithValue("@hash", hash); - command.ExecuteNonQuery(); - } - } - } + public void SaveObject(string hash, ITransport sourceTransport) + { + var serializedObject = sourceTransport.GetObject(hash); + Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + } - /// - /// Updates an object. - /// - /// - /// - public void UpdateObject(string hash, string serializedObject) + /// + /// Directly saves the object in the db. 
+ /// + /// + /// + public void SaveObjectSync(string hash, string serializedObject) + { + try { - if (CancellationToken.IsCancellationRequested) - return; - using (var c = new SqliteConnection(ConnectionString)) { c.Open(); - var commandText = $"REPLACE INTO objects(hash, content) VALUES(@hash, @content)"; + var commandText = $"INSERT OR IGNORE INTO objects(hash, content) VALUES(@hash, @content)"; using (var command = new SqliteCommand(commandText, c)) { command.Parameters.AddWithValue("@hash", hash); @@ -439,64 +440,58 @@ public void UpdateObject(string hash, string serializedObject) } } } - - public override string ToString() + catch (Exception e) { - return $"Sqlite Transport @{RootPath}"; + OnErrorAction?.Invoke(TransportName, e); } + } - public async Task> HasObjects(List objectIds) - { - Dictionary ret = new Dictionary(); - // Initialize with false so that canceled queries still return a dictionary item for every object id - foreach (string objectId in objectIds) - ret[objectId] = false; + #endregion - using (var c = new SqliteConnection(ConnectionString)) + #region Reads + + /// + /// Gets an object. 
+ /// + /// + /// + public string GetObject(string hash) + { + if (CancellationToken.IsCancellationRequested) + return null; + lock (ConnectionLock) + { + var stopwatch = Stopwatch.StartNew(); + using ( + var command = new SqliteCommand( + "SELECT * FROM objects WHERE hash = @hash LIMIT 1 ", + Connection + ) + ) { - c.Open(); - foreach (string objectId in objectIds) - { - if (CancellationToken.IsCancellationRequested) - return ret; - var commandText = "SELECT 1 FROM objects WHERE hash = @hash LIMIT 1 "; - using (var command = new SqliteCommand(commandText, c)) + command.Parameters.AddWithValue("@hash", hash); + using (var reader = command.ExecuteReader()) + while (reader.Read()) { - command.Parameters.AddWithValue("@hash", objectId); - using (var reader = command.ExecuteReader()) - { - bool rowFound = reader.Read(); - ret[objectId] = rowFound; - } + if (CancellationToken.IsCancellationRequested) + return null; + return reader.GetString(1); } - } } - return ret; - } - - public void Dispose() - { - // TODO: Check if it's still writing? 
- Connection?.Close(); - Connection?.Dispose(); - WriteTimer.Dispose(); - } - - public object Clone() - { - return new SQLiteTransport(_basePath, _applicationName, _scope) - { - OnProgressAction = OnProgressAction, - OnErrorAction = OnErrorAction, - CancellationToken = CancellationToken - }; + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; } + return null; // pass on the duty of null checks to consumers + } - public void SaveBlob(Models.Blob obj) - { - var blobPath = obj.originalPath; - var targetPath = obj.getLocalDestinationPath(BlobStorageFolder); - File.Copy(blobPath, targetPath, true); - } + public async Task CopyObjectAndChildren( + string hash, + ITransport targetTransport, + Action onTotalChildrenCountKnown = null + ) + { + throw new NotImplementedException(); } + + #endregion } diff --git a/Core/Core/Transports/Server.cs b/Core/Core/Transports/Server.cs index 7689514077..9d7ceda973 100644 --- a/Core/Core/Transports/Server.cs +++ b/Core/Core/Transports/Server.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.IO; @@ -6,224 +6,284 @@ using System.Linq; using System.Net; using System.Net.Http; +using System.Net.Http.Headers; using System.Text; using System.Threading; using System.Threading.Tasks; using System.Timers; -using Serilog; using Speckle.Core.Credentials; using Speckle.Core.Helpers; using Speckle.Core.Logging; using Speckle.Newtonsoft.Json; +using Timer = System.Timers.Timer; -namespace Speckle.Core.Transports +namespace Speckle.Core.Transports; + +/// +/// Sends data to a speckle server. +/// +public class ServerTransportV1 : IDisposable, ICloneable, ITransport { - /// - /// Sends data to a speckle server. 
- /// - public class ServerTransportV1 : IDisposable, ICloneable, ITransport - { - public string TransportName { get; set; } = "RemoteTransport"; - public Dictionary TransportContext => - new Dictionary - { - { "name", TransportName }, - { "type", this.GetType().Name }, - { "streamId", StreamId }, - { "serverUrl", BaseUri } - }; + private int DOWNLOAD_BATCH_SIZE = 1000; - public CancellationToken CancellationToken { get; set; } + private bool IS_WRITING = false; - public string BaseUri { get; private set; } + private int MAX_BUFFER_SIZE = 1_000_000; - public string StreamId { get; set; } + private int MAX_MULTIPART_COUNT = 50; - private HttpClient Client { get; set; } + private ConcurrentQueue<(string, string, int)> Queue = new(); - private ConcurrentQueue<(string, string, int)> Queue = - new ConcurrentQueue<(string, string, int)>(); + private int TotalElapsed = 0, + PollInterval = 100; - private System.Timers.Timer WriteTimer; + private Timer WriteTimer; - private int TotalElapsed = 0, - PollInterval = 100; + public ServerTransportV1(Account account, string streamId, int timeoutSeconds = 60) + { + Account = account; + Initialize(account.serverInfo.url, streamId, account.token, timeoutSeconds); + } - private bool IS_WRITING = false; + public string BaseUri { get; private set; } - private int MAX_BUFFER_SIZE = 1_000_000; + public string StreamId { get; set; } - private int MAX_MULTIPART_COUNT = 50; + private HttpClient Client { get; set; } - private int DOWNLOAD_BATCH_SIZE = 1000; + public bool CompressPayloads { get; set; } = true; - public bool CompressPayloads { get; set; } = true; + public int TotalSentBytes { get; set; } = 0; - public int SavedObjectCount { get; private set; } = 0; + public Account Account { get; set; } - public int TotalSentBytes { get; set; } = 0; + public object Clone() + { + return new ServerTransport(Account, StreamId) + { + OnErrorAction = OnErrorAction, + OnProgressAction = OnProgressAction, + CancellationToken = CancellationToken + }; + 
} - public Action OnProgressAction { get; set; } + public void Dispose() + { + // TODO: check if it's writing first? + Client?.Dispose(); + WriteTimer.Dispose(); + } - public Action OnErrorAction { get; set; } + public string TransportName { get; set; } = "RemoteTransport"; - public Account Account { get; set; } + public Dictionary TransportContext => + new() + { + { "name", TransportName }, + { "type", GetType().Name }, + { "streamId", StreamId }, + { "serverUrl", BaseUri } + }; - // not implementing this for V1, just a dummy 0 value - public TimeSpan Elapsed => TimeSpan.Zero; + public CancellationToken CancellationToken { get; set; } - public ServerTransportV1(Account account, string streamId, int timeoutSeconds = 60) - { - Account = account; - Initialize(account.serverInfo.url, streamId, account.token, timeoutSeconds); - } + public int SavedObjectCount { get; private set; } = 0; - private void Initialize( - string baseUri, - string streamId, - string authorizationToken, - int timeoutSeconds = 60 - ) - { - SpeckleLog.Logger.Information("Initializing New Remote V1 Transport for {baseUri}", baseUri); + public Action OnProgressAction { get; set; } - BaseUri = baseUri; - StreamId = streamId; + public Action OnErrorAction { get; set; } - Client = Http.GetHttpProxyClient( - new SpeckleHttpClientHandler() - { - AutomaticDecompression = System.Net.DecompressionMethods.GZip, - } - ); + // not implementing this for V1, just a dummy 0 value + public TimeSpan Elapsed => TimeSpan.Zero; - Client.BaseAddress = new Uri(baseUri); - Client.Timeout = new TimeSpan(0, 0, timeoutSeconds); + public void BeginWrite() + { + if (!GetWriteCompletionStatus()) + throw new SpeckleException("Transport is still writing."); + TotalSentBytes = 0; + SavedObjectCount = 0; + } - if (authorizationToken.ToLowerInvariant().Contains("bearer")) - { - Client.DefaultRequestHeaders.Add("Authorization", authorizationToken); - } - else - { - Client.DefaultRequestHeaders.Add("Authorization", $"Bearer 
{authorizationToken}"); - } - WriteTimer = new System.Timers.Timer() - { - AutoReset = true, - Enabled = false, - Interval = PollInterval - }; - WriteTimer.Elapsed += WriteTimerElapsed; - } + public void EndWrite() { } - public void BeginWrite() + public async Task> HasObjects(List objectIds) + { + var payload = new Dictionary() { - if (!GetWriteCompletionStatus()) - { - throw new SpeckleException("Transport is still writing."); - } - TotalSentBytes = 0; - SavedObjectCount = 0; - } + { "objects", JsonConvert.SerializeObject(objectIds) } + }; + var uri = new Uri($"/api/diff/{StreamId}", UriKind.Relative); + var response = await Client + .PostAsync( + uri, + new StringContent(JsonConvert.SerializeObject(payload), Encoding.UTF8, "application/json"), + CancellationToken + ) + .ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var hasObjectsJson = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + var hasObjects = JsonConvert.DeserializeObject>(hasObjectsJson); + return hasObjects; + } + + private void Initialize( + string baseUri, + string streamId, + string authorizationToken, + int timeoutSeconds = 60 + ) + { + SpeckleLog.Logger.Information("Initializing New Remote V1 Transport for {baseUri}", baseUri); + + BaseUri = baseUri; + StreamId = streamId; - public void EndWrite() { } + Client = Http.GetHttpProxyClient( + new SpeckleHttpClientHandler() { AutomaticDecompression = DecompressionMethods.GZip } + ); - #region Writing objects + Client.BaseAddress = new Uri(baseUri); + Client.Timeout = new TimeSpan(0, 0, timeoutSeconds); - public async Task WriteComplete() + if (authorizationToken.ToLowerInvariant().Contains("bearer")) + Client.DefaultRequestHeaders.Add("Authorization", authorizationToken); + else + Client.DefaultRequestHeaders.Add("Authorization", $"Bearer {authorizationToken}"); + WriteTimer = new Timer() { - await Utilities.WaitUntil( + AutoReset = true, + Enabled = false, + Interval = PollInterval + }; + WriteTimer.Elapsed += 
WriteTimerElapsed; + } + + public override string ToString() + { + return $"Server Transport @{Account.serverInfo.url}"; + } + + internal class Placeholder + { + public Dictionary __closure { get; set; } = new(); + } + + #region Writing objects + + public async Task WriteComplete() + { + await Utilities + .WaitUntil( () => { return GetWriteCompletionStatus(); }, 50 - ); - } + ) + .ConfigureAwait(false); + } + + public bool GetWriteCompletionStatus() + { + return Queue.Count == 0 && !IS_WRITING; + } - public bool GetWriteCompletionStatus() + private void WriteTimerElapsed(object sender, ElapsedEventArgs e) + { + TotalElapsed += PollInterval; + + if (CancellationToken.IsCancellationRequested) { - return Queue.Count == 0 && !IS_WRITING; + Queue = new ConcurrentQueue<(string, string, int)>(); + IS_WRITING = false; + return; } - private void WriteTimerElapsed(object sender, ElapsedEventArgs e) + if (TotalElapsed > 300 && IS_WRITING == false && Queue.Count != 0) { - TotalElapsed += PollInterval; - - if (CancellationToken.IsCancellationRequested) - { - Queue = new ConcurrentQueue<(string, string, int)>(); - IS_WRITING = false; - return; - } - - if (TotalElapsed > 300 && IS_WRITING == false && Queue.Count != 0) - { - TotalElapsed = 0; - WriteTimer.Enabled = false; + TotalElapsed = 0; + WriteTimer.Enabled = false; #pragma warning disable CS4014 - ConsumeQueue(); + ConsumeQueue(); #pragma warning restore CS4014 - } } + } + + /// + /// Consumes a batch of objects from Queue, of MAX_BUFFER_SIZE or until queue is empty, and filters out the objects that already exist on the server + /// + /// + /// Tuple of: + /// - int: the number of objects consumed from the queue (useful to report progress) + /// - List<(string, string, int)>: List of queued objects that are not already on the server + /// + private async Task<(int, List<(string, string, int)>)> ConsumeNewBatch() + { + // Read a batch from the queue - /// - /// Consumes a batch of objects from Queue, of MAX_BUFFER_SIZE or 
until queue is empty, and filters out the objects that already exist on the server - /// - /// - /// Tuple of: - /// - int: the number of objects consumed from the queue (useful to report progress) - /// - List<(string, string, int)>: List of queued objects that are not already on the server - /// - private async Task<(int, List<(string, string, int)>)> ConsumeNewBatch() + List<(string, string, int)> queuedBatch = new(); + List queuedBatchIds = new(); + ValueTuple queueElement; + var payloadBufferSize = 0; + while (Queue.TryPeek(out queueElement) && payloadBufferSize < MAX_BUFFER_SIZE) { - // Read a batch from the queue + if (CancellationToken.IsCancellationRequested) + return (queuedBatch.Count, null); - List<(string, string, int)> queuedBatch = new List<(string, string, int)>(); - List queuedBatchIds = new List(); - ValueTuple queueElement; - var payloadBufferSize = 0; - while (Queue.TryPeek(out queueElement) && payloadBufferSize < MAX_BUFFER_SIZE) - { - if (CancellationToken.IsCancellationRequested) - { - return (queuedBatch.Count, null); - } + Queue.TryDequeue(out queueElement); + queuedBatch.Add(queueElement); + queuedBatchIds.Add(queueElement.Item1); + payloadBufferSize += queueElement.Item3; + } - Queue.TryDequeue(out queueElement); - queuedBatch.Add(queueElement); - queuedBatchIds.Add(queueElement.Item1); - payloadBufferSize += queueElement.Item3; - } + // Ask the server which objects from the batch it already has + Dictionary hasObjects = null; + try + { + hasObjects = await HasObjects(queuedBatchIds).ConfigureAwait(false); + } + catch (Exception e) + { + OnErrorAction?.Invoke(TransportName, e); + return (queuedBatch.Count, null); + } - // Ask the server which objects from the batch it already has - Dictionary hasObjects = null; - try - { - hasObjects = await HasObjects(queuedBatchIds); - } - catch (Exception e) - { - OnErrorAction?.Invoke(TransportName, e); - return (queuedBatch.Count, null); - } + // Filter the queued batch to only return new objects - 
// Filter the queued batch to only return new objects + List<(string, string, int)> newBatch = new(); + foreach (var queuedItem in queuedBatch) + if (!hasObjects.ContainsKey(queuedItem.Item1) || !hasObjects[queuedItem.Item1]) + newBatch.Add(queuedItem); - List<(string, string, int)> newBatch = new List<(string, string, int)>(); - foreach (var queuedItem in queuedBatch) - { - if (!hasObjects.ContainsKey(queuedItem.Item1) || !hasObjects[queuedItem.Item1]) - { - newBatch.Add(queuedItem); - } - } + return (queuedBatch.Count, newBatch); + } - return (queuedBatch.Count, newBatch); + private async Task ConsumeQueue() + { + if (CancellationToken.IsCancellationRequested) + { + Queue = new ConcurrentQueue<(string, string, int)>(); + IS_WRITING = false; + return; } - private async Task ConsumeQueue() + if (Queue.Count == 0) + return; + + IS_WRITING = true; + var message = new HttpRequestMessage() + { + RequestUri = new Uri($"/objects/{StreamId}", UriKind.Relative), + Method = HttpMethod.Post + }; + + var multipart = new MultipartFormDataContent("--obj--"); + + SavedObjectCount = 0; + var addedMpCount = 0; + + while (addedMpCount < MAX_MULTIPART_COUNT && Queue.Count != 0) { if (CancellationToken.IsCancellationRequested) { @@ -232,423 +292,334 @@ private async Task ConsumeQueue() return; } - if (Queue.Count == 0) + (int consumedQueuedObjects, List<(string, string, int)> batch) = await ConsumeNewBatch() + .ConfigureAwait(false); + if (batch == null) { + // Canceled or error happened (which was already reported) + Queue = new ConcurrentQueue<(string, string, int)>(); + IS_WRITING = false; return; } - IS_WRITING = true; - var message = new HttpRequestMessage() + if (batch.Count == 0) { - RequestUri = new Uri($"/objects/{StreamId}", UriKind.Relative), - Method = HttpMethod.Post - }; - - var multipart = new MultipartFormDataContent("--obj--"); - - SavedObjectCount = 0; - var addedMpCount = 0; - - while (addedMpCount < MAX_MULTIPART_COUNT && Queue.Count != 0) - { - if 
(CancellationToken.IsCancellationRequested) - { - Queue = new ConcurrentQueue<(string, string, int)>(); - IS_WRITING = false; - return; - } - - (int consumedQueuedObjects, List<(string, string, int)> batch) = await ConsumeNewBatch(); - if (batch == null) - { - // Canceled or error happened (which was already reported) - Queue = new ConcurrentQueue<(string, string, int)>(); - IS_WRITING = false; - return; - } - - if (batch.Count == 0) - { - // The server already has all objects from the queued batch - SavedObjectCount += consumedQueuedObjects; - continue; - } - - var _ctBuilder = new StringBuilder("["); - for (int i = 0; i < batch.Count; i++) - { - if (i > 0) - { - _ctBuilder.Append(","); - } - _ctBuilder.Append(batch[i].Item2); - TotalSentBytes += batch[i].Item3; - } - _ctBuilder.Append("]"); - String _ct = _ctBuilder.ToString(); - - if (CompressPayloads) - { - var content = new GzipContent(new StringContent(_ct, Encoding.UTF8)); - content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue( - "application/gzip" - ); - multipart.Add(content, $"batch-{addedMpCount}", $"batch-{addedMpCount}"); - } - else - { - multipart.Add( - new StringContent(_ct, Encoding.UTF8), - $"batch-{addedMpCount}", - $"batch-{addedMpCount}" - ); - } - - addedMpCount++; + // The server already has all objects from the queued batch SavedObjectCount += consumedQueuedObjects; + continue; } - message.Content = multipart; - - if (CancellationToken.IsCancellationRequested) + var _ctBuilder = new StringBuilder("["); + for (int i = 0; i < batch.Count; i++) { - Queue = new ConcurrentQueue<(string, string, int)>(); - IS_WRITING = false; - return; + if (i > 0) + _ctBuilder.Append(","); + _ctBuilder.Append(batch[i].Item2); + TotalSentBytes += batch[i].Item3; } + _ctBuilder.Append("]"); + string _ct = _ctBuilder.ToString(); - if (addedMpCount > 0) + if (CompressPayloads) { - try - { - var response = await Client.SendAsync(message, CancellationToken); - 
response.EnsureSuccessStatusCode(); - } - catch (Exception e) - { - IS_WRITING = false; - OnErrorAction?.Invoke( - TransportName, - new Exception( - $"Remote error: {Account.serverInfo.url} is not reachable. \n {e.Message}", - e - ) - ); - - Queue = new ConcurrentQueue<(string, string, int)>(); - return; - } + var content = new GzipContent(new StringContent(_ct, Encoding.UTF8)); + content.Headers.ContentType = new MediaTypeHeaderValue("application/gzip"); + multipart.Add(content, $"batch-{addedMpCount}", $"batch-{addedMpCount}"); } - - IS_WRITING = false; - - OnProgressAction?.Invoke(TransportName, SavedObjectCount); - - if (!WriteTimer.Enabled) + else { - WriteTimer.Enabled = true; - WriteTimer.Start(); + multipart.Add( + new StringContent(_ct, Encoding.UTF8), + $"batch-{addedMpCount}", + $"batch-{addedMpCount}" + ); } + + addedMpCount++; + SavedObjectCount += consumedQueuedObjects; } - public void SaveObject(string hash, string serializedObject) + message.Content = multipart; + + if (CancellationToken.IsCancellationRequested) { - if (CancellationToken.IsCancellationRequested) + Queue = new ConcurrentQueue<(string, string, int)>(); + IS_WRITING = false; + return; + } + + if (addedMpCount > 0) + try + { + var response = await Client.SendAsync(message, CancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + } + catch (Exception e) { - Queue = new ConcurrentQueue<(string, string, int)>(); IS_WRITING = false; + OnErrorAction?.Invoke( + TransportName, + new Exception( + $"Remote error: {Account.serverInfo.url} is not reachable. 
\n {e.Message}", + e + ) + ); + + Queue = new ConcurrentQueue<(string, string, int)>(); return; } - Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + IS_WRITING = false; - if (!WriteTimer.Enabled && !IS_WRITING) - { - WriteTimer.Enabled = true; - WriteTimer.Start(); - } + OnProgressAction?.Invoke(TransportName, SavedObjectCount); + + if (!WriteTimer.Enabled) + { + WriteTimer.Enabled = true; + WriteTimer.Start(); } + } - public void SaveObject(string hash, ITransport sourceTransport) + public void SaveObject(string hash, string serializedObject) + { + if (CancellationToken.IsCancellationRequested) { - if (CancellationToken.IsCancellationRequested) - { - Queue = new ConcurrentQueue<(string, string, int)>(); - IS_WRITING = false; - return; - } + Queue = new ConcurrentQueue<(string, string, int)>(); + IS_WRITING = false; + return; + } - var serializedObject = sourceTransport.GetObject(hash); + Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); - Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + if (!WriteTimer.Enabled && !IS_WRITING) + { + WriteTimer.Enabled = true; + WriteTimer.Start(); + } + } - if (!WriteTimer.Enabled && !IS_WRITING) - { - WriteTimer.Enabled = true; - WriteTimer.Start(); - } + public void SaveObject(string hash, ITransport sourceTransport) + { + if (CancellationToken.IsCancellationRequested) + { + Queue = new ConcurrentQueue<(string, string, int)>(); + IS_WRITING = false; + return; } - #endregion + var serializedObject = sourceTransport.GetObject(hash); - #region Getting objects + Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); - public string GetObject(string hash) + if (!WriteTimer.Enabled && !IS_WRITING) { - if (CancellationToken.IsCancellationRequested) - { - Queue = new ConcurrentQueue<(string, string, int)>(); - return null; - } + WriteTimer.Enabled = true; + WriteTimer.Start(); + } + } - var 
message = new HttpRequestMessage() - { - RequestUri = new Uri($"/objects/{StreamId}/{hash}/single", UriKind.Relative), - Method = HttpMethod.Get, - }; + #endregion - var response = Client - .SendAsync(message, HttpCompletionOption.ResponseContentRead, CancellationToken) - .Result.Content; - return response.ReadAsStringAsync().Result; + #region Getting objects + + public string GetObject(string hash) + { + if (CancellationToken.IsCancellationRequested) + { + Queue = new ConcurrentQueue<(string, string, int)>(); + return null; } - public async Task CopyObjectAndChildren( - string hash, - ITransport targetTransport, - Action onTotalChildrenCountKnown - ) + var message = new HttpRequestMessage() { - if (CancellationToken.IsCancellationRequested) - { - Queue = new ConcurrentQueue<(string, string, int)>(); - return null; - } + RequestUri = new Uri($"/objects/{StreamId}/{hash}/single", UriKind.Relative), + Method = HttpMethod.Get + }; + + var response = Client + .SendAsync(message, HttpCompletionOption.ResponseContentRead, CancellationToken) + .Result.Content; + return response.ReadAsStringAsync().Result; + } - // Get root object - var rootHttpMessage = new HttpRequestMessage() - { - RequestUri = new Uri($"/objects/{StreamId}/{hash}/single", UriKind.Relative), - Method = HttpMethod.Get, - }; + public async Task CopyObjectAndChildren( + string hash, + ITransport targetTransport, + Action onTotalChildrenCountKnown + ) + { + if (CancellationToken.IsCancellationRequested) + { + Queue = new ConcurrentQueue<(string, string, int)>(); + return null; + } - HttpResponseMessage rootHttpResponse = null; - try - { - rootHttpResponse = await Client.SendAsync( - rootHttpMessage, - HttpCompletionOption.ResponseContentRead, - CancellationToken - ); - rootHttpResponse.EnsureSuccessStatusCode(); - } - catch (Exception e) - { - OnErrorAction?.Invoke(TransportName, e); - return null; - } + // Get root object + var rootHttpMessage = new HttpRequestMessage() + { + RequestUri = new 
Uri($"/objects/{StreamId}/{hash}/single", UriKind.Relative), + Method = HttpMethod.Get + }; - String rootObjectStr = await rootHttpResponse.Content.ReadAsStringAsync(); - List childrenIds = new List(); - var rootPartial = JsonConvert.DeserializeObject(rootObjectStr); - if (rootPartial.__closure != null) - { - childrenIds = new List(rootPartial.__closure.Keys); - } - onTotalChildrenCountKnown?.Invoke(childrenIds.Count); + HttpResponseMessage rootHttpResponse = null; + try + { + rootHttpResponse = await Client + .SendAsync(rootHttpMessage, HttpCompletionOption.ResponseContentRead, CancellationToken) + .ConfigureAwait(false); + rootHttpResponse.EnsureSuccessStatusCode(); + } + catch (Exception e) + { + OnErrorAction?.Invoke(TransportName, e); + return null; + } - var childrenFoundMap = await targetTransport.HasObjects(childrenIds); - List newChildrenIds = new List( - from objId in childrenFoundMap.Keys - where !childrenFoundMap[objId] - select objId - ); + string rootObjectStr = await rootHttpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); + List childrenIds = new(); + var rootPartial = JsonConvert.DeserializeObject(rootObjectStr); + if (rootPartial.__closure != null) + childrenIds = new List(rootPartial.__closure.Keys); + onTotalChildrenCountKnown?.Invoke(childrenIds.Count); - targetTransport.BeginWrite(); + var childrenFoundMap = await targetTransport.HasObjects(childrenIds).ConfigureAwait(false); + List newChildrenIds = + new(from objId in childrenFoundMap.Keys where !childrenFoundMap[objId] select objId); - // Get the children that are not already in the targetTransport - List childrenIdBatch = new List(DOWNLOAD_BATCH_SIZE); - bool downloadBatchResult; - foreach (var objectId in newChildrenIds) - { - childrenIdBatch.Add(objectId); - if (childrenIdBatch.Count >= DOWNLOAD_BATCH_SIZE) - { - downloadBatchResult = await CopyObjects(childrenIdBatch, targetTransport); - if (!downloadBatchResult) - return null; - childrenIdBatch = new 
List(DOWNLOAD_BATCH_SIZE); - } - } - if (childrenIdBatch.Count > 0) + targetTransport.BeginWrite(); + + // Get the children that are not already in the targetTransport + List childrenIdBatch = new(DOWNLOAD_BATCH_SIZE); + bool downloadBatchResult; + foreach (var objectId in newChildrenIds) + { + childrenIdBatch.Add(objectId); + if (childrenIdBatch.Count >= DOWNLOAD_BATCH_SIZE) { - downloadBatchResult = await CopyObjects(childrenIdBatch, targetTransport); + downloadBatchResult = await CopyObjects(childrenIdBatch, targetTransport) + .ConfigureAwait(false); if (!downloadBatchResult) return null; + childrenIdBatch = new List(DOWNLOAD_BATCH_SIZE); } - - targetTransport.SaveObject(hash, rootObjectStr); - await targetTransport.WriteComplete(); - return rootObjectStr; } - - private async Task CopyObjects(List hashes, ITransport targetTransport) + if (childrenIdBatch.Count > 0) { - Stream childrenStream = null; + downloadBatchResult = await CopyObjects(childrenIdBatch, targetTransport) + .ConfigureAwait(false); + if (!downloadBatchResult) + return null; + } - if (hashes.Count > 0) + targetTransport.SaveObject(hash, rootObjectStr); + await targetTransport.WriteComplete().ConfigureAwait(false); + return rootObjectStr; + } + + private async Task CopyObjects(List hashes, ITransport targetTransport) + { + Stream childrenStream = null; + + if (hashes.Count > 0) + { + var childrenHttpMessage = new HttpRequestMessage() { - var childrenHttpMessage = new HttpRequestMessage() - { - RequestUri = new Uri($"/api/getobjects/{StreamId}", UriKind.Relative), - Method = HttpMethod.Post, - }; + RequestUri = new Uri($"/api/getobjects/{StreamId}", UriKind.Relative), + Method = HttpMethod.Post + }; - Dictionary postParameters = new Dictionary(); - postParameters.Add("objects", JsonConvert.SerializeObject(hashes)); - childrenHttpMessage.Content = new FormUrlEncodedContent(postParameters); - childrenHttpMessage.Headers.Add("Accept", "text/plain"); + Dictionary postParameters = new(); + 
postParameters.Add("objects", JsonConvert.SerializeObject(hashes)); + childrenHttpMessage.Content = new FormUrlEncodedContent(postParameters); + childrenHttpMessage.Headers.Add("Accept", "text/plain"); - HttpResponseMessage childrenHttpResponse = null; - try - { - childrenHttpResponse = await Client.SendAsync( + HttpResponseMessage childrenHttpResponse = null; + try + { + childrenHttpResponse = await Client + .SendAsync( childrenHttpMessage, HttpCompletionOption.ResponseHeadersRead, CancellationToken - ); - childrenHttpResponse.EnsureSuccessStatusCode(); - } - catch (Exception e) - { - OnErrorAction?.Invoke(TransportName, e); - return false; - } - - childrenStream = await childrenHttpResponse.Content.ReadAsStreamAsync(); + ) + .ConfigureAwait(false); + childrenHttpResponse.EnsureSuccessStatusCode(); } - else - { - childrenStream = new MemoryStream(); - } - - using (var stream = childrenStream) + catch (Exception e) { - using (var reader = new StreamReader(stream, Encoding.UTF8)) - { - string line; - while ((line = reader.ReadLine()) != null) - { - if (CancellationToken.IsCancellationRequested) - { - Queue = new ConcurrentQueue<(string, string, int)>(); - return false; - } - - var pcs = line.Split(new char[] { '\t' }, count: 2); - targetTransport.SaveObject(pcs[0], pcs[1]); - - OnProgressAction?.Invoke(TransportName, 1); // possibly make this more friendly - } - } + OnErrorAction?.Invoke(TransportName, e); + return false; } - return true; + childrenStream = await childrenHttpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false); } - - #endregion - - public override string ToString() + else { - return $"Server Transport @{Account.serverInfo.url}"; + childrenStream = new MemoryStream(); } - public async Task> HasObjects(List objectIds) + using (var stream = childrenStream) + using (var reader = new StreamReader(stream, Encoding.UTF8)) { - var payload = new Dictionary() + string line; + while ((line = reader.ReadLine()) != null) { - { "objects", 
JsonConvert.SerializeObject(objectIds) } - }; - var uri = new Uri($"/api/diff/{StreamId}", UriKind.Relative); - var response = await Client.PostAsync( - uri, - new StringContent(JsonConvert.SerializeObject(payload), Encoding.UTF8, "application/json"), - CancellationToken - ); - response.EnsureSuccessStatusCode(); - - var hasObjectsJson = await response.Content.ReadAsStringAsync(); - var hasObjects = JsonConvert.DeserializeObject>(hasObjectsJson); - return hasObjects; - } + if (CancellationToken.IsCancellationRequested) + { + Queue = new ConcurrentQueue<(string, string, int)>(); + return false; + } - public void Dispose() - { - // TODO: check if it's writing first? - Client?.Dispose(); - WriteTimer.Dispose(); - } + var pcs = line.Split(new char[] { '\t' }, 2); + targetTransport.SaveObject(pcs[0], pcs[1]); - public object Clone() - { - return new ServerTransport(Account, StreamId) - { - OnErrorAction = OnErrorAction, - OnProgressAction = OnProgressAction, - CancellationToken = CancellationToken - }; + OnProgressAction?.Invoke(TransportName, 1); // possibly make this more friendly + } } - internal class Placeholder - { - public Dictionary __closure { get; set; } = new Dictionary(); - } + return true; } - /// - /// https://cymbeline.ch/2014/03/16/gzip-encoding-an-http-post-request-body/ - /// - internal sealed class GzipContent : HttpContent - { - private readonly HttpContent content; + #endregion +} - public GzipContent(HttpContent content) - { - if (content == null) - { - return; - } +/// +/// https://cymbeline.ch/2014/03/16/gzip-encoding-an-http-post-request-body/ +/// +internal sealed class GzipContent : HttpContent +{ + private readonly HttpContent content; - this.content = content; + public GzipContent(HttpContent content) + { + if (content == null) + return; - // Keep the original content's headers ... 
- if (content != null) - foreach (KeyValuePair> header in content.Headers) - { - Headers.TryAddWithoutValidation(header.Key, header.Value); - } + this.content = content; - // ... and let the server know we've Gzip-compressed the body of this request. - Headers.ContentEncoding.Add("gzip"); - } + // Keep the original content's headers ... + if (content != null) + foreach (KeyValuePair> header in content.Headers) + Headers.TryAddWithoutValidation(header.Key, header.Value); - protected override async Task SerializeToStreamAsync(Stream stream, TransportContext context) - { - // Open a GZipStream that writes to the specified output stream. - using (GZipStream gzip = new GZipStream(stream, CompressionMode.Compress, true)) - { - // Copy all the input content to the GZip stream. - if (content != null) - await content.CopyToAsync(gzip); - else - await (new System.Net.Http.StringContent(string.Empty)).CopyToAsync(gzip); - } - } + // ... and let the server know we've Gzip-compressed the body of this request. + Headers.ContentEncoding.Add("gzip"); + } - protected override bool TryComputeLength(out long length) - { - length = -1; - return false; - } + protected override async Task SerializeToStreamAsync(Stream stream, TransportContext context) + { + // Open a GZipStream that writes to the specified output stream. + using (GZipStream gzip = new(stream, CompressionMode.Compress, true)) + // Copy all the input content to the GZip stream. 
+ if (content != null) + await content.CopyToAsync(gzip).ConfigureAwait(false); + else + await new StringContent(string.Empty).CopyToAsync(gzip).ConfigureAwait(false); + } + + protected override bool TryComputeLength(out long length) + { + length = -1; + return false; } } diff --git a/Core/Core/Transports/ServerUtils/GzipContent.cs b/Core/Core/Transports/ServerUtils/GzipContent.cs index 7a48b74e15..0f8723c3ba 100644 --- a/Core/Core/Transports/ServerUtils/GzipContent.cs +++ b/Core/Core/Transports/ServerUtils/GzipContent.cs @@ -1,59 +1,52 @@ -using System; using System.Collections.Generic; using System.IO; using System.IO.Compression; using System.Net; using System.Net.Http; -using System.Text; using System.Threading.Tasks; -namespace Speckle.Core.Transports.ServerUtils +namespace Speckle.Core.Transports.ServerUtils; + +/// +/// https://cymbeline.ch/2014/03/16/gzip-encoding-an-http-post-request-body/ +/// +internal sealed class GzipContent : HttpContent { - /// - /// https://cymbeline.ch/2014/03/16/gzip-encoding-an-http-post-request-body/ - /// - internal sealed class GzipContent : HttpContent + private readonly HttpContent content; + + public GzipContent(HttpContent content) { - private readonly HttpContent content; + if (content == null) + return; - public GzipContent(HttpContent content) - { - if (content == null) - { - return; - } + this.content = content; - this.content = content; + // Keep the original content's headers ... + if (content != null) + foreach (KeyValuePair> header in content.Headers) + Headers.TryAddWithoutValidation(header.Key, header.Value); - // Keep the original content's headers ... - if (content != null) - foreach (KeyValuePair> header in content.Headers) - { - Headers.TryAddWithoutValidation(header.Key, header.Value); - } - - // ... and let the server know we've Gzip-compressed the body of this request. - Headers.ContentEncoding.Add("gzip"); - } + // ... and let the server know we've Gzip-compressed the body of this request. 
+ Headers.ContentEncoding.Add("gzip"); + } - protected override async Task SerializeToStreamAsync(Stream stream, TransportContext context) + protected override async Task SerializeToStreamAsync(Stream stream, TransportContext context) + { + // Open a GZipStream that writes to the specified output stream. + using (GZipStream gzip = new(stream, CompressionMode.Compress, true)) { - // Open a GZipStream that writes to the specified output stream. - using (GZipStream gzip = new GZipStream(stream, CompressionMode.Compress, true)) - { - // Copy all the input content to the GZip stream. - if (content != null) - await content.CopyToAsync(gzip); - else - await (new System.Net.Http.StringContent(string.Empty)).CopyToAsync(gzip); - await gzip.FlushAsync(); - } + // Copy all the input content to the GZip stream. + if (content != null) + await content.CopyToAsync(gzip).ConfigureAwait(false); + else + await new StringContent(string.Empty).CopyToAsync(gzip).ConfigureAwait(false); + await gzip.FlushAsync().ConfigureAwait(false); } + } - protected override bool TryComputeLength(out long length) - { - length = -1; - return false; - } + protected override bool TryComputeLength(out long length) + { + length = -1; + return false; } } diff --git a/Core/Core/Transports/ServerUtils/IServerApi.cs b/Core/Core/Transports/ServerUtils/IServerApi.cs index ea5f6d62cd..8bc52ca531 100644 --- a/Core/Core/Transports/ServerUtils/IServerApi.cs +++ b/Core/Core/Transports/ServerUtils/IServerApi.cs @@ -1,25 +1,30 @@ -using System; using System.Collections.Generic; -using System.Text; using System.Threading.Tasks; -namespace Speckle.Core.Transports.ServerUtils -{ - public delegate void CbObjectDownloaded(string id, string json); - public delegate void CbBlobdDownloaded(); +namespace Speckle.Core.Transports.ServerUtils; + +public delegate void CbObjectDownloaded(string id, string json); +public delegate void CbBlobdDownloaded(); - internal interface IServerApi - { - public Task DownloadSingleObject(string 
streamId, string objectId); +internal interface IServerApi +{ + public Task DownloadSingleObject(string streamId, string objectId); - public Task DownloadObjects(string streamId, List objectIds, CbObjectDownloaded onObjectCallback); + public Task DownloadObjects( + string streamId, + List objectIds, + CbObjectDownloaded onObjectCallback + ); - public Task> HasObjects(string streamId, List objectIds); + public Task> HasObjects(string streamId, List objectIds); - public Task UploadObjects(string streamId, List<(string, string)> objects); + public Task UploadObjects(string streamId, List<(string, string)> objects); - public Task UploadBlobs(string streamId, List<(string, string)> objects); + public Task UploadBlobs(string streamId, List<(string, string)> objects); - public Task DownloadBlobs(string streamId, List blobIds, CbBlobdDownloaded onBlobCallback); - } + public Task DownloadBlobs( + string streamId, + List blobIds, + CbBlobdDownloaded onBlobCallback + ); } diff --git a/Core/Core/Transports/ServerUtils/ParallelServerAPI.cs b/Core/Core/Transports/ServerUtils/ParallelServerAPI.cs index c5ab8d1b28..f7f9207430 100644 --- a/Core/Core/Transports/ServerUtils/ParallelServerAPI.cs +++ b/Core/Core/Transports/ServerUtils/ParallelServerAPI.cs @@ -1,307 +1,348 @@ -using System; +using System; using System.Collections.Concurrent; using System.Collections.Generic; -using System.Diagnostics; -using System.IO; using System.Linq; -using System.Text; using System.Threading; using System.Threading.Tasks; -namespace Speckle.Core.Transports.ServerUtils +namespace Speckle.Core.Transports.ServerUtils; + +internal enum ServerApiOperation { - internal enum ServerApiOperation - { - DownloadSingleObject, - DownloadObjects, - HasObjects, - UploadObjects, - UploadBlobs, - DownloadBlobs, - HasBlobs, - _NoOp - } + DownloadSingleObject, + DownloadObjects, + HasObjects, + UploadObjects, + UploadBlobs, + DownloadBlobs, + HasBlobs, + _NoOp +} - public class ParallelServerApi : IDisposable, 
IServerApi - { - private List Threads = new List(); +public class ParallelServerApi : IDisposable, IServerApi +{ + private string AuthToken; - private string BaseUri; - private string AuthToken; - private int TimeoutSeconds; - public CancellationToken CancellationToken { get; set; } - public int NumThreads { get; set; } - public bool CompressPayloads { get; set; } = true; + private string BaseUri; - private object CallbackLock = new object(); - public Action OnBatchSent { get; set; } + private object CallbackLock = new(); - private BlockingCollection<(ServerApiOperation, object, TaskCompletionSource)> Tasks; + private BlockingCollection<(ServerApiOperation, object, TaskCompletionSource)> Tasks; + private List Threads = new(); + private int TimeoutSeconds; - public string BlobStorageFolder { get; set; } + public ParallelServerApi( + string baseUri, + string authorizationToken, + string blobStorageFolder, + int timeoutSeconds = 60, + int numThreads = 4, + int numBufferedOperations = 8 + ) + { + BaseUri = baseUri; + AuthToken = authorizationToken; + TimeoutSeconds = timeoutSeconds; + NumThreads = numThreads; + CancellationToken = CancellationToken.None; - public ParallelServerApi(string baseUri, string authorizationToken, string blobStorageFolder, int timeoutSeconds = 60, int numThreads = 4, int numBufferedOperations = 8) - { - BaseUri = baseUri; - AuthToken = authorizationToken; - TimeoutSeconds = timeoutSeconds; - NumThreads = numThreads; - CancellationToken = CancellationToken.None; + BlobStorageFolder = blobStorageFolder; - BlobStorageFolder = blobStorageFolder; + Tasks = new BlockingCollection<(ServerApiOperation, object, TaskCompletionSource)>( + numBufferedOperations + ); + } - Tasks = new BlockingCollection<(ServerApiOperation, object, TaskCompletionSource)>(numBufferedOperations); - } + public CancellationToken CancellationToken { get; set; } + public int NumThreads { get; set; } + public bool CompressPayloads { get; set; } = true; + public Action OnBatchSent 
{ get; set; } - public void EnsureStarted() - { - if (Threads.Count == 0) - Start(); - } + public string BlobStorageFolder { get; set; } - public void Start() - { - if (Threads.Count > 0) - throw new Exception("ServerAPI: Threads already started"); - for (int i = 0; i < NumThreads; i++) - { - Thread t = new Thread(new ThreadStart(ThreadMain)); - t.Name = $"ParallelServerAPI"; - t.IsBackground = true; - Threads.Add(t); - t.Start(); - } - } + public void Dispose() + { + EnsureStopped(); + Tasks.Dispose(); + } - public void EnsureStopped() + public async Task> HasObjects(string streamId, List objectIds) + { + EnsureStarted(); + List> tasks = new(); + List> splitObjectsIds; + if (objectIds.Count <= 50) + splitObjectsIds = new List>() { objectIds }; + else + splitObjectsIds = SplitList(objectIds, NumThreads); + + for (int i = 0; i < NumThreads; i++) { - if (Threads.Count > 0) - Stop(); + if (splitObjectsIds.Count <= i || splitObjectsIds[i].Count == 0) + continue; + Task op = QueueOperation( + ServerApiOperation.HasObjects, + (streamId, splitObjectsIds[i]) + ); + tasks.Add(op); } - - public void Stop() + Dictionary ret = new(); + foreach (Task task in tasks) { - if (Threads.Count == 0) - throw new Exception("ServerAPI: Threads not started"); - foreach (Thread t in Threads) - { - Tasks.Add((ServerApiOperation._NoOp, null, null)); - } - foreach (Thread t in Threads) - { - t.Join(); - } - Threads = new List(); + Dictionary taskResult = + await task.ConfigureAwait(false) as Dictionary; + foreach (KeyValuePair kv in taskResult) + ret[kv.Key] = kv.Value; } - private void ThreadMain() - { - using (ServerApi serialApi = new ServerApi(BaseUri, AuthToken, BlobStorageFolder, TimeoutSeconds)) - { - serialApi.OnBatchSent = (num, size) => { lock (CallbackLock) OnBatchSent(num, size); }; - serialApi.CancellationToken = CancellationToken; - serialApi.CompressPayloads = CompressPayloads; + return ret; + } - while (true) - { - (ServerApiOperation operation, object inputValue, 
TaskCompletionSource tcs) = Tasks.Take(); - if (tcs == null) - { - return; - } + public async Task DownloadSingleObject(string streamId, string objectId) + { + EnsureStarted(); + Task op = QueueOperation(ServerApiOperation.DownloadSingleObject, (streamId, objectId)); + object result = await op.ConfigureAwait(false); + return result as string; + } - try - { - switch (operation) - { - case ServerApiOperation.DownloadSingleObject: - (string dsoStreamId, string dsoObjectId) = ((string, string))inputValue; - var dsoResult = serialApi.DownloadSingleObject(dsoStreamId, dsoObjectId).Result; - tcs.SetResult(dsoResult); - break; - case ServerApiOperation.DownloadObjects: - (string doStreamId, List doObjectIds, CbObjectDownloaded doCallback) = ((string, List, CbObjectDownloaded))inputValue; - serialApi.DownloadObjects(doStreamId, doObjectIds, doCallback).Wait(); - // TODO: pass errors? - tcs.SetResult(null); - break; - case ServerApiOperation.HasObjects: - (string hoStreamId, List hoObjectIds) = ((string, List))inputValue; - var hoResult = serialApi.HasObjects(hoStreamId, hoObjectIds).Result; - tcs.SetResult(hoResult); - break; - case ServerApiOperation.UploadObjects: - (string uoStreamId, List<(string, string)> uoObjects) = ((string, List<(string, string)>))inputValue; - serialApi.UploadObjects(uoStreamId, uoObjects).Wait(); - // TODO: pass errors? 
- tcs.SetResult(null); - break; - case ServerApiOperation.UploadBlobs: - (string ubStreamId, List<(string, string)> ubBlobs) = ((string, List<(string, string)>))inputValue; - serialApi.UploadBlobs(ubStreamId, ubBlobs).Wait(); - tcs.SetResult(null); - break; - case ServerApiOperation.HasBlobs: - (string hbStreamId, List<(string, string)> hBlobs) = ((string, List<(string, string)>))inputValue; - var hasBlobResult = serialApi.HasBlobs(hbStreamId, hBlobs.Select(b => b.Item1.Split(':')[1]).ToList()).Result; - tcs.SetResult(hasBlobResult); - break; - case ServerApiOperation.DownloadBlobs: - (string dbStreamId, List blobIds, CbBlobdDownloaded cb) = ((string, List, CbBlobdDownloaded))inputValue; - serialApi.DownloadBlobs(dbStreamId, blobIds, cb).Wait(); - tcs.SetResult(null); - break; - } - } - catch (Exception e) - { - tcs.SetException(e); - } + public async Task DownloadObjects( + string streamId, + List objectIds, + CbObjectDownloaded onObjectCallback + ) + { + // Stopwatch sw = new Stopwatch(); sw.Start(); // TODO: remove - } - } - } + EnsureStarted(); + List> tasks = new(); + List> splitObjectsIds = SplitList(objectIds, NumThreads); + object callbackLock = new(); - private Task QueueOperation(ServerApiOperation operation, object inputValue) + CbObjectDownloaded callbackWrapper = (string id, string json) => { - TaskCompletionSource tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously); - Tasks.Add((operation, inputValue, tcs)); - return tcs.Task; - } + lock (callbackLock) + onObjectCallback(id, json); + }; - private List> SplitList(List list, int parts) + for (int i = 0; i < NumThreads; i++) { - List> ret = new List>(parts); - for (int i = 0; i < parts; i++) - ret.Add(new List(list.Count / parts + 1)); - for (int i = 0; i < list.Count; i++) - ret[i % parts].Add(list[i]); - return ret; + if (splitObjectsIds[i].Count == 0) + continue; + Task op = QueueOperation( + ServerApiOperation.DownloadObjects, + (streamId, splitObjectsIds[i], 
callbackWrapper) + ); + tasks.Add(op); } + await Task.WhenAll(tasks.ToArray()).ConfigureAwait(false); + // Console.WriteLine($"ParallelServerApi::DownloadObjects({objectIds.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); + } - public async Task> HasObjects(string streamId, List objectIds) - { - EnsureStarted(); - List> tasks = new List>(); - List> splitObjectsIds; - if (objectIds.Count <= 50) - splitObjectsIds = new List>() { objectIds }; - else - splitObjectsIds = SplitList(objectIds, NumThreads); - - for (int i = 0; i < NumThreads; i++) - { - if (splitObjectsIds.Count <= i || splitObjectsIds[i].Count == 0) - continue; - Task op = QueueOperation(ServerApiOperation.HasObjects, (streamId, splitObjectsIds[i])); - tasks.Add(op); - } - Dictionary ret = new Dictionary(); - foreach (Task task in tasks) - { - Dictionary taskResult = (await task) as Dictionary; - foreach (KeyValuePair kv in taskResult) - ret[kv.Key] = kv.Value; - } + public async Task UploadObjects(string streamId, List<(string, string)> objects) + { + // Stopwatch sw = new Stopwatch(); sw.Start(); - return ret; - } + EnsureStarted(); + List> tasks = new(); + List> splitObjects; - public async Task DownloadSingleObject(string streamId, string objectId) + // request count optimization: if objects are < 500k, send in 1 request + int totalSize = 0; + foreach ((string id, string json) in objects) { - EnsureStarted(); - Task op = QueueOperation(ServerApiOperation.DownloadSingleObject, (streamId, objectId)); - object result = await op; - return result as string; + totalSize += json.Length; + if (totalSize >= 500000) + break; } + if (totalSize < 500000) + splitObjects = new List>() { objects }; + else + splitObjects = SplitList(objects, NumThreads); - public async Task DownloadObjects(string streamId, List objectIds, CbObjectDownloaded onObjectCallback) + for (int i = 0; i < NumThreads; i++) { - // Stopwatch sw = new Stopwatch(); sw.Start(); // TODO: remove + if (splitObjects.Count <= i || 
splitObjects[i].Count == 0) + continue; + Task op = QueueOperation( + ServerApiOperation.UploadObjects, + (streamId, splitObjects[i]) + ); + tasks.Add(op); + } + await Task.WhenAll(tasks.ToArray()).ConfigureAwait(false); + // Console.WriteLine($"ParallelServerApi::UploadObjects({objects.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); + } - EnsureStarted(); - List> tasks = new List>(); - List> splitObjectsIds = SplitList(objectIds, NumThreads); - object callbackLock = new object(); + public async Task UploadBlobs(string streamId, List<(string, string)> blobs) + { + EnsureStarted(); + Task op = QueueOperation(ServerApiOperation.UploadBlobs, (streamId, blobs)); + await op.ConfigureAwait(false); + } - CbObjectDownloaded callbackWrapper = (string id, string json) => - { - lock (callbackLock) - { - onObjectCallback(id, json); - } - }; + public async Task DownloadBlobs( + string streamId, + List blobIds, + CbBlobdDownloaded onBlobDownloaded + ) + { + EnsureStarted(); + Task op = QueueOperation( + ServerApiOperation.DownloadBlobs, + (streamId, blobIds, onBlobDownloaded) + ); + await op.ConfigureAwait(false); + } - for (int i = 0; i < NumThreads; i++) - { - if (splitObjectsIds[i].Count == 0) - continue; - Task op = QueueOperation(ServerApiOperation.DownloadObjects, (streamId, splitObjectsIds[i], callbackWrapper)); - tasks.Add(op); - } - await Task.WhenAll(tasks.ToArray()); - // Console.WriteLine($"ParallelServerApi::DownloadObjects({objectIds.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); + public void EnsureStarted() + { + if (Threads.Count == 0) + Start(); + } + public void Start() + { + if (Threads.Count > 0) + throw new Exception("ServerAPI: Threads already started"); + for (int i = 0; i < NumThreads; i++) + { + Thread t = new(new ThreadStart(ThreadMain)); + t.Name = $"ParallelServerAPI"; + t.IsBackground = true; + Threads.Add(t); + t.Start(); } + } - public async Task UploadObjects(string streamId, List<(string, string)> objects) - { - // 
Stopwatch sw = new Stopwatch(); sw.Start(); + public void EnsureStopped() + { + if (Threads.Count > 0) + Stop(); + } - EnsureStarted(); - List> tasks = new List>(); - List> splitObjects; + public void Stop() + { + if (Threads.Count == 0) + throw new Exception("ServerAPI: Threads not started"); + foreach (Thread t in Threads) + Tasks.Add((ServerApiOperation._NoOp, null, null)); + foreach (Thread t in Threads) + t.Join(); + Threads = new List(); + } - // request count optimization: if objects are < 500k, send in 1 request - int totalSize = 0; - foreach ((string id, string json) in objects) + private void ThreadMain() + { + using (ServerApi serialApi = new(BaseUri, AuthToken, BlobStorageFolder, TimeoutSeconds)) + { + serialApi.OnBatchSent = (num, size) => { - totalSize += json.Length; - if (totalSize >= 500000) - break; - } - if (totalSize < 500000) - splitObjects = new List>() { objects }; - else - splitObjects = SplitList(objects, NumThreads); + lock (CallbackLock) + OnBatchSent(num, size); + }; + serialApi.CancellationToken = CancellationToken; + serialApi.CompressPayloads = CompressPayloads; - for (int i = 0; i < NumThreads; i++) + while (true) { - if (splitObjects.Count <= i || splitObjects[i].Count == 0) - continue; - Task op = QueueOperation(ServerApiOperation.UploadObjects, (streamId, splitObjects[i])); - tasks.Add(op); - } - await Task.WhenAll(tasks.ToArray()); - // Console.WriteLine($"ParallelServerApi::UploadObjects({objects.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); - } + (ServerApiOperation operation, object inputValue, TaskCompletionSource tcs) = + Tasks.Take(); + if (tcs == null) + return; - public async Task UploadBlobs(string streamId, List<(string, string)> blobs) - { - EnsureStarted(); - Task op = QueueOperation(ServerApiOperation.UploadBlobs, (streamId, blobs)); - await op; + try + { + switch (operation) + { + case ServerApiOperation.DownloadSingleObject: + (string dsoStreamId, string dsoObjectId) = ((string, string))inputValue; + 
var dsoResult = serialApi.DownloadSingleObject(dsoStreamId, dsoObjectId).Result; + tcs.SetResult(dsoResult); + break; + case ServerApiOperation.DownloadObjects: + (string doStreamId, List doObjectIds, CbObjectDownloaded doCallback) = (( + string, + List, + CbObjectDownloaded + ))inputValue; + serialApi.DownloadObjects(doStreamId, doObjectIds, doCallback).Wait(); + // TODO: pass errors? + tcs.SetResult(null); + break; + case ServerApiOperation.HasObjects: + (string hoStreamId, List hoObjectIds) = ((string, List))inputValue; + var hoResult = serialApi.HasObjects(hoStreamId, hoObjectIds).Result; + tcs.SetResult(hoResult); + break; + case ServerApiOperation.UploadObjects: + (string uoStreamId, List<(string, string)> uoObjects) = (( + string, + List<(string, string)> + ))inputValue; + serialApi.UploadObjects(uoStreamId, uoObjects).Wait(); + // TODO: pass errors? + tcs.SetResult(null); + break; + case ServerApiOperation.UploadBlobs: + (string ubStreamId, List<(string, string)> ubBlobs) = (( + string, + List<(string, string)> + ))inputValue; + serialApi.UploadBlobs(ubStreamId, ubBlobs).Wait(); + tcs.SetResult(null); + break; + case ServerApiOperation.HasBlobs: + (string hbStreamId, List<(string, string)> hBlobs) = (( + string, + List<(string, string)> + ))inputValue; + var hasBlobResult = serialApi + .HasBlobs(hbStreamId, hBlobs.Select(b => b.Item1.Split(':')[1]).ToList()) + .Result; + tcs.SetResult(hasBlobResult); + break; + case ServerApiOperation.DownloadBlobs: + (string dbStreamId, List blobIds, CbBlobdDownloaded cb) = (( + string, + List, + CbBlobdDownloaded + ))inputValue; + serialApi.DownloadBlobs(dbStreamId, blobIds, cb).Wait(); + tcs.SetResult(null); + break; + } + } + catch (Exception e) + { + tcs.SetException(e); + } + } } + } - public async Task DownloadBlobs(string streamId, List blobIds, CbBlobdDownloaded onBlobDownloaded) - { - EnsureStarted(); - Task op = QueueOperation(ServerApiOperation.DownloadBlobs, (streamId, blobIds, onBlobDownloaded)); - await op; - 
} + private Task QueueOperation(ServerApiOperation operation, object inputValue) + { + TaskCompletionSource tcs = new(TaskCreationOptions.RunContinuationsAsynchronously); + Tasks.Add((operation, inputValue, tcs)); + return tcs.Task; + } - public async Task> HasBlobs(string streamId, List<(string, string)> blobs) - { - EnsureStarted(); - Task op = QueueOperation(ServerApiOperation.HasBlobs, (streamId, blobs)); - var res = await op; - return res as List; - } + private List> SplitList(List list, int parts) + { + List> ret = new(parts); + for (int i = 0; i < parts; i++) + ret.Add(new List(list.Count / parts + 1)); + for (int i = 0; i < list.Count; i++) + ret[i % parts].Add(list[i]); + return ret; + } - public void Dispose() - { - EnsureStopped(); - Tasks.Dispose(); - } + public async Task> HasBlobs(string streamId, List<(string, string)> blobs) + { + EnsureStarted(); + Task op = QueueOperation(ServerApiOperation.HasBlobs, (streamId, blobs)); + var res = await op.ConfigureAwait(false); + return res as List; } } diff --git a/Core/Core/Transports/ServerUtils/ServerAPI.cs b/Core/Core/Transports/ServerUtils/ServerAPI.cs index 202818e370..0c390f142e 100644 --- a/Core/Core/Transports/ServerUtils/ServerAPI.cs +++ b/Core/Core/Transports/ServerUtils/ServerAPI.cs @@ -1,11 +1,10 @@ -using System; +using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; -using System.IO.Compression; using System.Linq; using System.Net; using System.Net.Http; +using System.Net.Http.Headers; using System.Text; using System.Threading; using System.Threading.Tasks; @@ -14,444 +13,480 @@ using Speckle.Newtonsoft.Json; using Speckle.Newtonsoft.Json.Linq; -namespace Speckle.Core.Transports.ServerUtils +namespace Speckle.Core.Transports.ServerUtils; + +public class ServerApi : IDisposable, IServerApi { - public class ServerApi : IDisposable, IServerApi + private string BaseUri; + private int BATCH_SIZE_GET_OBJECTS = 10000; + private int BATCH_SIZE_HAS_OBJECTS = 100000; + 
+ private HttpClient Client; + private int MAX_MULTIPART_COUNT = 5; + private int MAX_MULTIPART_SIZE = 25_000_000; + private int MAX_OBJECT_SIZE = 25_000_000; + private int MAX_REQUEST_SIZE = 100_000_000; + private HashSet RETRY_CODES = new() { 408, 502, 503, 504 }; + private int RETRY_COUNT = 3; + + public ServerApi( + string baseUri, + string authorizationToken, + string blobStorageFolder, + int timeoutSeconds = 60 + ) { - private int BATCH_SIZE_HAS_OBJECTS = 100000; - private int BATCH_SIZE_GET_OBJECTS = 10000; - private int MAX_OBJECT_SIZE = 25_000_000; - private int MAX_MULTIPART_COUNT = 5; - private int MAX_MULTIPART_SIZE = 25_000_000; - private int MAX_REQUEST_SIZE = 100_000_000; - - private int DOWNLOAD_BATCH_SIZE = 1000; - private int RETRY_COUNT = 3; - private HashSet RETRY_CODES = new HashSet() { 408, 502, 503, 504 }; - private int RetriedCount { get; set; } = 0; - - private HttpClient Client; - private string BaseUri; - public CancellationToken CancellationToken { get; set; } - public bool CompressPayloads { get; set; } = true; - - public string BlobStorageFolder { get; set; } - - /// - /// Callback when sending batches. 
Parameters: object count, total bytes sent - /// - public Action OnBatchSent { get; set; } - - public ServerApi(string baseUri, string authorizationToken, string blobStorageFolder, int timeoutSeconds = 60) - { - BaseUri = baseUri; - CancellationToken = CancellationToken.None; + BaseUri = baseUri; + CancellationToken = CancellationToken.None; - BlobStorageFolder = blobStorageFolder; + BlobStorageFolder = blobStorageFolder; - Client = Http.GetHttpProxyClient(new SpeckleHttpClientHandler() - { - AutomaticDecompression = System.Net.DecompressionMethods.GZip, - }); + Client = Http.GetHttpProxyClient( + new SpeckleHttpClientHandler() { AutomaticDecompression = DecompressionMethods.GZip } + ); - Client.BaseAddress = new Uri(baseUri); - Client.Timeout = new TimeSpan(0, 0, timeoutSeconds); + Client.BaseAddress = new Uri(baseUri); + Client.Timeout = new TimeSpan(0, 0, timeoutSeconds); + if (authorizationToken.ToLowerInvariant().Contains("bearer")) + Client.DefaultRequestHeaders.Add("Authorization", authorizationToken); + else + Client.DefaultRequestHeaders.Add("Authorization", $"Bearer {authorizationToken}"); + } - if (authorizationToken.ToLowerInvariant().Contains("bearer")) - { - Client.DefaultRequestHeaders.Add("Authorization", authorizationToken); - } - else - { - Client.DefaultRequestHeaders.Add("Authorization", $"Bearer {authorizationToken}"); - } - } + private int RetriedCount { get; set; } = 0; + public CancellationToken CancellationToken { get; set; } + public bool CompressPayloads { get; set; } = true; - public async Task DownloadSingleObject(string streamId, string objectId) - { - CancellationToken.ThrowIfCancellationRequested(); + public string BlobStorageFolder { get; set; } - // Get root object - var rootHttpMessage = new HttpRequestMessage() - { - RequestUri = new Uri($"/objects/{streamId}/{objectId}/single", UriKind.Relative), - Method = HttpMethod.Get, - }; + /// + /// Callback when sending batches. 
Parameters: object count, total bytes sent + /// + public Action OnBatchSent { get; set; } - HttpResponseMessage rootHttpResponse = null; - while (ShouldRetry(rootHttpResponse)) - rootHttpResponse = await Client.SendAsync(rootHttpMessage, HttpCompletionOption.ResponseContentRead, CancellationToken); - rootHttpResponse.EnsureSuccessStatusCode(); + public void Dispose() + { + Client.Dispose(); + } - String rootObjectStr = await rootHttpResponse.Content.ReadAsStringAsync(); - return rootObjectStr; - } + public async Task DownloadSingleObject(string streamId, string objectId) + { + CancellationToken.ThrowIfCancellationRequested(); - public async Task DownloadObjects(string streamId, List objectIds, CbObjectDownloaded onObjectCallback) + // Get root object + var rootHttpMessage = new HttpRequestMessage() { + RequestUri = new Uri($"/objects/{streamId}/{objectId}/single", UriKind.Relative), + Method = HttpMethod.Get + }; + + HttpResponseMessage rootHttpResponse = null; + while (ShouldRetry(rootHttpResponse)) + rootHttpResponse = await Client + .SendAsync(rootHttpMessage, HttpCompletionOption.ResponseContentRead, CancellationToken) + .ConfigureAwait(false); + rootHttpResponse.EnsureSuccessStatusCode(); + + string rootObjectStr = await rootHttpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); + return rootObjectStr; + } - if (objectIds.Count == 0) - return; - if (objectIds.Count < BATCH_SIZE_GET_OBJECTS) - { - await DownloadObjectsImpl(streamId, objectIds, onObjectCallback); - return; - } - - List crtRequest = new List(); - foreach (string id in objectIds) - { - if (crtRequest.Count >= BATCH_SIZE_GET_OBJECTS) - { - await DownloadObjectsImpl(streamId, crtRequest, onObjectCallback); - crtRequest = new List(); - } - crtRequest.Add(id); - } - await DownloadObjectsImpl(streamId, crtRequest, onObjectCallback); - + public async Task DownloadObjects( + string streamId, + List objectIds, + CbObjectDownloaded onObjectCallback + ) + { + if (objectIds.Count == 0) + return; + 
if (objectIds.Count < BATCH_SIZE_GET_OBJECTS) + { + await DownloadObjectsImpl(streamId, objectIds, onObjectCallback).ConfigureAwait(false); + return; } - private async Task DownloadObjectsImpl(string streamId, List objectIds, CbObjectDownloaded onObjectCallback) + List crtRequest = new(); + foreach (string id in objectIds) { - // Stopwatch sw = new Stopwatch(); sw.Start(); - - CancellationToken.ThrowIfCancellationRequested(); - - var childrenHttpMessage = new HttpRequestMessage() - { - RequestUri = new Uri($"/api/getobjects/{streamId}", UriKind.Relative), - Method = HttpMethod.Post, - }; - - Dictionary postParameters = new Dictionary(); - postParameters.Add("objects", JsonConvert.SerializeObject(objectIds)); - string serializedPayload = JsonConvert.SerializeObject(postParameters); - childrenHttpMessage.Content = new StringContent(serializedPayload, Encoding.UTF8, "application/json"); - childrenHttpMessage.Headers.Add("Accept", "text/plain"); - - HttpResponseMessage childrenHttpResponse = null; - while (ShouldRetry(childrenHttpResponse)) - childrenHttpResponse = await Client.SendAsync(childrenHttpMessage, HttpCompletionOption.ResponseHeadersRead, CancellationToken); - childrenHttpResponse.EnsureSuccessStatusCode(); - - Stream childrenStream = await childrenHttpResponse.Content.ReadAsStreamAsync(); - - using (childrenStream) + if (crtRequest.Count >= BATCH_SIZE_GET_OBJECTS) { - using (var reader = new StreamReader(childrenStream, Encoding.UTF8)) - { - string line; - while ((line = reader.ReadLine()) != null) - { - CancellationToken.ThrowIfCancellationRequested(); - - var pcs = line.Split(new char[] { '\t' }, count: 2); - onObjectCallback(pcs[0], pcs[1]); - } - } + await DownloadObjectsImpl(streamId, crtRequest, onObjectCallback).ConfigureAwait(false); + crtRequest = new List(); } - - // Console.WriteLine($"ServerApi::DownloadObjects({objectIds.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); - + crtRequest.Add(id); } + await DownloadObjectsImpl(streamId, 
crtRequest, onObjectCallback).ConfigureAwait(false); + } - public async Task> HasObjects(string streamId, List objectIds) - { - if (objectIds.Count <= BATCH_SIZE_HAS_OBJECTS) - return await HasObjectsImpl(streamId, objectIds); + public async Task> HasObjects(string streamId, List objectIds) + { + if (objectIds.Count <= BATCH_SIZE_HAS_OBJECTS) + return await HasObjectsImpl(streamId, objectIds).ConfigureAwait(false); - Dictionary ret = new Dictionary(); - List crtBatch = new List(BATCH_SIZE_HAS_OBJECTS); - foreach (string objectId in objectIds) - { - crtBatch.Add(objectId); - if (crtBatch.Count >= BATCH_SIZE_HAS_OBJECTS) - { - Dictionary batchResult = await HasObjectsImpl(streamId, crtBatch); - foreach (KeyValuePair kv in batchResult) - ret[kv.Key] = kv.Value; - crtBatch = new List(BATCH_SIZE_HAS_OBJECTS); - } - } - if (crtBatch.Count > 0) + Dictionary ret = new(); + List crtBatch = new(BATCH_SIZE_HAS_OBJECTS); + foreach (string objectId in objectIds) + { + crtBatch.Add(objectId); + if (crtBatch.Count >= BATCH_SIZE_HAS_OBJECTS) { - Dictionary batchResult = await HasObjectsImpl(streamId, crtBatch); + Dictionary batchResult = await HasObjectsImpl(streamId, crtBatch) + .ConfigureAwait(false); foreach (KeyValuePair kv in batchResult) ret[kv.Key] = kv.Value; + crtBatch = new List(BATCH_SIZE_HAS_OBJECTS); } - return ret; } - - private async Task> HasObjectsImpl(string streamId, List objectIds) + if (crtBatch.Count > 0) { - CancellationToken.ThrowIfCancellationRequested(); - - // Stopwatch sw = new Stopwatch(); sw.Start(); - - string objectsPostParameter = JsonConvert.SerializeObject(objectIds); - var payload = new Dictionary() { { "objects", objectsPostParameter } }; - string serializedPayload = JsonConvert.SerializeObject(payload); - var uri = new Uri($"/api/diff/{streamId}", UriKind.Relative); - HttpResponseMessage response = null; - while (ShouldRetry(response)) - response = await Client.PostAsync(uri, new StringContent(serializedPayload, Encoding.UTF8, 
"application/json"), CancellationToken); - response.EnsureSuccessStatusCode(); - - var hasObjectsJson = await response.Content.ReadAsStringAsync(); - Dictionary hasObjects = new Dictionary(); + Dictionary batchResult = await HasObjectsImpl(streamId, crtBatch) + .ConfigureAwait(false); + foreach (KeyValuePair kv in batchResult) + ret[kv.Key] = kv.Value; + } + return ret; + } - JObject doc = JObject.Parse(hasObjectsJson); - foreach (KeyValuePair prop in doc) - hasObjects[prop.Key] = (bool)prop.Value; + public async Task UploadObjects(string streamId, List<(string, string)> objects) + { + if (objects.Count == 0) + return; - // Console.WriteLine($"ServerApi::HasObjects({objectIds.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); + // 1. Split into parts of MAX_MULTIPART_SIZE size (can be exceptions until a max of MAX_OBJECT_SIZE if a single obj is larger than MAX_MULTIPART_SIZE) + List> multipartedObjects = new(); + List multipartedObjectsSize = new(); - return hasObjects; - } + List<(string, string)> crtMultipart = new(); + int crtMultipartSize = 0; - public async Task UploadObjects(string streamId, List<(string, string)> objects) + foreach ((string id, string json) in objects) { - if (objects.Count == 0) - return; - - // 1. Split into parts of MAX_MULTIPART_SIZE size (can be exceptions until a max of MAX_OBJECT_SIZE if a single obj is larger than MAX_MULTIPART_SIZE) - List> multipartedObjects = new List>(); - List multipartedObjectsSize = new List(); + int objSize = Encoding.UTF8.GetByteCount(json); + if (objSize > MAX_OBJECT_SIZE) + throw new Exception( + $"Object too large (size {objSize}, max size {MAX_OBJECT_SIZE}). 
Consider using detached/chunked properties" + ); - List<(string, string)> crtMultipart = new List<(string, string)>(); - int crtMultipartSize = 0; - - foreach ((string id, string json) in objects) + if (crtMultipartSize + objSize <= MAX_MULTIPART_SIZE) { - int objSize = Encoding.UTF8.GetByteCount(json); - if (objSize > MAX_OBJECT_SIZE) - throw new Exception($"Object too large (size {objSize}, max size {MAX_OBJECT_SIZE}). Consider using detached/chunked properties"); - - if (crtMultipartSize + objSize <= MAX_MULTIPART_SIZE) - { - crtMultipart.Add((id, json)); - crtMultipartSize += objSize; - continue; - } - - // new multipart - if (crtMultipart.Count > 0) - { - multipartedObjects.Add(crtMultipart); - multipartedObjectsSize.Add(crtMultipartSize); - } - crtMultipart = new List<(string, string)>(); crtMultipart.Add((id, json)); - crtMultipartSize = objSize; + crtMultipartSize += objSize; + continue; } - multipartedObjects.Add(crtMultipart); - multipartedObjectsSize.Add(crtMultipartSize); - - // 2. 
Split multiparts into individual server requests of max size MAX_REQUEST_SIZE or max length MAX_MULTIPART_COUNT and send them - List> crtRequest = new List>(); - int crtRequestSize = 0; - int crtObjectCount = 0; - for (int i = 0; i < multipartedObjects.Count; i++) + + // new multipart + if (crtMultipart.Count > 0) { - List<(string, string)> multipart = multipartedObjects[i]; - int multipartSize = multipartedObjectsSize[i]; - if (crtRequestSize + multipartSize > MAX_REQUEST_SIZE || crtRequest.Count >= MAX_MULTIPART_COUNT) - { - await UploadObjectsImpl(streamId, crtRequest); - OnBatchSent?.Invoke(crtObjectCount, crtRequestSize); - crtRequest = new List>(); - crtRequestSize = 0; - crtObjectCount = 0; - } - crtRequest.Add(multipart); - crtRequestSize += multipartSize; - crtObjectCount += multipart.Count; + multipartedObjects.Add(crtMultipart); + multipartedObjectsSize.Add(crtMultipartSize); } - if (crtRequest.Count > 0) + crtMultipart = new List<(string, string)>(); + crtMultipart.Add((id, json)); + crtMultipartSize = objSize; + } + multipartedObjects.Add(crtMultipart); + multipartedObjectsSize.Add(crtMultipartSize); + + // 2. 
Split multiparts into individual server requests of max size MAX_REQUEST_SIZE or max length MAX_MULTIPART_COUNT and send them + List> crtRequest = new(); + int crtRequestSize = 0; + int crtObjectCount = 0; + for (int i = 0; i < multipartedObjects.Count; i++) + { + List<(string, string)> multipart = multipartedObjects[i]; + int multipartSize = multipartedObjectsSize[i]; + if ( + crtRequestSize + multipartSize > MAX_REQUEST_SIZE || crtRequest.Count >= MAX_MULTIPART_COUNT + ) { - await UploadObjectsImpl(streamId, crtRequest); + await UploadObjectsImpl(streamId, crtRequest).ConfigureAwait(false); OnBatchSent?.Invoke(crtObjectCount, crtRequestSize); + crtRequest = new List>(); + crtRequestSize = 0; + crtObjectCount = 0; } + crtRequest.Add(multipart); + crtRequestSize += multipartSize; + crtObjectCount += multipart.Count; } - - private async Task UploadObjectsImpl(string streamId, List> multipartedObjects) + if (crtRequest.Count > 0) { - // Stopwatch sw = new Stopwatch(); sw.Start(); - - CancellationToken.ThrowIfCancellationRequested(); + await UploadObjectsImpl(streamId, crtRequest).ConfigureAwait(false); + OnBatchSent?.Invoke(crtObjectCount, crtRequestSize); + } + } - var message = new HttpRequestMessage() - { - RequestUri = new Uri($"/objects/{streamId}", UriKind.Relative), - Method = HttpMethod.Post - }; + public async Task UploadBlobs(string streamId, List<(string, string)> blobs) + { + CancellationToken.ThrowIfCancellationRequested(); + if (blobs.Count == 0) + return; - var multipart = new MultipartFormDataContent(); + var multipartFormDataContent = new MultipartFormDataContent(); + var streams = new List(); + foreach (var (id, filePath) in blobs) + { + var fileName = Path.GetFileName(filePath); + var stream = File.OpenRead(filePath); + streams.Add(stream); + var fsc = new StreamContent(stream); + var hash = id.Split(':')[1]; - int mpId = 0; - foreach (List<(string, string)> mpData in multipartedObjects) - { - mpId++; + multipartFormDataContent.Add(fsc, 
$"hash:{hash}", fileName); + } - var _ctBuilder = new StringBuilder("["); - for (int i = 0; i < mpData.Count; i++) - { - if (i > 0) - { - _ctBuilder.Append(","); - } - _ctBuilder.Append(mpData[i].Item2); - } - _ctBuilder.Append("]"); - String _ct = _ctBuilder.ToString(); - - if (CompressPayloads) - { - var content = new GzipContent(new StringContent(_ct, Encoding.UTF8)); - content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/gzip"); - multipart.Add(content, $"batch-{mpId}", $"batch-{mpId}"); - } - else - { - multipart.Add(new StringContent(_ct, Encoding.UTF8), $"batch-{mpId}", $"batch-{mpId}"); - } + var message = new HttpRequestMessage() + { + RequestUri = new Uri($"/api/stream/{streamId}/blob", UriKind.Relative), + Method = HttpMethod.Post, + Content = multipartFormDataContent + }; - } - message.Content = multipart; + try + { HttpResponseMessage response = null; - while (ShouldRetry(response)) - response = await Client.SendAsync(message, CancellationToken); + while (ShouldRetry(response)) //TODO: can we get rid of this now we have polly? 
+ response = await Client.SendAsync(message, CancellationToken).ConfigureAwait(false); response.EnsureSuccessStatusCode(); - // Console.WriteLine($"ServerApi::UploadObjects({totalObjCount}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); + foreach (var stream in streams) + stream.Dispose(); } - - public async Task UploadBlobs(string streamId, List<(string, string)> blobs) + catch (Exception ex) { - CancellationToken.ThrowIfCancellationRequested(); - if (blobs.Count == 0) return; + foreach (var stream in streams) + stream.Dispose(); + throw; + } + } - var multipartFormDataContent = new MultipartFormDataContent(); - var streams = new List(); - foreach (var (id, filePath) in blobs) + public async Task DownloadBlobs( + string streamId, + List blobIds, + CbBlobdDownloaded onBlobDownloaded + ) + { + foreach (var blobId in blobIds) + try { - var fileName = Path.GetFileName(filePath); - var stream = File.OpenRead(filePath); - streams.Add(stream); - var fsc = new StreamContent(stream); - var hash = id.Split(':')[1]; - - multipartFormDataContent.Add(fsc, $"hash:{hash}", fileName); + var blobMessage = new HttpRequestMessage() + { + RequestUri = new Uri($"api/stream/{streamId}/blob/{blobId}", UriKind.Relative), + Method = HttpMethod.Get + }; + + var response = await Client.SendAsync(blobMessage, CancellationToken).ConfigureAwait(false); + IEnumerable cdHeaderValues; + response.Content.Headers.TryGetValues("Content-Disposition", out cdHeaderValues); + + var cdHeader = cdHeaderValues.First(); + var fileName = cdHeader.Split(new[] { "filename=" }, StringSplitOptions.None)[1] + .TrimStart('"') + .TrimEnd('"'); + + string fileLocation = Path.Combine( + BlobStorageFolder, + $"{blobId.Substring(0, Blob.LocalHashPrefixLength)}-{fileName}" + ); + using (var fs = new FileStream(fileLocation, FileMode.OpenOrCreate)) + await response.Content.CopyToAsync(fs).ConfigureAwait(false); + + response.Dispose(); + onBlobDownloaded(); } - - var message = new HttpRequestMessage() + catch 
(Exception ex) { - RequestUri = new Uri($"/api/stream/{streamId}/blob", UriKind.Relative), - Method = HttpMethod.Post, - Content = multipartFormDataContent - }; + throw new Exception($"Failed to download blob {blobId}", ex); + } + } - try - { - HttpResponseMessage response = null; - while (ShouldRetry(response)) //TODO: can we get rid of this now we have polly? - response = await Client.SendAsync(message, CancellationToken); - response.EnsureSuccessStatusCode(); + private async Task DownloadObjectsImpl( + string streamId, + List objectIds, + CbObjectDownloaded onObjectCallback + ) + { + // Stopwatch sw = new Stopwatch(); sw.Start(); - foreach (var stream in streams) stream.Dispose(); - } - catch (Exception ex) + CancellationToken.ThrowIfCancellationRequested(); + + var childrenHttpMessage = new HttpRequestMessage() + { + RequestUri = new Uri($"/api/getobjects/{streamId}", UriKind.Relative), + Method = HttpMethod.Post + }; + + Dictionary postParameters = new(); + postParameters.Add("objects", JsonConvert.SerializeObject(objectIds)); + string serializedPayload = JsonConvert.SerializeObject(postParameters); + childrenHttpMessage.Content = new StringContent( + serializedPayload, + Encoding.UTF8, + "application/json" + ); + childrenHttpMessage.Headers.Add("Accept", "text/plain"); + + HttpResponseMessage childrenHttpResponse = null; + while (ShouldRetry(childrenHttpResponse)) + childrenHttpResponse = await Client + .SendAsync(childrenHttpMessage, HttpCompletionOption.ResponseHeadersRead, CancellationToken) + .ConfigureAwait(false); + childrenHttpResponse.EnsureSuccessStatusCode(); + + Stream childrenStream = await childrenHttpResponse.Content + .ReadAsStreamAsync() + .ConfigureAwait(false); + + using (childrenStream) + using (var reader = new StreamReader(childrenStream, Encoding.UTF8)) + { + string line; + while ((line = reader.ReadLine()) != null) { - foreach (var stream in streams) stream.Dispose(); - throw; + CancellationToken.ThrowIfCancellationRequested(); + + var 
pcs = line.Split(new char[] { '\t' }, 2); + onObjectCallback(pcs[0], pcs[1]); } } - public async Task> HasBlobs(string streamId, List blobIds) - { - CancellationToken.ThrowIfCancellationRequested(); + // Console.WriteLine($"ServerApi::DownloadObjects({objectIds.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); + } + + private async Task> HasObjectsImpl( + string streamId, + List objectIds + ) + { + CancellationToken.ThrowIfCancellationRequested(); + + // Stopwatch sw = new Stopwatch(); sw.Start(); + + string objectsPostParameter = JsonConvert.SerializeObject(objectIds); + var payload = new Dictionary() { { "objects", objectsPostParameter } }; + string serializedPayload = JsonConvert.SerializeObject(payload); + var uri = new Uri($"/api/diff/{streamId}", UriKind.Relative); + HttpResponseMessage response = null; + while (ShouldRetry(response)) + response = await Client + .PostAsync( + uri, + new StringContent(serializedPayload, Encoding.UTF8, "application/json"), + CancellationToken + ) + .ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var hasObjectsJson = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + Dictionary hasObjects = new(); + + JObject doc = JObject.Parse(hasObjectsJson); + foreach (KeyValuePair prop in doc) + hasObjects[prop.Key] = (bool)prop.Value; + + // Console.WriteLine($"ServerApi::HasObjects({objectIds.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); + + return hasObjects; + } - var payload = JsonConvert.SerializeObject(blobIds); - var uri = new Uri($"/api/stream/{streamId}/blob/diff", UriKind.Relative); + private async Task UploadObjectsImpl( + string streamId, + List> multipartedObjects + ) + { + // Stopwatch sw = new Stopwatch(); sw.Start(); - HttpResponseMessage response = null; - while (ShouldRetry(response)) //TODO: can we get rid of this now we have polly? 
- response = await Client.PostAsync(uri, new StringContent(payload, Encoding.UTF8, "application/json"), CancellationToken); - response.EnsureSuccessStatusCode(); + CancellationToken.ThrowIfCancellationRequested(); - var responseString = await response.Content.ReadAsStringAsync(); - var parsed = JsonConvert.DeserializeObject>(responseString); - return parsed; - } + var message = new HttpRequestMessage() + { + RequestUri = new Uri($"/objects/{streamId}", UriKind.Relative), + Method = HttpMethod.Post + }; + + var multipart = new MultipartFormDataContent(); - public async Task DownloadBlobs(string streamId, List blobIds, CbBlobdDownloaded onBlobDownloaded) + int mpId = 0; + foreach (List<(string, string)> mpData in multipartedObjects) { + mpId++; - foreach (var blobId in blobIds) + var _ctBuilder = new StringBuilder("["); + for (int i = 0; i < mpData.Count; i++) { - try - { - var blobMessage = new HttpRequestMessage() - { - RequestUri = new Uri($"api/stream/{streamId}/blob/{blobId}", UriKind.Relative), - Method = HttpMethod.Get, - }; - - var response = await Client.SendAsync(blobMessage, CancellationToken); - IEnumerable cdHeaderValues; - response.Content.Headers.TryGetValues("Content-Disposition", out cdHeaderValues); - - var cdHeader = cdHeaderValues.First(); - var fileName = cdHeader.Split(new[] { "filename=" }, StringSplitOptions.None)[1].TrimStart('"').TrimEnd('"'); - - string fileLocation = Path.Combine(BlobStorageFolder, $"{blobId.Substring(0, Blob.LocalHashPrefixLength)}-{fileName}"); - using (var fs = new FileStream(fileLocation, FileMode.OpenOrCreate)) - { - await response.Content.CopyToAsync(fs); - } - - response.Dispose(); - onBlobDownloaded(); - } - catch (Exception ex) - { - throw new Exception($"Failed to download blob {blobId}", ex); - } + if (i > 0) + _ctBuilder.Append(","); + _ctBuilder.Append(mpData[i].Item2); } + _ctBuilder.Append("]"); + string _ct = _ctBuilder.ToString(); + if (CompressPayloads) + { + var content = new GzipContent(new 
StringContent(_ct, Encoding.UTF8)); + content.Headers.ContentType = new MediaTypeHeaderValue("application/gzip"); + multipart.Add(content, $"batch-{mpId}", $"batch-{mpId}"); + } + else + { + multipart.Add(new StringContent(_ct, Encoding.UTF8), $"batch-{mpId}", $"batch-{mpId}"); + } } + message.Content = multipart; + HttpResponseMessage response = null; + while (ShouldRetry(response)) + response = await Client.SendAsync(message, CancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); - //TODO: can we get rid of this now we have polly? - private bool ShouldRetry(HttpResponseMessage serverResponse) - { - if (serverResponse == null) - return true; - if (!RETRY_CODES.Contains((int)serverResponse.StatusCode)) - return false; - if (RetriedCount >= RETRY_COUNT) - return false; - RetriedCount += 1; - return true; - } + // Console.WriteLine($"ServerApi::UploadObjects({totalObjCount}) request in {sw.ElapsedMilliseconds / 1000.0} sec"); + } - public void Dispose() - { - Client.Dispose(); - } + public async Task> HasBlobs(string streamId, List blobIds) + { + CancellationToken.ThrowIfCancellationRequested(); + + var payload = JsonConvert.SerializeObject(blobIds); + var uri = new Uri($"/api/stream/{streamId}/blob/diff", UriKind.Relative); + + HttpResponseMessage response = null; + while (ShouldRetry(response)) //TODO: can we get rid of this now we have polly? + response = await Client + .PostAsync( + uri, + new StringContent(payload, Encoding.UTF8, "application/json"), + CancellationToken + ) + .ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var responseString = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + var parsed = JsonConvert.DeserializeObject>(responseString); + return parsed; + } - private class BlobUploadResult - { - public List uploadResults { get; set; } - } + //TODO: can we get rid of this now we have polly? 
+ private bool ShouldRetry(HttpResponseMessage serverResponse) + { + if (serverResponse == null) + return true; + if (!RETRY_CODES.Contains((int)serverResponse.StatusCode)) + return false; + if (RetriedCount >= RETRY_COUNT) + return false; + RetriedCount += 1; + return true; + } - private class BlobUploadResultItem - { - public string blobId { get; set; } - public string formKey { get; set; } - public string fileName { get; set; } - } + private class BlobUploadResult + { + public List uploadResults { get; set; } } + private class BlobUploadResultItem + { + public string blobId { get; set; } + public string formKey { get; set; } + public string fileName { get; set; } + } } diff --git a/Core/Core/Transports/ServerV2.cs b/Core/Core/Transports/ServerV2.cs index f7b2892ef3..39245cedde 100644 --- a/Core/Core/Transports/ServerV2.cs +++ b/Core/Core/Transports/ServerV2.cs @@ -1,11 +1,10 @@ -using System; +using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Threading; using System.Threading.Tasks; -using Serilog; using Speckle.Core.Credentials; using Speckle.Core.Helpers; using Speckle.Core.Logging; @@ -13,155 +12,157 @@ using Speckle.Core.Transports.ServerUtils; using Speckle.Newtonsoft.Json.Linq; -namespace Speckle.Core.Transports +namespace Speckle.Core.Transports; + +public class ServerTransport : ServerTransportV2 { - public class ServerTransport : ServerTransportV2 - { - public ServerTransport( - Account account, - string streamId, - int timeoutSeconds = 60, - string blobStorageFolder = null - ) : base(account, streamId, timeoutSeconds, blobStorageFolder) { } - } + public ServerTransport( + Account account, + string streamId, + int timeoutSeconds = 60, + string blobStorageFolder = null + ) + : base(account, streamId, timeoutSeconds, blobStorageFolder) { } +} - public class ServerTransportV2 : IDisposable, ICloneable, ITransport, IBlobCapableTransport - { - public string TransportName { get; set; } = 
"RemoteTransport"; +public class ServerTransportV2 : IDisposable, ICloneable, ITransport, IBlobCapableTransport +{ + private object ElapsedLock = new(); - public Dictionary TransportContext => - new Dictionary - { - { "name", TransportName }, - { "type", this.GetType().Name }, - { "streamId", StreamId }, - { "serverUrl", BaseUri }, - { "blobStorageFolder", BlobStorageFolder } - }; + private bool ErrorState = false; + private bool IsWriteComplete = false; - public CancellationToken CancellationToken { get; set; } - public Action OnProgressAction { get; set; } - public Action OnErrorAction { get; set; } + // TODO: make send buffer more flexible to accept blobs too + private List<(string, string)> SendBuffer = new(); + private object SendBufferLock = new(); + private Thread SendingThread = null; - public int TotalSentBytes { get; set; } = 0; - public int SavedObjectCount { get; private set; } = 0; + private bool ShouldSendThreadRun = false; - public Account Account { get; set; } - public string BaseUri { get; private set; } - public string StreamId { get; set; } + public ServerTransportV2( + Account account, + string streamId, + int timeoutSeconds = 60, + string blobStorageFolder = null + ) + { + Account = account; + CancellationToken = CancellationToken.None; + Initialize(account.serverInfo.url, streamId, account.token, timeoutSeconds); - public int TimeoutSeconds { get; set; } - private string AuthorizationToken { get; set; } + if (blobStorageFolder == null) + BlobStorageFolder = SpecklePathProvider.BlobStoragePath(); + Directory.CreateDirectory(BlobStorageFolder); + } - public ParallelServerApi Api { get; private set; } + public int TotalSentBytes { get; set; } = 0; - public string BlobStorageFolder { get; set; } + public Account Account { get; set; } + public string BaseUri { get; private set; } + public string StreamId { get; set; } - private object ElapsedLock = new object(); - public TimeSpan Elapsed { get; set; } = TimeSpan.Zero; + public int TimeoutSeconds { 
get; set; } + private string AuthorizationToken { get; set; } - private bool ShouldSendThreadRun = false; - private bool IsWriteComplete = false; - private Thread SendingThread = null; - private object SendBufferLock = new object(); + public ParallelServerApi Api { get; private set; } - // TODO: make send buffer more flexible to accept blobs too - private List<(string, string)> SendBuffer = new List<(string, string)>(); + public string BlobStorageFolder { get; set; } - private bool ErrorState = false; + public void SaveBlob(Blob obj) + { + if (string.IsNullOrEmpty(StreamId) || obj == null) + throw new Exception("Invalid parameters to SaveBlob"); + var hash = obj.GetFileHash(); - public ServerTransportV2( - Account account, - string streamId, - int timeoutSeconds = 60, - string blobStorageFolder = null - ) + lock (SendBufferLock) { - Account = account; - CancellationToken = CancellationToken.None; - Initialize(account.serverInfo.url, streamId, account.token, timeoutSeconds); - - if (blobStorageFolder == null) - { - BlobStorageFolder = SpecklePathProvider.BlobStoragePath(); - } - Directory.CreateDirectory(BlobStorageFolder); + if (ErrorState) + return; + SendBuffer.Add(($"blob:{hash}", obj.filePath)); } + } - private void Initialize( - string baseUri, - string streamId, - string authorizationToken, - int timeoutSeconds = 60 - ) + public object Clone() + { + return new ServerTransportV2(Account, StreamId) { - SpeckleLog.Logger.Information("Initializing a new Remote Transport for {baseUri}", baseUri); - - BaseUri = baseUri; - StreamId = streamId; - AuthorizationToken = authorizationToken; - TimeoutSeconds = timeoutSeconds; + OnErrorAction = OnErrorAction, + OnProgressAction = OnProgressAction, + CancellationToken = CancellationToken + }; + } - Api = new ParallelServerApi(BaseUri, AuthorizationToken, BlobStorageFolder, TimeoutSeconds); - Api.OnBatchSent = (num, size) => - { - OnProgressAction?.Invoke(TransportName, num); - TotalSentBytes += size; - SavedObjectCount += 
num; - }; + public void Dispose() + { + if (SendingThread != null) + { + ShouldSendThreadRun = false; + SendingThread.Join(); } + Api.Dispose(); + } - public async Task CopyObjectAndChildren( - string id, - ITransport targetTransport, - Action onTotalChildrenCountKnown = null - ) + public string TransportName { get; set; } = "RemoteTransport"; + + public Dictionary TransportContext => + new() { - if (String.IsNullOrEmpty(StreamId) || String.IsNullOrEmpty(id) || targetTransport == null) - throw new Exception("Invalid parameters to CopyObjectAndChildren"); + { "name", TransportName }, + { "type", GetType().Name }, + { "streamId", StreamId }, + { "serverUrl", BaseUri }, + { "blobStorageFolder", BlobStorageFolder } + }; + + public CancellationToken CancellationToken { get; set; } + public Action OnProgressAction { get; set; } + public Action OnErrorAction { get; set; } + public int SavedObjectCount { get; private set; } = 0; + public TimeSpan Elapsed { get; set; } = TimeSpan.Zero; + + public async Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action onTotalChildrenCountKnown = null + ) + { + if (string.IsNullOrEmpty(StreamId) || string.IsNullOrEmpty(id) || targetTransport == null) + throw new Exception("Invalid parameters to CopyObjectAndChildren"); - if (CancellationToken.IsCancellationRequested) - return null; + if (CancellationToken.IsCancellationRequested) + return null; - using ( - ParallelServerApi api = new ParallelServerApi( - BaseUri, - AuthorizationToken, - BlobStorageFolder, - TimeoutSeconds - ) - ) + using ( + ParallelServerApi api = new(BaseUri, AuthorizationToken, BlobStorageFolder, TimeoutSeconds) + ) + { + var stopwatch = Stopwatch.StartNew(); + api.CancellationToken = CancellationToken; + try { - var stopwatch = Stopwatch.StartNew(); - api.CancellationToken = CancellationToken; - try - { - string rootObjectJson = await api.DownloadSingleObject(StreamId, id); - List allIds = ParseChildrenIds(rootObjectJson); + string 
rootObjectJson = await api.DownloadSingleObject(StreamId, id).ConfigureAwait(false); + List allIds = ParseChildrenIds(rootObjectJson); - List childrenIds = allIds.Where(id => !id.Contains("blob:")).ToList(); - List blobIds = allIds - .Where(id => id.Contains("blob:")) - .Select(id => id.Remove(0, 5)) - .ToList(); + List childrenIds = allIds.Where(id => !id.Contains("blob:")).ToList(); + List blobIds = allIds + .Where(id => id.Contains("blob:")) + .Select(id => id.Remove(0, 5)) + .ToList(); - onTotalChildrenCountKnown?.Invoke(allIds.Count); + onTotalChildrenCountKnown?.Invoke(allIds.Count); - // - // Objects download - // + // + // Objects download + // - // Check which children are not already in the local transport - var childrenFoundMap = await targetTransport.HasObjects(childrenIds); - List newChildrenIds = new List( - from objId in childrenFoundMap.Keys - where !childrenFoundMap[objId] - select objId - ); + // Check which children are not already in the local transport + var childrenFoundMap = await targetTransport.HasObjects(childrenIds).ConfigureAwait(false); + List newChildrenIds = + new(from objId in childrenFoundMap.Keys where !childrenFoundMap[objId] select objId); - targetTransport.BeginWrite(); + targetTransport.BeginWrite(); - await api.DownloadObjects( + await api.DownloadObjects( StreamId, newChildrenIds, (string id, string json) => @@ -171,276 +172,250 @@ await api.DownloadObjects( OnProgressAction?.Invoke(TransportName, 1); stopwatch.Start(); } - ); - - // pausing until writing to the target transport - stopwatch.Stop(); - targetTransport.SaveObject(id, rootObjectJson); - - await targetTransport.WriteComplete(); - targetTransport.EndWrite(); - stopwatch.Start(); - - // - // Blobs download - // - var localBlobTrimmedHashes = Directory - .GetFiles(BlobStorageFolder) - .Select(fileName => fileName.Split(Path.DirectorySeparatorChar).Last()) - .Where(fileName => fileName.Length > 10) - .Select(fileName => fileName.Substring(0, 
Blob.LocalHashPrefixLength)) - .ToList(); - - var newBlobIds = blobIds - .Where( - id => !localBlobTrimmedHashes.Contains(id.Substring(0, Blob.LocalHashPrefixLength)) - ) - .ToList(); - - await api.DownloadBlobs( + ) + .ConfigureAwait(false); + + // pausing until writing to the target transport + stopwatch.Stop(); + targetTransport.SaveObject(id, rootObjectJson); + + await targetTransport.WriteComplete().ConfigureAwait(false); + targetTransport.EndWrite(); + stopwatch.Start(); + + // + // Blobs download + // + var localBlobTrimmedHashes = Directory + .GetFiles(BlobStorageFolder) + .Select(fileName => fileName.Split(Path.DirectorySeparatorChar).Last()) + .Where(fileName => fileName.Length > 10) + .Select(fileName => fileName.Substring(0, Blob.LocalHashPrefixLength)) + .ToList(); + + var newBlobIds = blobIds + .Where( + id => !localBlobTrimmedHashes.Contains(id.Substring(0, Blob.LocalHashPrefixLength)) + ) + .ToList(); + + await api.DownloadBlobs( StreamId, newBlobIds, () => { OnProgressAction?.Invoke(TransportName, 1); } - ); + ) + .ConfigureAwait(false); - stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; - return rootObjectJson; - } - catch (Exception e) - { - OnErrorAction?.Invoke(TransportName, e); - return null; - } + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + return rootObjectJson; } - } - - public string GetObject(string id) - { - if (CancellationToken.IsCancellationRequested) + catch (Exception e) { + OnErrorAction?.Invoke(TransportName, e); return null; } - var stopwatch = Stopwatch.StartNew(); - var result = Api.DownloadSingleObject(StreamId, id).Result; - stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; - return result; } + } - public async Task> HasObjects(List objectIds) - { - if (String.IsNullOrEmpty(StreamId) || objectIds == null) - throw new Exception("Invalid parameters to HasObjects"); - return await Api.HasObjects(StreamId, objectIds); - } + public string GetObject(string id) + { + if (CancellationToken.IsCancellationRequested) + return 
null; + var stopwatch = Stopwatch.StartNew(); + var result = Api.DownloadSingleObject(StreamId, id).Result; + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + return result; + } - public void SaveObject(string id, string serializedObject) - { - if (String.IsNullOrEmpty(StreamId) || String.IsNullOrEmpty(id) || serializedObject == null) - throw new Exception("Invalid parameters to SaveObject"); - lock (SendBufferLock) - { - if (ErrorState) - return; - SendBuffer.Add((id, serializedObject)); - IsWriteComplete = false; - } - } + public async Task> HasObjects(List objectIds) + { + if (string.IsNullOrEmpty(StreamId) || objectIds == null) + throw new Exception("Invalid parameters to HasObjects"); + return await Api.HasObjects(StreamId, objectIds).ConfigureAwait(false); + } - public void SaveObject(string id, ITransport sourceTransport) + public void SaveObject(string id, string serializedObject) + { + if (string.IsNullOrEmpty(StreamId) || string.IsNullOrEmpty(id) || serializedObject == null) + throw new Exception("Invalid parameters to SaveObject"); + lock (SendBufferLock) { - if (String.IsNullOrEmpty(StreamId) || String.IsNullOrEmpty(id) || sourceTransport == null) - throw new Exception("Invalid parameters to SaveObject"); - SaveObject(id, sourceTransport.GetObject(id)); + if (ErrorState) + return; + SendBuffer.Add((id, serializedObject)); + IsWriteComplete = false; } + } - public void SaveBlob(Blob obj) - { - if (String.IsNullOrEmpty(StreamId) || obj == null) - throw new Exception("Invalid parameters to SaveBlob"); - var hash = obj.GetFileHash(); + public void SaveObject(string id, ITransport sourceTransport) + { + if (string.IsNullOrEmpty(StreamId) || string.IsNullOrEmpty(id) || sourceTransport == null) + throw new Exception("Invalid parameters to SaveObject"); + SaveObject(id, sourceTransport.GetObject(id)); + } + + public void BeginWrite() + { + if (ShouldSendThreadRun || SendingThread != null) + throw new Exception("ServerTransport already sending"); + 
TotalSentBytes = 0; + SavedObjectCount = 0; + + ErrorState = false; + ShouldSendThreadRun = true; + SendingThread = new Thread(new ThreadStart(SendingThreadMain)); + SendingThread.Name = "ServerTransportSender"; + SendingThread.IsBackground = true; + SendingThread.Start(); + } + public async Task WriteComplete() + { + while (true) + { lock (SendBufferLock) - { - if (ErrorState) + if (IsWriteComplete || ErrorState) return; - SendBuffer.Add(($"blob:{hash}", obj.filePath)); - } + await Task.Delay(50).ConfigureAwait(false); } + } - public void BeginWrite() - { - if (ShouldSendThreadRun || SendingThread != null) - throw new Exception("ServerTransport already sending"); - TotalSentBytes = 0; - SavedObjectCount = 0; - - ErrorState = false; - ShouldSendThreadRun = true; - SendingThread = new Thread(new ThreadStart(SendingThreadMain)); - SendingThread.Name = "ServerTransportSender"; - SendingThread.IsBackground = true; - SendingThread.Start(); - } + public void EndWrite() + { + if (!ShouldSendThreadRun || SendingThread == null) + throw new Exception("ServerTransport not sending"); + ShouldSendThreadRun = false; + SendingThread.Join(); + SendingThread = null; + } - public async Task WriteComplete() - { - while (true) - { - lock (SendBufferLock) - { - if (IsWriteComplete || ErrorState) - return; - } - await Task.Delay(50); - } - } + private void Initialize( + string baseUri, + string streamId, + string authorizationToken, + int timeoutSeconds = 60 + ) + { + SpeckleLog.Logger.Information("Initializing a new Remote Transport for {baseUri}", baseUri); - public void EndWrite() - { - if (!ShouldSendThreadRun || SendingThread == null) - throw new Exception("ServerTransport not sending"); - ShouldSendThreadRun = false; - SendingThread.Join(); - SendingThread = null; - } + BaseUri = baseUri; + StreamId = streamId; + AuthorizationToken = authorizationToken; + TimeoutSeconds = timeoutSeconds; - public override string ToString() + Api = new ParallelServerApi(BaseUri, AuthorizationToken, 
BlobStorageFolder, TimeoutSeconds); + Api.OnBatchSent = (num, size) => { - return $"Server Transport @{Account.serverInfo.url}"; - } + OnProgressAction?.Invoke(TransportName, num); + TotalSentBytes += size; + SavedObjectCount += num; + }; + } + + public override string ToString() + { + return $"Server Transport @{Account.serverInfo.url}"; + } - public object Clone() + private List ParseChildrenIds(string json) + { + List childrenIds = new(); + try { - return new ServerTransportV2(Account, StreamId) - { - OnErrorAction = OnErrorAction, - OnProgressAction = OnProgressAction, - CancellationToken = CancellationToken - }; + JObject doc1 = JObject.Parse(json); + foreach (JToken prop in doc1["__closure"]) + childrenIds.Add(((JProperty)prop).Name); } - - private List ParseChildrenIds(string json) + catch { - List childrenIds = new List(); - try - { - JObject doc1 = JObject.Parse(json); - foreach (JToken prop in doc1["__closure"]) - childrenIds.Add(((JProperty)prop).Name); - } - catch - { - // empty children list if no __closure key is found - } - return childrenIds; + // empty children list if no __closure key is found } + return childrenIds; + } - private async void SendingThreadMain() + private async void SendingThreadMain() + { + while (true) { - while (true) - { - var stopwatch = Stopwatch.StartNew(); - if (!ShouldSendThreadRun || CancellationToken.IsCancellationRequested) - { - return; - } - List<(string, string)> buffer = null; - lock (SendBufferLock) + var stopwatch = Stopwatch.StartNew(); + if (!ShouldSendThreadRun || CancellationToken.IsCancellationRequested) + return; + List<(string, string)> buffer = null; + lock (SendBufferLock) + if (SendBuffer.Count > 0) { - if (SendBuffer.Count > 0) - { - buffer = SendBuffer; - SendBuffer = new List<(string, string)>(); - } - else - { - IsWriteComplete = true; - } + buffer = SendBuffer; + SendBuffer = new List<(string, string)>(); } - if (buffer == null) + else { - Thread.Sleep(100); - continue; + IsWriteComplete = true; } - 
try - { - List<(string, string)> bufferObjects = buffer - .Where(tuple => !tuple.Item1.Contains("blob")) - .ToList(); - List<(string, string)> bufferBlobs = buffer - .Where(tuple => tuple.Item1.Contains("blob")) - .ToList(); - List objectIds = new List(bufferObjects.Count); + if (buffer == null) + { + Thread.Sleep(100); + continue; + } + try + { + List<(string, string)> bufferObjects = buffer + .Where(tuple => !tuple.Item1.Contains("blob")) + .ToList(); + List<(string, string)> bufferBlobs = buffer + .Where(tuple => tuple.Item1.Contains("blob")) + .ToList(); - foreach ((string id, _) in bufferObjects) - { - if (id != "blob") - { - objectIds.Add(id); - } - } + List objectIds = new(bufferObjects.Count); - Dictionary hasObjects = await Api.HasObjects(StreamId, objectIds); - List<(string, string)> newObjects = new List<(string, string)>(); - foreach ((string id, object json) in bufferObjects) - { - if (!hasObjects[id]) - { - newObjects.Add((id, json as string)); - } - } + foreach ((string id, _) in bufferObjects) + if (id != "blob") + objectIds.Add(id); - // Report the objects that are already on the server - OnProgressAction?.Invoke(TransportName, hasObjects.Count - newObjects.Count); + Dictionary hasObjects = await Api.HasObjects(StreamId, objectIds) + .ConfigureAwait(false); + List<(string, string)> newObjects = new(); + foreach ((string id, object json) in bufferObjects) + if (!hasObjects[id]) + newObjects.Add((id, json as string)); - await Api.UploadObjects(StreamId, newObjects); + // Report the objects that are already on the server + OnProgressAction?.Invoke(TransportName, hasObjects.Count - newObjects.Count); - if (bufferBlobs.Count != 0) - { - var blobIdsToUpload = await Api.HasBlobs(StreamId, bufferBlobs); - var formattedIds = blobIdsToUpload.Select(id => $"blob:{id}").ToList(); - var newBlobs = bufferBlobs - .Where(tuple => formattedIds.IndexOf(tuple.Item1) != -1) - .ToList(); - if (newBlobs.Count != 0) - { - await Api.UploadBlobs(StreamId, newBlobs); - } - 
} - } - catch (Exception ex) + await Api.UploadObjects(StreamId, newObjects).ConfigureAwait(false); + + if (bufferBlobs.Count != 0) { - OnErrorAction?.Invoke(TransportName, ex); - lock (SendBufferLock) - { - SendBuffer.Clear(); - ErrorState = true; - } - return; + var blobIdsToUpload = await Api.HasBlobs(StreamId, bufferBlobs).ConfigureAwait(false); + var formattedIds = blobIdsToUpload.Select(id => $"blob:{id}").ToList(); + var newBlobs = bufferBlobs + .Where(tuple => formattedIds.IndexOf(tuple.Item1) != -1) + .ToList(); + if (newBlobs.Count != 0) + await Api.UploadBlobs(StreamId, newBlobs).ConfigureAwait(false); } - finally + } + catch (Exception ex) + { + OnErrorAction?.Invoke(TransportName, ex); + lock (SendBufferLock) { - stopwatch.Stop(); - lock (ElapsedLock) - Elapsed += stopwatch.Elapsed; + SendBuffer.Clear(); + ErrorState = true; } + return; } - } - - public void Dispose() - { - if (SendingThread != null) + finally { - ShouldSendThreadRun = false; - SendingThread.Join(); + stopwatch.Stop(); + lock (ElapsedLock) + Elapsed += stopwatch.Elapsed; } - Api.Dispose(); } } } diff --git a/Core/Core/Transports/Utilities.cs b/Core/Core/Transports/Utilities.cs index 71cdcd4d43..4c97bf5fc1 100644 --- a/Core/Core/Transports/Utilities.cs +++ b/Core/Core/Transports/Utilities.cs @@ -1,36 +1,27 @@ -using System; -using System.Collections.Generic; +using System; using System.Threading.Tasks; using Speckle.Core.Logging; -using Speckle.Core.Models; -namespace Speckle.Core.Transports +namespace Speckle.Core.Transports; + +public static class Utilities { - public static class Utilities + /// + /// Waits until the provided function returns true. + /// + /// + /// + /// + /// + public static async Task WaitUntil(Func condition, int frequency = 25, int timeout = -1) { - /// - /// Waits until the provided function returns true. 
- /// - /// - /// - /// - /// - public static async Task WaitUntil(Func condition, int frequency = 25, int timeout = -1) + var waitTask = Task.Run(async () => { - var waitTask = Task.Run(async () => - { - while (!condition()) - { - await Task.Delay(frequency); - } - }); - - if (waitTask != await Task.WhenAny(waitTask, - Task.Delay(timeout))) - { - throw new SpeckleException("Process timed out", new TimeoutException()); - } - } + while (!condition()) + await Task.Delay(frequency).ConfigureAwait(false); + }); + if (waitTask != await Task.WhenAny(waitTask, Task.Delay(timeout)).ConfigureAwait(false)) + throw new SpeckleException("Process timed out", new TimeoutException()); } } diff --git a/Core/Examples/Program.cs b/Core/Examples/Program.cs index 8b88c1a96e..3c3d37486b 100644 --- a/Core/Examples/Program.cs +++ b/Core/Examples/Program.cs @@ -1,18 +1,18 @@ -using System; +using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Threading.Tasks; +using Serilog; +using Serilog.Context; using Speckle.Core.Api; using Speckle.Core.Credentials; +using Speckle.Core.Helpers; using Speckle.Core.Logging; using Speckle.Core.Models; using Speckle.Core.Transports; -using Speckle.Core.Helpers; -using Serilog; using Tests; -using Serilog.Context; /// /// Quick and dirty tests/examples of Speckle usage. 
@@ -51,7 +51,9 @@ static async Task Main(string[] args) 40759.5123 ); - var log = SpeckleLog.Logger.ForContext("currentSpeed", "warp 5").ForContext("captain", "Jean-Luc Picard"); + var log = SpeckleLog.Logger + .ForContext("currentSpeed", "warp 5") + .ForContext("captain", "Jean-Luc Picard"); SpeckleLog.Logger.Information( "We're traveling to {destination} our current speed is {currentSpeed}", @@ -140,41 +142,45 @@ public static async Task SendReceiveManyLargeObjects( Console.WriteLine("Done generating objects."); var myClient = new Client(AccountManager.GetDefaultAccount()); - var streamId = await myClient.StreamCreate( - new StreamCreateInput { name = "test", description = "this is a test" } - ); + var streamId = await myClient + .StreamCreate(new StreamCreateInput { name = "test", description = "this is a test" }) + .ConfigureAwait(false); var myServer = new ServerTransport(AccountManager.GetDefaultAccount(), streamId); var myObject = new Base(); myObject["items"] = objs; - var res = await Operations.Send( - myObject, - new List() { myServer }, - onProgressAction: dict => - { - Console.CursorLeft = 0; - Console.CursorTop = 2; + var res = await Operations + .Send( + myObject, + new List() { myServer }, + onProgressAction: dict => + { + Console.CursorLeft = 0; + Console.CursorTop = 2; - foreach (var kvp in dict) - Console.WriteLine($"<<<< {kvp.Key} progress: {kvp.Value} / {numObjects + 1}"); - } - ); + foreach (var kvp in dict) + Console.WriteLine($"<<<< {kvp.Key} progress: {kvp.Value} / {numObjects + 1}"); + } + ) + .ConfigureAwait(false); Console.WriteLine($"Big commit id is {res}"); - var receivedCommit = await Operations.Receive( - res, - remoteTransport: myServer, - onProgressAction: dict => - { - Console.CursorLeft = 0; - Console.CursorTop = 7; + var receivedCommit = await Operations + .Receive( + res, + remoteTransport: myServer, + onProgressAction: dict => + { + Console.CursorLeft = 0; + Console.CursorTop = 7; - foreach (var kvp in dict) - 
Console.WriteLine($"<<<< {kvp.Key} progress: {kvp.Value} / {numObjects + 1}"); - } - ); + foreach (var kvp in dict) + Console.WriteLine($"<<<< {kvp.Key} progress: {kvp.Value} / {numObjects + 1}"); + } + ) + .ConfigureAwait(false); Console.Clear(); Console.WriteLine($"Received big commit {res}"); @@ -200,19 +206,21 @@ public static async Task SendReceiveLargeSingleObjects(int numVertices = 100_000 } var myClient = new Client(AccountManager.GetDefaultAccount()); - var streamId = await myClient.StreamCreate( - new StreamCreateInput { name = "test", description = "this is a test" } - ); + var streamId = await myClient + .StreamCreate(new StreamCreateInput { name = "test", description = "this is a test" }) + .ConfigureAwait(false); var server = new ServerTransport(AccountManager.GetDefaultAccount(), streamId); - var res = await Operations.Send(myMesh, transports: new List() { server }); + var res = await Operations + .Send(myMesh, transports: new List() { server }) + .ConfigureAwait(false); ; Console.WriteLine($"Big mesh id is {res}"); var cp = res; - var pullMyMesh = await Operations.Receive(res); + var pullMyMesh = await Operations.Receive(res).ConfigureAwait(false); Console.WriteLine("Pulled back big mesh."); } @@ -282,22 +290,26 @@ public static async Task SendAndReceive(int numObjects = 3000) // Let's set up some fake server transports. 
var myClient = new Client(AccountManager.GetDefaultAccount()); - var streamId = await myClient.StreamCreate( - new StreamCreateInput { name = "test", description = "this is a test" } - ); + var streamId = await myClient + .StreamCreate(new StreamCreateInput { name = "test", description = "this is a test" }) + .ConfigureAwait(false); var firstServer = new ServerTransport(AccountManager.GetDefaultAccount(), streamId); var mySecondClient = new Client(AccountManager.GetDefaultAccount()); - var secondStreamId = await myClient.StreamCreate( - new StreamCreateInput { name = "test2", description = "this is a second test" } - ); + var secondStreamId = await myClient + .StreamCreate( + new StreamCreateInput { name = "test2", description = "this is a second test" } + ) + .ConfigureAwait(false); var secondServer = new ServerTransport(AccountManager.GetDefaultAccount(), secondStreamId); - var res = await Operations.Send( - @object: myRevision, - transports: new List() { firstServer, secondServer }, - onProgressAction: pushProgressAction - ); + var res = await Operations + .Send( + @object: myRevision, + transports: new List() { firstServer, secondServer }, + onProgressAction: pushProgressAction + ) + .ConfigureAwait(false); Console.Clear(); Console.CursorLeft = 0; @@ -311,18 +323,20 @@ public static async Task SendAndReceive(int numObjects = 3000) Console.Clear(); // Time for getting our revision object back. 
- var res2 = await Operations.Receive( - res, - remoteTransport: firstServer, - onProgressAction: dict => - { - Console.CursorLeft = 0; - Console.CursorTop = 0; + var res2 = await Operations + .Receive( + res, + remoteTransport: firstServer, + onProgressAction: dict => + { + Console.CursorLeft = 0; + Console.CursorTop = 0; - foreach (var kvp in dict) - Console.WriteLine($"<<<< {kvp.Key} progress: {kvp.Value} / {numObjects + 1}"); - } - ); + foreach (var kvp in dict) + Console.WriteLine($"<<<< {kvp.Key} progress: {kvp.Value} / {numObjects + 1}"); + } + ) + .ConfigureAwait(false); Console.Clear(); Console.WriteLine("Got those objects back"); @@ -354,7 +368,7 @@ public static async Task SqliteStressTest() } // waits for the buffer to be empty. - await transport.WriteComplete(); + await transport.WriteComplete().ConfigureAwait(false); var stopWatchStep = stopWatch.ElapsedMilliseconds; var objsPerSecond = (double)numObjects / (stopWatchStep / 1000); diff --git a/Core/Examples/Subscriptions.cs b/Core/Examples/Subscriptions.cs index 5262873508..3d80eb157e 100644 --- a/Core/Examples/Subscriptions.cs +++ b/Core/Examples/Subscriptions.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Threading.Tasks; using Newtonsoft.Json; using Speckle.Core.Api; @@ -15,7 +15,9 @@ public static async Task SubscriptionConnection() Console.WriteLine("Client created..."); - Console.WriteLine("Subscribing to stream created. On first created event will subscribe to that stream's updates."); + Console.WriteLine( + "Subscribing to stream created. On first created event will subscribe to that stream's updates." 
+ ); myClient.SubscribeUserStreamAdded(); @@ -27,7 +29,8 @@ public static async Task SubscriptionConnection() { first = false; myClient.SubscribeStreamUpdated(e.id); - myClient.OnStreamUpdated += MyClient_OnStreamUpdated; ; + myClient.OnStreamUpdated += MyClient_OnStreamUpdated; + ; } Console.WriteLine("UserStreamCreated Fired"); diff --git a/Core/IntegrationTests/Api.cs b/Core/IntegrationTests/Api.cs index 54697c6b7a..9005984da2 100644 --- a/Core/IntegrationTests/Api.cs +++ b/Core/IntegrationTests/Api.cs @@ -1,401 +1,473 @@ -using Speckle.Core.Api; +using Speckle.Core.Api; using Speckle.Core.Credentials; -using Speckle.Core.Logging; using Speckle.Core.Models; using Speckle.Core.Transports; using Tests; -namespace TestsIntegration +namespace TestsIntegration; + +public class Api { - public class Api + private string branchId = ""; + private string branchName = ""; + private string commitId = ""; + + public Account firstUserAccount, + secondUserAccount; + + public Client myClient, + secondClient; + + public ServerTransport myServerTransport, + otherServerTransport; + + private string objectId = ""; + + private string streamId = ""; + + [OneTimeSetUp] + public async Task Setup() + { + firstUserAccount = await Fixtures.SeedUser().ConfigureAwait(false); + secondUserAccount = await Fixtures.SeedUser().ConfigureAwait(false); + + myClient = new Client(firstUserAccount); + secondClient = new Client(secondUserAccount); + myServerTransport = new ServerTransport(firstUserAccount, null); + myServerTransport.Api.CompressPayloads = false; + otherServerTransport = new ServerTransport(firstUserAccount, null); + otherServerTransport.Api.CompressPayloads = false; + } + + [Test] + public async Task ActiveUserGet() + { + var res = await myClient.ActiveUserGet().ConfigureAwait(false); + Assert.That(myClient.Account.userInfo.id, Is.EqualTo(res.id)); + } + + [Test] + public async Task OtherUserGet() + { + var res = await 
myClient.OtherUserGet(secondUserAccount.userInfo.id).ConfigureAwait(false); + Assert.That(secondUserAccount.userInfo.name, Is.EqualTo(res.name)); + } + + [Test] + public async Task UserSearch() + { + var res = await myClient.UserSearch(firstUserAccount.userInfo.email).ConfigureAwait(false); + Assert.That(res.Count, Is.EqualTo(1)); + Assert.That(firstUserAccount.userInfo.id, Is.EqualTo(res[0].id)); + } + + [Test] + public async Task ServerVersion() + { + var res = await myClient.GetServerVersion().ConfigureAwait(false); + + Assert.NotNull(res); + } + + [Test, Order(0)] + public async Task StreamCreate() + { + var res = await myClient + .StreamCreate(new StreamCreateInput { description = "Hello World", name = "Super Stream 01" }) + .ConfigureAwait(false); + + myServerTransport.StreamId = res; + otherServerTransport.StreamId = res; + Assert.NotNull(res); + streamId = res; + } + + [Test, Order(10)] + public async Task StreamsGet() + { + var res = await myClient.StreamsGet().ConfigureAwait(false); + + Assert.NotNull(res); + } + + [Test, Order(11)] + public async Task StreamGet() + { + var res = await myClient.StreamGet(streamId).ConfigureAwait(false); + + Assert.NotNull(res); + Assert.That(res.branches.items[0].name, Is.EqualTo("main")); + Assert.IsNotEmpty(res.collaborators); + } + + [Test, Order(12)] + public async Task StreamSearch() + { + var res = await myClient.StreamSearch(streamId).ConfigureAwait(false); + + Assert.NotNull(res); + } + + [Test, Order(20)] + public async Task StreamUpdate() + { + var res = await myClient + .StreamUpdate( + new StreamUpdateInput + { + id = streamId, + description = "Hello World", + name = "Super Stream 01 EDITED" + } + ) + .ConfigureAwait(false); + + Assert.IsTrue(res); + } + + [Test, Order(31)] + public async Task StreamInviteCreate() + { + var res = await myClient + .StreamInviteCreate( + new StreamInviteCreateInput + { + streamId = streamId, + email = secondUserAccount.userInfo.email, + message = "Whasssup!" 
+ } + ) + .ConfigureAwait(false); + + Assert.IsTrue(res); + + Assert.ThrowsAsync( + async () => + await myClient + .StreamInviteCreate(new StreamInviteCreateInput { streamId = streamId }) + .ConfigureAwait(false) + ); + } + + [Test, Order(32)] + public async Task StreamInviteGet() + { + var invites = await secondClient.GetAllPendingInvites().ConfigureAwait(false); + + Assert.NotNull(invites); + } + + [Test, Order(33)] + public async Task StreamInviteUse() + { + var invites = await secondClient.GetAllPendingInvites().ConfigureAwait(false); + + var res = await secondClient + .StreamInviteUse(invites[0].streamId, invites[0].token) + .ConfigureAwait(false); + + Assert.IsTrue(res); + } + + [Test, Order(34)] + public async Task StreamUpdatePermission() + { + var res = await myClient + .StreamUpdatePermission( + new StreamPermissionInput + { + role = "stream:reviewer", + streamId = streamId, + userId = secondUserAccount.userInfo.id + } + ) + .ConfigureAwait(false); + + Assert.IsTrue(res); + } + + [Test, Order(40)] + public async Task StreamRevokePermission() + { + var res = await myClient + .StreamRevokePermission( + new StreamRevokePermissionInput + { + streamId = streamId, + userId = secondUserAccount.userInfo.id + } + ) + .ConfigureAwait(false); + + Assert.IsTrue(res); + } + + #region activity + + [Test, Order(51)] + public async Task StreamGetActivity() + { + var res = await myClient.StreamGetActivity(streamId).ConfigureAwait(false); + + Assert.NotNull(res); + //Assert.AreEqual(commitId, res[0].); + } + + #endregion + + #region comments + + [Test, Order(52)] + public async Task StreamGetComments() + { + var res = await myClient.StreamGetActivity(streamId).ConfigureAwait(false); + + Assert.NotNull(res); + //Assert.AreEqual(commitId, res[0].); + } + + #endregion + + [Test, Order(60)] + public async Task StreamDelete() + { + var res = await myClient.StreamDelete(streamId).ConfigureAwait(false); + Assert.IsTrue(res); + } + + #region branches + + [Test, Order(41)] + public 
async Task BranchCreate() + { + var res = await myClient + .BranchCreate( + new BranchCreateInput + { + streamId = streamId, + description = "this is a sample branch", + name = "sample-branch" + } + ) + .ConfigureAwait(false); + Assert.NotNull(res); + branchId = res; + branchName = "sample-branch"; + } + + [Test, Order(42)] + public async Task BranchGet() + { + var res = await myClient.BranchGet(streamId, branchName).ConfigureAwait(false); + + Assert.NotNull(res); + Assert.That(res.description, Is.EqualTo("this is a sample branch")); + } + + [Test, Order(43)] + public async Task StreamGetBranches() + { + var res = await myClient.StreamGetBranches(streamId).ConfigureAwait(false); + + Assert.NotNull(res); + // Branches are now returned in order of creation so 'main' should always go first. + Assert.That(res[0].name, Is.EqualTo("main")); + } + + #region commit + + [Test, Order(43)] + public async Task CommitCreate() { - public Account firstUserAccount, secondUserAccount; - - public Client myClient, secondClient; - public ServerTransport myServerTransport, otherServerTransport; - - private string streamId = ""; - private string branchId = ""; - private string branchName = ""; - private string commitId = ""; - private string objectId = ""; - - [OneTimeSetUp] - public async Task Setup() - { - firstUserAccount = await Fixtures.SeedUser(); - secondUserAccount = await Fixtures.SeedUser(); - - myClient = new Client(firstUserAccount); - secondClient = new Client(secondUserAccount); - myServerTransport = new ServerTransport(firstUserAccount, null); - myServerTransport.Api.CompressPayloads = false; - otherServerTransport = new ServerTransport(firstUserAccount, null); - otherServerTransport.Api.CompressPayloads = false; - } - - - [Test] - public async Task ActiveUserGet() - { - var res = await myClient.ActiveUserGet(); - Assert.That(myClient.Account.userInfo.id, Is.EqualTo(res.id)); - } - - [Test] - public async Task OtherUserGet() - { - var res = await 
myClient.OtherUserGet(secondUserAccount.userInfo.id); - Assert.That(secondUserAccount.userInfo.name, Is.EqualTo(res.name)); - - } - - [Test] - public async Task UserSearch() - { - var res = await myClient.UserSearch(firstUserAccount.userInfo.email); - Assert.That(res.Count, Is.EqualTo(1)); - Assert.That(firstUserAccount.userInfo.id, Is.EqualTo(res[0].id)); - } - - [Test] - public async Task ServerVersion() - { - var res = await myClient.GetServerVersion(); - - Assert.NotNull(res); - } - - [Test, Order(0)] - public async Task StreamCreate() - { - var res = await myClient.StreamCreate(new StreamCreateInput - { - description = "Hello World", - name = "Super Stream 01" - }); - - myServerTransport.StreamId = res; - otherServerTransport.StreamId = res; - Assert.NotNull(res); - streamId = res; - } - - [Test, Order(10)] - public async Task StreamsGet() - { - var res = await myClient.StreamsGet(); - - Assert.NotNull(res); - } - - [Test, Order(11)] - public async Task StreamGet() - { - var res = await myClient.StreamGet(streamId); - - Assert.NotNull(res); - Assert.That(res.branches.items[0].name, Is.EqualTo("main")); - Assert.IsNotEmpty(res.collaborators); - } - - [Test, Order(12)] - public async Task StreamSearch() - { - var res = await myClient.StreamSearch(streamId); - - Assert.NotNull(res); - } - - [Test, Order(20)] - public async Task StreamUpdate() - { - var res = await myClient.StreamUpdate(new StreamUpdateInput - { - id = streamId, - description = "Hello World", - name = "Super Stream 01 EDITED" - }); - - Assert.IsTrue(res); - } - - [Test, Order(31)] - public async Task StreamInviteCreate() - { - var res = await myClient.StreamInviteCreate( - new StreamInviteCreateInput { streamId = streamId, email = secondUserAccount.userInfo.email, message = "Whasssup!" 
} - ); - - Assert.IsTrue(res); - - Assert.ThrowsAsync(async () => - await myClient.StreamInviteCreate(new StreamInviteCreateInput { streamId = streamId })); - } - - [Test, Order(32)] - public async Task StreamInviteGet() - { - var invites = await secondClient.GetAllPendingInvites(); - - Assert.NotNull(invites); - } - - [Test, Order(33)] - public async Task StreamInviteUse() - { - var invites = await secondClient.GetAllPendingInvites(); - - var res = await secondClient.StreamInviteUse(invites[0].streamId, invites[0].token); - - Assert.IsTrue(res); - } - - [Test, Order(34)] - public async Task StreamUpdatePermission() - { - var res = await myClient.StreamUpdatePermission(new StreamPermissionInput - { - role = "stream:reviewer", - streamId = streamId, - userId = secondUserAccount.userInfo.id - }); - - Assert.IsTrue(res); - } - - [Test, Order(40)] - public async Task StreamRevokePermission() - { - var res = await myClient.StreamRevokePermission( - new StreamRevokePermissionInput { streamId = streamId, userId = secondUserAccount.userInfo.id } - ); - - Assert.IsTrue(res); - } - - #region branches - - [Test, Order(41)] - public async Task BranchCreate() - { - var res = await myClient.BranchCreate(new BranchCreateInput - { - streamId = streamId, - description = "this is a sample branch", - name = "sample-branch" - }); - Assert.NotNull(res); - branchId = res; - branchName = "sample-branch"; - } - - [Test, Order(42)] - public async Task BranchGet() - { - var res = await myClient.BranchGet(streamId, branchName); - - Assert.NotNull(res); - Assert.That(res.description, Is.EqualTo("this is a sample branch")); - } - - [Test, Order(43)] - public async Task StreamGetBranches() - { - var res = await myClient.StreamGetBranches(streamId); - - Assert.NotNull(res); - // Branches are now returned in order of creation so 'main' should always go first. 
- Assert.That(res[0].name, Is.EqualTo("main")); - } - - #region commit - - [Test, Order(43)] - public async Task CommitCreate() - { - var myObject = new Base(); - var ptsList = new List(); - for (int i = 0; i < 100; i++) - ptsList.Add(new Point(i, i, i)); - - myObject["@Points"] = ptsList; - - bool sendError = false; - objectId = await Operations.Send(myObject, new List() { myServerTransport }, false, disposeTransports: true, onErrorAction: (s, e) => { sendError = true; }); - Assert.IsFalse(sendError); - - var res = await myClient.CommitCreate(new CommitCreateInput - { - streamId = streamId, - branchName = branchName, - objectId = objectId, - message = "Fibber Fibbo", - sourceApplication = "Tests", - totalChildrenCount = 100, - }); - - Assert.NotNull(res); - commitId = res; - - var res2 = await myClient.CommitCreate(new CommitCreateInput - { - streamId = streamId, - branchName = branchName, - objectId = objectId, - message = "Fabber Fabbo", - sourceApplication = "Tests", - totalChildrenCount = 100, - parents = new List() { commitId } - }); - - Assert.NotNull(res2); - commitId = res2; - } - - [Test, Order(44)] - public async Task CommitGet() - { - var res = await myClient.CommitGet(streamId, commitId); - - Assert.NotNull(res); - Assert.That(res.message, Is.EqualTo("Fabber Fabbo")); - } - - [Test, Order(45)] - public async Task StreamGetCommits() - { - var res = await myClient.StreamGetCommits(streamId); - - Assert.NotNull(res); - Assert.That(res[0].id, Is.EqualTo(commitId)); - } - - #region object - - [Test, Order(45)] - public async Task ObjectGet() - { - var res = await myClient.ObjectGet(streamId, objectId); - - Assert.NotNull(res); - Assert.That(res.totalChildrenCount, Is.EqualTo(100)); - } - - #endregion - - [Test, Order(46)] - public async Task CommitUpdate() - { - var res = await myClient.CommitUpdate(new CommitUpdateInput - { - streamId = streamId, - id = commitId, - message = "DIM IS DA BEST" - }); - - Assert.IsTrue(res); - } - - [Test, Order(47)] - public 
async Task CommitReceived() - { - var res = await myClient.CommitReceived(new CommitReceivedInput { commitId = commitId, streamId = streamId, sourceApplication = "sharp-tests", message = "The test message" }); - - Assert.IsTrue(res); - } - - [Test, Order(48)] - public async Task CommitDelete() - { - var res = await myClient.CommitDelete(new CommitDeleteInput { id = commitId, streamId = streamId } - ); - Assert.IsTrue(res); - } - - #endregion - - - [Test, Order(49)] - public async Task BranchUpdate() - { - var res = await myClient.BranchUpdate(new BranchUpdateInput - { - streamId = streamId, - id = branchId, - name = "sample-branch EDITED" - }); - - Assert.IsTrue(res); - } - - [Test, Order(50)] - public async Task BranchDelete() - { - var res = await myClient.BranchDelete(new BranchDeleteInput { id = branchId, streamId = streamId } - ); - Assert.IsTrue(res); - } - - #endregion - - #region activity - - [Test, Order(51)] - public async Task StreamGetActivity() - { - var res = await myClient.StreamGetActivity(streamId); - - Assert.NotNull(res); - //Assert.AreEqual(commitId, res[0].); - } - #endregion - - #region comments - - [Test, Order(52)] - public async Task StreamGetComments() - { - var res = await myClient.StreamGetActivity(streamId); - - Assert.NotNull(res); - //Assert.AreEqual(commitId, res[0].); - } - #endregion - - #region send/receive bare - - //[Test, Order(60)] - //public async Task SendDetached() - //{ - // var myObject = new Base(); - // var ptsList = new List(); - // for (int i = 0; i < 100; i++) - // ptsList.Add(new Point(i, i, i)); - - // myObject["@Points"] = ptsList; - - // var otherTransport = new ServerTransport(firstUserAccount, null); - // otherTransport.StreamId = - - // objectId = await Operations.Send(myObject, new List() { myServerTransport }, disposeTransports: true); - //} - - //[Test, Order(61)] - //public async Task ReceiveAndCompose() - //{ - // var myObject = await Operations.Receive(objectId, myServerTransport); - // 
Assert.NotNull(myObject); - // Assert.AreEqual(100, ((List)myObject["@Points"]).Count); - //} - - #endregion - - [Test, Order(60)] - public async Task StreamDelete() - { - var res = await myClient.StreamDelete(streamId); - Assert.IsTrue(res); - } + var myObject = new Base(); + var ptsList = new List(); + for (int i = 0; i < 100; i++) + ptsList.Add(new Point(i, i, i)); + + myObject["@Points"] = ptsList; + + bool sendError = false; + objectId = await Operations + .Send( + myObject, + new List() { myServerTransport }, + false, + disposeTransports: true, + onErrorAction: (s, e) => + { + sendError = true; + } + ) + .ConfigureAwait(false); + Assert.IsFalse(sendError); + + var res = await myClient + .CommitCreate( + new CommitCreateInput + { + streamId = streamId, + branchName = branchName, + objectId = objectId, + message = "Fibber Fibbo", + sourceApplication = "Tests", + totalChildrenCount = 100 + } + ) + .ConfigureAwait(false); + + Assert.NotNull(res); + commitId = res; + + var res2 = await myClient + .CommitCreate( + new CommitCreateInput + { + streamId = streamId, + branchName = branchName, + objectId = objectId, + message = "Fabber Fabbo", + sourceApplication = "Tests", + totalChildrenCount = 100, + parents = new List() { commitId } + } + ) + .ConfigureAwait(false); + + Assert.NotNull(res2); + commitId = res2; + } + + [Test, Order(44)] + public async Task CommitGet() + { + var res = await myClient.CommitGet(streamId, commitId).ConfigureAwait(false); + + Assert.NotNull(res); + Assert.That(res.message, Is.EqualTo("Fabber Fabbo")); + } + + [Test, Order(45)] + public async Task StreamGetCommits() + { + var res = await myClient.StreamGetCommits(streamId).ConfigureAwait(false); + + Assert.NotNull(res); + Assert.That(res[0].id, Is.EqualTo(commitId)); } + + #region object + + [Test, Order(45)] + public async Task ObjectGet() + { + var res = await myClient.ObjectGet(streamId, objectId).ConfigureAwait(false); + + Assert.NotNull(res); + Assert.That(res.totalChildrenCount, 
Is.EqualTo(100)); + } + + #endregion + + [Test, Order(46)] + public async Task CommitUpdate() + { + var res = await myClient + .CommitUpdate( + new CommitUpdateInput + { + streamId = streamId, + id = commitId, + message = "DIM IS DA BEST" + } + ) + .ConfigureAwait(false); + + Assert.IsTrue(res); + } + + [Test, Order(47)] + public async Task CommitReceived() + { + var res = await myClient + .CommitReceived( + new CommitReceivedInput + { + commitId = commitId, + streamId = streamId, + sourceApplication = "sharp-tests", + message = "The test message" + } + ) + .ConfigureAwait(false); + + Assert.IsTrue(res); + } + + [Test, Order(48)] + public async Task CommitDelete() + { + var res = await myClient + .CommitDelete(new CommitDeleteInput { id = commitId, streamId = streamId }) + .ConfigureAwait(false); + Assert.IsTrue(res); + } + + #endregion + + + [Test, Order(49)] + public async Task BranchUpdate() + { + var res = await myClient + .BranchUpdate( + new BranchUpdateInput + { + streamId = streamId, + id = branchId, + name = "sample-branch EDITED" + } + ) + .ConfigureAwait(false); + + Assert.IsTrue(res); + } + + [Test, Order(50)] + public async Task BranchDelete() + { + var res = await myClient + .BranchDelete(new BranchDeleteInput { id = branchId, streamId = streamId }) + .ConfigureAwait(false); + Assert.IsTrue(res); + } + + #endregion + + #region send/receive bare + + //[Test, Order(60)] + //public async Task SendDetached() + //{ + // var myObject = new Base(); + // var ptsList = new List(); + // for (int i = 0; i < 100; i++) + // ptsList.Add(new Point(i, i, i)); + + // myObject["@Points"] = ptsList; + + // var otherTransport = new ServerTransport(firstUserAccount, null); + // otherTransport.StreamId = + + // objectId = await Operations.Send(myObject, new List() { myServerTransport }, disposeTransports: true); + //} + + //[Test, Order(61)] + //public async Task ReceiveAndCompose() + //{ + // var myObject = await Operations.Receive(objectId, myServerTransport); + // 
Assert.NotNull(myObject); + // Assert.AreEqual(100, ((List)myObject["@Points"]).Count); + //} + + #endregion } diff --git a/Core/IntegrationTests/Fixtures.cs b/Core/IntegrationTests/Fixtures.cs index 0c1ddfb50e..0ee0f07465 100644 --- a/Core/IntegrationTests/Fixtures.cs +++ b/Core/IntegrationTests/Fixtures.cs @@ -1,57 +1,51 @@ -using System.Net.Mime; +using System.Net.Mime; using System.Text; using System.Web; using Newtonsoft.Json; +using Serilog.Events; using Speckle.Core.Api; using Speckle.Core.Credentials; using Speckle.Core.Logging; using Speckle.Core.Models; -namespace TestsIntegration +namespace TestsIntegration; + +[SetUpFixture] +public class SetUp { - [SetUpFixture] - public class SetUp + [OneTimeSetUp] + public void BeforeAll() { - [OneTimeSetUp] - public void BeforeAll() - { - SpeckleLog.Initialize( - "Core", - "Testing", - new SpeckleLogConfiguration( - Serilog.Events.LogEventLevel.Debug, - logToConsole: true, - logToFile: false, - logToSeq: false - ) - ); - SpeckleLog.Logger.Information("Initialized logger for testing"); - } + SpeckleLog.Initialize( + "Core", + "Testing", + new SpeckleLogConfiguration(LogEventLevel.Debug, true, logToFile: false, logToSeq: false) + ); + SpeckleLog.Logger.Information("Initialized logger for testing"); } +} - public static class Fixtures - { - public static readonly ServerInfo Server = new ServerInfo - { - url = "http://localhost:3000", - name = "Docker Server" - }; +public static class Fixtures +{ + public static readonly ServerInfo Server = + new() { url = "http://localhost:3000", name = "Docker Server" }; - public static async Task SeedUser() - { - var seed = Guid.NewGuid().ToString().ToLower(); - var user = new Dictionary(); - user["email"] = $"{seed.Substring(0, 7)}@acme.com"; - user["password"] = "12ABC3456789DEF0GHO"; - user["name"] = $"{seed.Substring(0, 5)} Name"; + public static async Task SeedUser() + { + var seed = Guid.NewGuid().ToString().ToLower(); + var user = new Dictionary(); + user["email"] = 
$"{seed.Substring(0, 7)}@acme.com"; + user["password"] = "12ABC3456789DEF0GHO"; + user["name"] = $"{seed.Substring(0, 5)} Name"; - var httpClient = new HttpClient(new HttpClientHandler() { AllowAutoRedirect = false }); - httpClient.BaseAddress = new Uri(Server.url); + var httpClient = new HttpClient(new HttpClientHandler() { AllowAutoRedirect = false }); + httpClient.BaseAddress = new Uri(Server.url); - string redirectUrl; - try - { - var response = await httpClient.PostAsync( + string redirectUrl; + try + { + var response = await httpClient + .PostAsync( "/auth/local/register?challenge=challengingchallenge", // $"{Server.url}/auth/local/register?challenge=challengingchallenge", new StringContent( @@ -59,98 +53,103 @@ public static async Task SeedUser() Encoding.UTF8, MediaTypeNames.Application.Json ) - ); - redirectUrl = response.Headers.Location.AbsoluteUri; - } - catch (Exception e) - { - throw new Exception($"Cannot seed user on the server {Server.url}", e); - } - - var uri = new Uri(redirectUrl); - var query = HttpUtility.ParseQueryString(uri.Query); - - var accessCode = query["access_code"] ?? throw new Exception("Redirect Uri has no 'access_code'."); - var tokenBody = new Dictionary() - { - ["accessCode"] = accessCode, - ["appId"] = "spklwebapp", - ["appSecret"] = "spklwebapp", - ["challenge"] = "challengingchallenge" - }; + ) + .ConfigureAwait(false); + redirectUrl = response.Headers.Location.AbsoluteUri; + } + catch (Exception e) + { + throw new Exception($"Cannot seed user on the server {Server.url}", e); + } + + var uri = new Uri(redirectUrl); + var query = HttpUtility.ParseQueryString(uri.Query); + + var accessCode = + query["access_code"] ?? 
throw new Exception("Redirect Uri has no 'access_code'."); + var tokenBody = new Dictionary() + { + ["accessCode"] = accessCode, + ["appId"] = "spklwebapp", + ["appSecret"] = "spklwebapp", + ["challenge"] = "challengingchallenge" + }; - var tokenResponse = await httpClient.PostAsync( + var tokenResponse = await httpClient + .PostAsync( "/auth/token", new StringContent( JsonConvert.SerializeObject(tokenBody), Encoding.UTF8, MediaTypeNames.Application.Json ) - ); - var deserialised = JsonConvert.DeserializeObject>( - await tokenResponse.Content.ReadAsStringAsync() - ); + ) + .ConfigureAwait(false); + var deserialised = JsonConvert.DeserializeObject>( + await tokenResponse.Content.ReadAsStringAsync().ConfigureAwait(false) + ); - var acc = new Account + var acc = new Account + { + token = deserialised["token"], + userInfo = new UserInfo { - token = deserialised["token"], - userInfo = new UserInfo - { - id = user["name"], - email = user["email"], - name = user["name"] - }, - serverInfo = Server - }; - var client = new Client(acc); - - var user1 = await client.ActiveUserGet(); - acc.userInfo.id = user1.id; - return acc; - } + id = user["name"], + email = user["email"], + name = user["name"] + }, + serverInfo = Server + }; + var client = new Client(acc); - public static Base GenerateSimpleObject() - { - var @base = new Base(); - @base["foo"] = "foo"; - @base["bar"] = "bar"; - @base["baz"] = "baz"; - @base["now"] = DateTime.Now.ToString(); + var user1 = await client.ActiveUserGet().ConfigureAwait(false); + acc.userInfo.id = user1.id; + return acc; + } - return @base; - } + public static Base GenerateSimpleObject() + { + var @base = new Base(); + @base["foo"] = "foo"; + @base["bar"] = "bar"; + @base["baz"] = "baz"; + @base["now"] = DateTime.Now.ToString(); - public static Base GenerateNestedObject() - { - var @base = new Base(); - @base["foo"] = "foo"; - @base["bar"] = "bar"; - @base["@baz"] = new Base(); - ((Base)@base["@baz"])["mux"] = "mux"; - 
((Base)@base["@baz"])["qux"] = "qux"; - - return @base; - } + return @base; + } - public static Blob[] GenerateThreeBlobs() => - new Blob[] - { - GenerateBlob("blob 1 data"), - GenerateBlob("blob 2 data"), - GenerateBlob("blob 3 data"), - }; + public static Base GenerateNestedObject() + { + var @base = new Base(); + @base["foo"] = "foo"; + @base["bar"] = "bar"; + @base["@baz"] = new Base(); + ((Base)@base["@baz"])["mux"] = "mux"; + ((Base)@base["@baz"])["qux"] = "qux"; + + return @base; + } - private static Blob GenerateBlob(string content) + public static Blob[] GenerateThreeBlobs() + { + return new Blob[] { - var filePath = Path.GetTempFileName(); - File.WriteAllText(filePath, content); - return new Blob(filePath); - } + GenerateBlob("blob 1 data"), + GenerateBlob("blob 2 data"), + GenerateBlob("blob 3 data") + }; } - public class UserIdResponse + private static Blob GenerateBlob(string content) { - public string userId { get; set; } - public string apiToken { get; set; } + var filePath = Path.GetTempFileName(); + File.WriteAllText(filePath, content); + return new Blob(filePath); } } + +public class UserIdResponse +{ + public string userId { get; set; } + public string apiToken { get; set; } +} diff --git a/Core/IntegrationTests/GraphQLCLient.cs b/Core/IntegrationTests/GraphQLCLient.cs index 8210866712..031968ae04 100644 --- a/Core/IntegrationTests/GraphQLCLient.cs +++ b/Core/IntegrationTests/GraphQLCLient.cs @@ -1,28 +1,28 @@ +using GraphQL; using Speckle.Core.Api; using Speckle.Core.Credentials; -using GraphQL; -using System.Diagnostics; -namespace TestsIntegration +namespace TestsIntegration; + +public class GraphQLClientTests { - public class GraphQLClientTests - { - private Account _account; - private Client _client; + private Account _account; + private Client _client; - [OneTimeSetUp] - public async Task Setup() - { - _account = await Fixtures.SeedUser(); - _client = new Client(_account); - } + [OneTimeSetUp] + public async Task Setup() + { + _account = 
await Fixtures.SeedUser().ConfigureAwait(false); + _client = new Client(_account); + } - [Test] - public async Task ThrowsForbiddenException() - { - Assert.ThrowsAsync>>( - async () => - await _client.ExecuteGraphQLRequest>( + [Test] + public async Task ThrowsForbiddenException() + { + Assert.ThrowsAsync>>( + async () => + await _client + .ExecuteGraphQLRequest>( new GraphQLRequest { Query = @@ -34,7 +34,7 @@ await _client.ExecuteGraphQLRequest>( }, CancellationToken.None ) - ); - } + .ConfigureAwait(false) + ); } } diff --git a/Core/IntegrationTests/ServerTransportTests.cs b/Core/IntegrationTests/ServerTransportTests.cs index ff6db06181..3248760cc4 100644 --- a/Core/IntegrationTests/ServerTransportTests.cs +++ b/Core/IntegrationTests/ServerTransportTests.cs @@ -1,174 +1,181 @@ -using Speckle.Core.Api; +using Speckle.Core.Api; using Speckle.Core.Credentials; using Speckle.Core.Helpers; using Speckle.Core.Models; using Speckle.Core.Transports; -namespace TestsIntegration +namespace TestsIntegration; + +public class ServerTransportTests { - public class ServerTransportTests + private string _basePath; + public Account account; + public Client client; + public string streamId; + public ServerTransport transport; + + [OneTimeSetUp] + public async Task InitialSetup() { - public Account account; - public Client client; - public ServerTransport transport; - public string streamId; - private string _basePath; - - [OneTimeSetUp] - public async Task InitialSetup() - { - _basePath = Path.Join(Path.GetTempPath(), "speckleTest"); - - CleanData(); - Directory.CreateDirectory(_basePath); - SpecklePathProvider.OverrideApplicationDataPath(_basePath); + _basePath = Path.Join(Path.GetTempPath(), "speckleTest"); - account = await Fixtures.SeedUser(); - client = new Client(account); - streamId = client.StreamCreate(new StreamCreateInput - { - description = "Flobber", - name = "Blobber" - }).Result; - - } + CleanData(); + Directory.CreateDirectory(_basePath); + 
SpecklePathProvider.OverrideApplicationDataPath(_basePath); - [SetUp] - public void Setup() - { - CleanData(); - // need to recreate the server transport object for each test - // to make sure all folders are properly initialized - transport = new ServerTransport(account, streamId); - } + account = await Fixtures.SeedUser().ConfigureAwait(false); + client = new Client(account); + streamId = client + .StreamCreate(new StreamCreateInput { description = "Flobber", name = "Blobber" }) + .Result; + } - [TearDown] - public void TearDown() - { - CleanData(); - } + [SetUp] + public void Setup() + { + CleanData(); + // need to recreate the server transport object for each test + // to make sure all folders are properly initialized + transport = new ServerTransport(account, streamId); + } - private void CleanData() - { - try - { - Directory.Delete(_basePath, true); - } - catch (DirectoryNotFoundException) - { - - } - } + [TearDown] + public void TearDown() + { + CleanData(); + } - [Test] - public async Task SendObject() + private void CleanData() + { + try { - var myObject = Fixtures.GenerateNestedObject(); - - var objectId = await Operations.Send(myObject, new List { transport }); - - var test = objectId; - Assert.IsNotNull(test); + Directory.Delete(_basePath, true); } + catch (DirectoryNotFoundException) { } + } - [Test] - public async Task SendAndReceiveObjectWithBlobs() - { - var myObject = Fixtures.GenerateSimpleObject(); - myObject["blobs"] = Fixtures.GenerateThreeBlobs(); - - var sentObjectId = await Operations.Send(myObject, new List { transport }); - - // NOTE: used to debug diffing - // await Operations.Send(myObject, new List { transport }); - - var receivedObject = await Operations.Receive(sentObjectId, transport); + [Test] + public async Task SendObject() + { + var myObject = Fixtures.GenerateNestedObject(); - var allFiles = Directory.GetFiles(transport.BlobStorageFolder) - .Select(fp => fp.Split(Path.DirectorySeparatorChar).Last()).ToList(); - var blobPaths = 
allFiles - .Where(fp => fp.Length > Blob.LocalHashPrefixLength) // excludes things like .DS_store - .ToList(); + var objectId = await Operations + .Send(myObject, new List { transport }) + .ConfigureAwait(false); - // Check that there are three downloaded blobs! - Assert.That(blobPaths.Count, Is.EqualTo(3)); + var test = objectId; + Assert.IsNotNull(test); + } - var blobs = (receivedObject["blobs"] as List).Cast().ToList(); - // Check that we have three blobs - Assert.IsTrue(blobs.Count == 3); - // Check that received blobs point to local path (where they were received) - Assert.IsTrue(blobs[0].filePath.Contains(transport.BlobStorageFolder)); - Assert.IsTrue(blobs[1].filePath.Contains(transport.BlobStorageFolder)); - Assert.IsTrue(blobs[2].filePath.Contains(transport.BlobStorageFolder)); - } - [Test] - public async Task SendWithBlobsWithoutSQLiteSendCache() - { - var myObject = Fixtures.GenerateSimpleObject(); - myObject["blobs"] = Fixtures.GenerateThreeBlobs(); - - var memTransport = new MemoryTransport(); - var sentObjectId = await Operations.Send( - myObject, - new List { transport, memTransport }, - useDefaultCache: false - ); - - var receivedObject = await Operations.Receive(sentObjectId, transport); - - var allFiles = Directory.GetFiles(transport.BlobStorageFolder) - .Select(fp => fp.Split(Path.DirectorySeparatorChar).Last()).ToList(); - var blobPaths = allFiles - .Where(fp => fp.Length > Blob.LocalHashPrefixLength) // excludes things like .DS_store - .ToList(); - - // Check that there are three downloaded blobs! 
- Assert.That(blobPaths.Count, Is.EqualTo(3)); - - var blobs = (receivedObject["blobs"] as List).Cast().ToList(); - // Check that we have three blobs - Assert.IsTrue(blobs.Count == 3); - // Check that received blobs point to local path (where they were received) - Assert.IsTrue(blobs[0].filePath.Contains(transport.BlobStorageFolder)); - Assert.IsTrue(blobs[1].filePath.Contains(transport.BlobStorageFolder)); - Assert.IsTrue(blobs[2].filePath.Contains(transport.BlobStorageFolder)); - } + [Test] + public async Task SendAndReceiveObjectWithBlobs() + { + var myObject = Fixtures.GenerateSimpleObject(); + myObject["blobs"] = Fixtures.GenerateThreeBlobs(); + + var sentObjectId = await Operations + .Send(myObject, new List { transport }) + .ConfigureAwait(false); + + // NOTE: used to debug diffing + // await Operations.Send(myObject, new List { transport }); + + var receivedObject = await Operations.Receive(sentObjectId, transport).ConfigureAwait(false); + + var allFiles = Directory + .GetFiles(transport.BlobStorageFolder) + .Select(fp => fp.Split(Path.DirectorySeparatorChar).Last()) + .ToList(); + var blobPaths = allFiles + .Where(fp => fp.Length > Blob.LocalHashPrefixLength) // excludes things like .DS_store + .ToList(); + + // Check that there are three downloaded blobs! 
+ Assert.That(blobPaths.Count, Is.EqualTo(3)); + + var blobs = (receivedObject["blobs"] as List).Cast().ToList(); + // Check that we have three blobs + Assert.IsTrue(blobs.Count == 3); + // Check that received blobs point to local path (where they were received) + Assert.IsTrue(blobs[0].filePath.Contains(transport.BlobStorageFolder)); + Assert.IsTrue(blobs[1].filePath.Contains(transport.BlobStorageFolder)); + Assert.IsTrue(blobs[2].filePath.Contains(transport.BlobStorageFolder)); + } - [Test] - public async Task SendReceiveWithCleanedMemoryCache() - { - var myObject = Fixtures.GenerateSimpleObject(); - myObject["blobs"] = Fixtures.GenerateThreeBlobs(); - - var memTransport = new MemoryTransport(); - var sentObjectId = await Operations.Send( - myObject, - new List { transport, memTransport }, - useDefaultCache: false - ); - - memTransport = new MemoryTransport(); - var receivedObject = await Operations.Receive(sentObjectId, transport, memTransport, onErrorAction: (s, e) => { - Console.WriteLine(s); }); - - var allFiles = Directory.GetFiles(transport.BlobStorageFolder) - .Select(fp => fp.Split(Path.DirectorySeparatorChar).Last()).ToList(); - var blobPaths = allFiles - .Where(fp => fp.Length > Blob.LocalHashPrefixLength) // excludes things like .DS_store - .ToList(); - - // Check that there are three downloaded blobs! 
- Assert.That(blobPaths.Count, Is.EqualTo(3)); - - var blobs = (receivedObject["blobs"] as List).Cast().ToList(); - // Check that we have three blobs - Assert.IsTrue(blobs.Count == 3); - // Check that received blobs point to local path (where they were received) - Assert.IsTrue(blobs[0].filePath.Contains(transport.BlobStorageFolder)); - Assert.IsTrue(blobs[1].filePath.Contains(transport.BlobStorageFolder)); - Assert.IsTrue(blobs[2].filePath.Contains(transport.BlobStorageFolder)); - } + [Test] + public async Task SendWithBlobsWithoutSQLiteSendCache() + { + var myObject = Fixtures.GenerateSimpleObject(); + myObject["blobs"] = Fixtures.GenerateThreeBlobs(); + + var memTransport = new MemoryTransport(); + var sentObjectId = await Operations + .Send(myObject, new List { transport, memTransport }, false) + .ConfigureAwait(false); + + var receivedObject = await Operations.Receive(sentObjectId, transport).ConfigureAwait(false); + + var allFiles = Directory + .GetFiles(transport.BlobStorageFolder) + .Select(fp => fp.Split(Path.DirectorySeparatorChar).Last()) + .ToList(); + var blobPaths = allFiles + .Where(fp => fp.Length > Blob.LocalHashPrefixLength) // excludes things like .DS_store + .ToList(); + + // Check that there are three downloaded blobs! 
+ Assert.That(blobPaths.Count, Is.EqualTo(3)); + + var blobs = (receivedObject["blobs"] as List).Cast().ToList(); + // Check that we have three blobs + Assert.IsTrue(blobs.Count == 3); + // Check that received blobs point to local path (where they were received) + Assert.IsTrue(blobs[0].filePath.Contains(transport.BlobStorageFolder)); + Assert.IsTrue(blobs[1].filePath.Contains(transport.BlobStorageFolder)); + Assert.IsTrue(blobs[2].filePath.Contains(transport.BlobStorageFolder)); + } + [Test] + public async Task SendReceiveWithCleanedMemoryCache() + { + var myObject = Fixtures.GenerateSimpleObject(); + myObject["blobs"] = Fixtures.GenerateThreeBlobs(); + + var memTransport = new MemoryTransport(); + var sentObjectId = await Operations + .Send(myObject, new List { transport, memTransport }, false) + .ConfigureAwait(false); + + memTransport = new MemoryTransport(); + var receivedObject = await Operations + .Receive( + sentObjectId, + transport, + memTransport, + onErrorAction: (s, e) => + { + Console.WriteLine(s); + } + ) + .ConfigureAwait(false); + + var allFiles = Directory + .GetFiles(transport.BlobStorageFolder) + .Select(fp => fp.Split(Path.DirectorySeparatorChar).Last()) + .ToList(); + var blobPaths = allFiles + .Where(fp => fp.Length > Blob.LocalHashPrefixLength) // excludes things like .DS_store + .ToList(); + + // Check that there are three downloaded blobs! 
+ Assert.That(blobPaths.Count, Is.EqualTo(3)); + + var blobs = (receivedObject["blobs"] as List).Cast().ToList(); + // Check that we have three blobs + Assert.IsTrue(blobs.Count == 3); + // Check that received blobs point to local path (where they were received) + Assert.IsTrue(blobs[0].filePath.Contains(transport.BlobStorageFolder)); + Assert.IsTrue(blobs[1].filePath.Contains(transport.BlobStorageFolder)); + Assert.IsTrue(blobs[2].filePath.Contains(transport.BlobStorageFolder)); } } - diff --git a/Core/IntegrationTests/Subscriptions/Branches.cs b/Core/IntegrationTests/Subscriptions/Branches.cs index 61ec329bee..a8176a99de 100644 --- a/Core/IntegrationTests/Subscriptions/Branches.cs +++ b/Core/IntegrationTests/Subscriptions/Branches.cs @@ -1,132 +1,124 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using NUnit.Framework; using Speckle.Core.Api; using Speckle.Core.Api.SubscriptionModels; using Speckle.Core.Credentials; -namespace TestsIntegration.Subscriptions +namespace TestsIntegration.Subscriptions; + +public class Branches { - public class Branches + private BranchInfo BranchCreatedInfo; + private BranchInfo BranchDeletedInfo; + private string branchId; + private BranchInfo BranchUpdatedInfo; + public Client client; + private string streamId; + public Account testUserAccount; + + [OneTimeSetUp] + public async Task Setup() { - public Client client; - public Account testUserAccount; - - private BranchInfo BranchCreatedInfo; - private BranchInfo BranchUpdatedInfo; - private BranchInfo BranchDeletedInfo; - string branchId; - string streamId; - - [OneTimeSetUp] - public async Task Setup() - { - testUserAccount = await Fixtures.SeedUser(); - client = new Client(testUserAccount); - } + testUserAccount = await Fixtures.SeedUser().ConfigureAwait(false); + client = new Client(testUserAccount); + } - [Test, Order(0)] - public async Task SubscribeBranchCreated() + [Test, Order(0)] + public async Task SubscribeBranchCreated() + { + var streamInput 
= new StreamCreateInput { - var streamInput = new StreamCreateInput - { - description = "Hello World", - name = "Super Stream 01" - }; + description = "Hello World", + name = "Super Stream 01" + }; - streamId = await client.StreamCreate(streamInput); - Assert.NotNull(streamId); + streamId = await client.StreamCreate(streamInput).ConfigureAwait(false); + Assert.NotNull(streamId); - client.SubscribeBranchCreated(streamId); - client.OnBranchCreated += Client_OnBranchCreated; + client.SubscribeBranchCreated(streamId); + client.OnBranchCreated += Client_OnBranchCreated; - Thread.Sleep(5000); //let server catch-up + Thread.Sleep(5000); //let server catch-up - var branchInput = new BranchCreateInput - { - description = "Just testing branch create...", - name = "awesome-features", - streamId = streamId - }; + var branchInput = new BranchCreateInput + { + description = "Just testing branch create...", + name = "awesome-features", + streamId = streamId + }; - branchId = await client.BranchCreate(branchInput); - Assert.NotNull(branchId); + branchId = await client.BranchCreate(branchInput).ConfigureAwait(false); + Assert.NotNull(branchId); - await Task.Run(() => + await Task.Run(() => { Thread.Sleep(1000); //let client catch-up Assert.NotNull(BranchCreatedInfo); Assert.That(BranchCreatedInfo.name, Is.EqualTo(branchInput.name)); - }); - } + }) + .ConfigureAwait(false); + } - private void Client_OnBranchCreated(object sender, BranchInfo e) - { - BranchCreatedInfo = e; - } + private void Client_OnBranchCreated(object sender, BranchInfo e) + { + BranchCreatedInfo = e; + } - [Test, Order(1)] - public async Task SubscribeBranchUpdated() - { - client.SubscribeBranchUpdated(streamId); - client.OnBranchUpdated += Client_OnBranchUpdated; + [Test, Order(1)] + public async Task SubscribeBranchUpdated() + { + client.SubscribeBranchUpdated(streamId); + client.OnBranchUpdated += Client_OnBranchUpdated; - Thread.Sleep(1000); //let server catch-up + Thread.Sleep(1000); //let server catch-up - 
var branchInput = new BranchUpdateInput - { - description = "Just testing branch bpdate...", - name = "cool-features", - streamId = streamId, - id = branchId, - }; + var branchInput = new BranchUpdateInput + { + description = "Just testing branch bpdate...", + name = "cool-features", + streamId = streamId, + id = branchId + }; - var res = await client.BranchUpdate(branchInput); - Assert.True(res); + var res = await client.BranchUpdate(branchInput).ConfigureAwait(false); + Assert.True(res); - await Task.Run(() => + await Task.Run(() => { Thread.Sleep(1000); //let client catch-up Assert.NotNull(BranchUpdatedInfo); Assert.That(BranchUpdatedInfo.name, Is.EqualTo(branchInput.name)); - }); - } + }) + .ConfigureAwait(false); + } - private void Client_OnBranchUpdated(object sender, BranchInfo e) - { - BranchUpdatedInfo = e; - } + private void Client_OnBranchUpdated(object sender, BranchInfo e) + { + BranchUpdatedInfo = e; + } - [Test, Order(3)] - public async Task SubscribeBranchDeleted() - { - client.SubscribeBranchDeleted(streamId); - client.OnBranchDeleted += Client_OnBranchDeleted; + [Test, Order(3)] + public async Task SubscribeBranchDeleted() + { + client.SubscribeBranchDeleted(streamId); + client.OnBranchDeleted += Client_OnBranchDeleted; - Thread.Sleep(1000); //let server catch-up + Thread.Sleep(1000); //let server catch-up - var branchInput = new BranchDeleteInput - { - streamId = streamId, - id = branchId, - }; + var branchInput = new BranchDeleteInput { streamId = streamId, id = branchId }; - var res = await client.BranchDelete(branchInput); - Assert.True(res); + var res = await client.BranchDelete(branchInput).ConfigureAwait(false); + Assert.True(res); - await Task.Run(() => + await Task.Run(() => { Thread.Sleep(1000); //let client catch-up Assert.NotNull(BranchDeletedInfo); Assert.That(BranchDeletedInfo.id, Is.EqualTo(branchId)); - }); - } - - private void Client_OnBranchDeleted(object sender, BranchInfo e) - { - BranchDeletedInfo = e; - } + }) + 
.ConfigureAwait(false); + } + private void Client_OnBranchDeleted(object sender, BranchInfo e) + { + BranchDeletedInfo = e; } } diff --git a/Core/IntegrationTests/Subscriptions/Commits.cs b/Core/IntegrationTests/Subscriptions/Commits.cs index 6660bb2393..2740c55da3 100644 --- a/Core/IntegrationTests/Subscriptions/Commits.cs +++ b/Core/IntegrationTests/Subscriptions/Commits.cs @@ -1,4 +1,4 @@ -using System.Diagnostics; +using System.Diagnostics; using Speckle.Core.Api; using Speckle.Core.Api.SubscriptionModels; using Speckle.Core.Credentials; @@ -6,160 +6,164 @@ using Speckle.Core.Transports; using Tests; -namespace TestsIntegration.Subscriptions +namespace TestsIntegration.Subscriptions; + +public class Commits { - public class Commits + public Client client; + + private CommitInfo CommitCreatedInfo; + private CommitInfo CommitDeletedInfo; + private string commitId; + private CommitInfo CommitUpdatedInfo; + private ServerTransport myServerTransport; + private string streamId; + public Account testUserAccount; + + [OneTimeSetUp] + public async Task Setup() { - public Client client; - public Account testUserAccount; - - private CommitInfo CommitCreatedInfo; - private CommitInfo CommitUpdatedInfo; - private CommitInfo CommitDeletedInfo; - private ServerTransport myServerTransport; - string commitId; - string streamId; - - [OneTimeSetUp] - public async Task Setup() - { - testUserAccount = await Fixtures.SeedUser(); - client = new Client(testUserAccount); - myServerTransport = new ServerTransport(testUserAccount, null); - myServerTransport.Api.CompressPayloads = false; - } - - [Test, Order(0)] - //[Ignore("Ironically, it fails.")] - public async Task SubscribeCommitCreated() - { - var streamInput = new StreamCreateInput - { - description = "Hello World", - name = "Super Stream 01" - }; - - streamId = await client.StreamCreate(streamInput); - Assert.NotNull(streamId); - - myServerTransport.StreamId = streamId; // FML - - var branchInput = new BranchCreateInput - { - 
description = "Just testing branch create...", - name = "awesome-features", - streamId = streamId - }; - - var branchId = await client.BranchCreate(branchInput); - Assert.NotNull(branchId); - - client.SubscribeCommitCreated(streamId); - client.OnCommitCreated += Client_OnCommitCreated; - - Thread.Sleep(1000); //let server catch-up + testUserAccount = await Fixtures.SeedUser().ConfigureAwait(false); + client = new Client(testUserAccount); + myServerTransport = new ServerTransport(testUserAccount, null); + myServerTransport.Api.CompressPayloads = false; + } - var myObject = new Base(); - var ptsList = new List(); - for (int i = 0; i < 100; i++) - ptsList.Add(new Point(i, i, i)); + [Test, Order(0)] + //[Ignore("Ironically, it fails.")] + public async Task SubscribeCommitCreated() + { + var streamInput = new StreamCreateInput + { + description = "Hello World", + name = "Super Stream 01" + }; - myObject["Points"] = ptsList; + streamId = await client.StreamCreate(streamInput).ConfigureAwait(false); + Assert.NotNull(streamId); - var objectId = await Operations.Send(myObject, new List() { myServerTransport }, false, onErrorAction: (name, err) => - { - Debug.WriteLine("Err in transport"); - Debug.WriteLine(err.Message); - }); + myServerTransport.StreamId = streamId; // FML - var commitInput = new CommitCreateInput - { - streamId = streamId, - branchName = "awesome-features", - objectId = objectId, - message = "sending some test points", - sourceApplication = "Tests", - totalChildrenCount = 20 - }; - - commitId = await client.CommitCreate(commitInput); - Assert.NotNull(commitId); - - await Task.Run(() => + var branchInput = new BranchCreateInput + { + description = "Just testing branch create...", + name = "awesome-features", + streamId = streamId + }; + + var branchId = await client.BranchCreate(branchInput).ConfigureAwait(false); + Assert.NotNull(branchId); + + client.SubscribeCommitCreated(streamId); + client.OnCommitCreated += Client_OnCommitCreated; + + 
Thread.Sleep(1000); //let server catch-up + + var myObject = new Base(); + var ptsList = new List(); + for (int i = 0; i < 100; i++) + ptsList.Add(new Point(i, i, i)); + + myObject["Points"] = ptsList; + + var objectId = await Operations + .Send( + myObject, + new List() { myServerTransport }, + false, + onErrorAction: (name, err) => + { + Debug.WriteLine("Err in transport"); + Debug.WriteLine(err.Message); + } + ) + .ConfigureAwait(false); + + var commitInput = new CommitCreateInput + { + streamId = streamId, + branchName = "awesome-features", + objectId = objectId, + message = "sending some test points", + sourceApplication = "Tests", + totalChildrenCount = 20 + }; + + commitId = await client.CommitCreate(commitInput).ConfigureAwait(false); + Assert.NotNull(commitId); + + await Task.Run(() => { Thread.Sleep(2000); //let client catch-up Assert.NotNull(CommitCreatedInfo); Assert.That(CommitCreatedInfo.message, Is.EqualTo(commitInput.message)); - }); - } + }) + .ConfigureAwait(false); + } - private void Client_OnCommitCreated(object sender, CommitInfo e) - { - CommitCreatedInfo = e; - } + private void Client_OnCommitCreated(object sender, CommitInfo e) + { + CommitCreatedInfo = e; + } - [Test, Order(1)] - //[Ignore("Ironically, it fails.")] - public async Task SubscribeCommitUpdated() - { - client.SubscribeCommitUpdated(streamId); - client.OnCommitUpdated += Client_OnCommitUpdated; + [Test, Order(1)] + //[Ignore("Ironically, it fails.")] + public async Task SubscribeCommitUpdated() + { + client.SubscribeCommitUpdated(streamId); + client.OnCommitUpdated += Client_OnCommitUpdated; - Thread.Sleep(1000); //let server catch-up + Thread.Sleep(1000); //let server catch-up - var commitInput = new CommitUpdateInput - { - message = "Just testing commit update...", - streamId = streamId, - id = commitId, - }; + var commitInput = new CommitUpdateInput + { + message = "Just testing commit update...", + streamId = streamId, + id = commitId + }; - var res = await 
client.CommitUpdate(commitInput); - Assert.True(res); + var res = await client.CommitUpdate(commitInput).ConfigureAwait(false); + Assert.True(res); - await Task.Run(() => + await Task.Run(() => { Thread.Sleep(2000); //let client catch-up Assert.NotNull(CommitUpdatedInfo); Assert.That(CommitUpdatedInfo.message, Is.EqualTo(commitInput.message)); - }); - } + }) + .ConfigureAwait(false); + } - private void Client_OnCommitUpdated(object sender, CommitInfo e) - { - CommitUpdatedInfo = e; - } + private void Client_OnCommitUpdated(object sender, CommitInfo e) + { + CommitUpdatedInfo = e; + } - [Test, Order(3)] - //[Ignore("Ironically, it fails.")] - public async Task SubscribeCommitDeleted() - { - client.SubscribeCommitDeleted(streamId); - client.OnCommitDeleted += Client_OnCommitDeleted; + [Test, Order(3)] + //[Ignore("Ironically, it fails.")] + public async Task SubscribeCommitDeleted() + { + client.SubscribeCommitDeleted(streamId); + client.OnCommitDeleted += Client_OnCommitDeleted; - Thread.Sleep(1000); //let server catch-up + Thread.Sleep(1000); //let server catch-up - var commitInput = new CommitDeleteInput - { - streamId = streamId, - id = commitId, - }; + var commitInput = new CommitDeleteInput { streamId = streamId, id = commitId }; - var res = await client.CommitDelete(commitInput); - Assert.True(res); + var res = await client.CommitDelete(commitInput).ConfigureAwait(false); + Assert.True(res); - await Task.Run(() => + await Task.Run(() => { Thread.Sleep(2000); //let client catch-up Assert.NotNull(CommitDeletedInfo); Assert.That(CommitDeletedInfo.id, Is.EqualTo(commitId)); - }); - } - - private void Client_OnCommitDeleted(object sender, CommitInfo e) - { - CommitDeletedInfo = e; - } + }) + .ConfigureAwait(false); + } + private void Client_OnCommitDeleted(object sender, CommitInfo e) + { + CommitDeletedInfo = e; } } diff --git a/Core/IntegrationTests/Subscriptions/Streams.cs b/Core/IntegrationTests/Subscriptions/Streams.cs index dce20bbe3a..ee8a348ce8 100644 --- 
a/Core/IntegrationTests/Subscriptions/Streams.cs +++ b/Core/IntegrationTests/Subscriptions/Streams.cs @@ -1,114 +1,116 @@ -using Speckle.Core.Api; +using Speckle.Core.Api; using Speckle.Core.Api.SubscriptionModels; using Speckle.Core.Credentials; -namespace TestsIntegration.Subscriptions +namespace TestsIntegration.Subscriptions; + +public class Streams { - public class Streams - { - public Client client; - public Account testUserAccount; + public Client client; - private StreamInfo StreamAddedInfo; - private StreamInfo StreamUpdatedInfo; - private StreamInfo StreamRemovedInfo; - string streamId; + private StreamInfo StreamAddedInfo; + private string streamId; + private StreamInfo StreamRemovedInfo; + private StreamInfo StreamUpdatedInfo; + public Account testUserAccount; - [OneTimeSetUp] - public async Task Setup() - { - testUserAccount = await Fixtures.SeedUser(); - client = new Client(testUserAccount); - } + [OneTimeSetUp] + public async Task Setup() + { + testUserAccount = await Fixtures.SeedUser().ConfigureAwait(false); + client = new Client(testUserAccount); + } - [Test, Order(0)] - public async Task SubscribeStreamAdded() - { - client.SubscribeUserStreamAdded(); - client.OnUserStreamAdded += Client_OnUserStreamAdded; + [Test, Order(0)] + public async Task SubscribeStreamAdded() + { + client.SubscribeUserStreamAdded(); + client.OnUserStreamAdded += Client_OnUserStreamAdded; - Thread.Sleep(1000); //let server catch-up + Thread.Sleep(1000); //let server catch-up - var streamInput = new StreamCreateInput - { - description = "Hello World", - name = "Super Stream 01" - }; + var streamInput = new StreamCreateInput + { + description = "Hello World", + name = "Super Stream 01" + }; - var res = await client.StreamCreate(streamInput).ConfigureAwait(true); - streamId = res; - Assert.NotNull(res); + var res = await client.StreamCreate(streamInput).ConfigureAwait(true); + streamId = res; + Assert.NotNull(res); - await Task.Run(() => + await Task.Run(() => { 
Thread.Sleep(1000); //let client catch-up Assert.NotNull(StreamAddedInfo); Assert.That(StreamAddedInfo.name, Is.EqualTo(streamInput.name)); - }); - } + }) + .ConfigureAwait(false); + } - private void Client_OnUserStreamAdded(object sender, StreamInfo e) - { - StreamAddedInfo = e; - } + private void Client_OnUserStreamAdded(object sender, StreamInfo e) + { + StreamAddedInfo = e; + } - [Test, Order(1)] - public async Task SubscribeStreamUpdated() - { - client.SubscribeStreamUpdated(streamId); - client.OnStreamUpdated += Client_OnStreamUpdated; ; + [Test, Order(1)] + public async Task SubscribeStreamUpdated() + { + client.SubscribeStreamUpdated(streamId); + client.OnStreamUpdated += Client_OnStreamUpdated; + ; - Thread.Sleep(100); //let server catch-up + Thread.Sleep(100); //let server catch-up - var streamInput = new StreamUpdateInput - { - id = streamId, - description = "Hello World", - name = "Super Stream 01 EDITED" - }; + var streamInput = new StreamUpdateInput + { + id = streamId, + description = "Hello World", + name = "Super Stream 01 EDITED" + }; - var res = await client.StreamUpdate(streamInput); + var res = await client.StreamUpdate(streamInput).ConfigureAwait(false); - Assert.True(res); + Assert.True(res); - await Task.Run(() => + await Task.Run(() => { Thread.Sleep(100); //let client catch-up Assert.NotNull(StreamUpdatedInfo); Assert.That(StreamUpdatedInfo.name, Is.EqualTo(streamInput.name)); - }); - - } + }) + .ConfigureAwait(false); + } - private void Client_OnStreamUpdated(object sender, StreamInfo e) - { - StreamUpdatedInfo = e; - } + private void Client_OnStreamUpdated(object sender, StreamInfo e) + { + StreamUpdatedInfo = e; + } - [Test, Order(2)] - public async Task SubscribeUserStreamRemoved() - { - client.SubscribeUserStreamRemoved(); - client.OnUserStreamRemoved += Client_OnStreamRemoved; ; + [Test, Order(2)] + public async Task SubscribeUserStreamRemoved() + { + client.SubscribeUserStreamRemoved(); + client.OnUserStreamRemoved += 
Client_OnStreamRemoved; + ; - Thread.Sleep(100); //let server catch-up + Thread.Sleep(100); //let server catch-up - var res = await client.StreamDelete(streamId); + var res = await client.StreamDelete(streamId).ConfigureAwait(false); - Assert.True(res); + Assert.True(res); - await Task.Run(() => + await Task.Run(() => { Thread.Sleep(100); //let client catch-up Assert.NotNull(StreamRemovedInfo); Assert.That(StreamRemovedInfo.id, Is.EqualTo(streamId)); - }); - - } + }) + .ConfigureAwait(false); + } - private void Client_OnStreamRemoved(object sender, StreamInfo e) - { - StreamRemovedInfo = e; - } + private void Client_OnStreamRemoved(object sender, StreamInfo e) + { + StreamRemovedInfo = e; } } diff --git a/Core/IntegrationTests/Usings.cs b/Core/IntegrationTests/Usings.cs index 9a28bd89e2..324456763a 100644 --- a/Core/IntegrationTests/Usings.cs +++ b/Core/IntegrationTests/Usings.cs @@ -1 +1 @@ -global using NUnit.Framework; +global using NUnit.Framework; diff --git a/Core/Tests/Accounts.cs b/Core/Tests/Accounts.cs index 49325766b6..c7f608c4a0 100644 --- a/Core/Tests/Accounts.cs +++ b/Core/Tests/Accounts.cs @@ -1,107 +1,85 @@ -using System; -using System.Linq; using NUnit.Framework; using Speckle.Core.Api; using Speckle.Core.Credentials; -namespace Tests +namespace Tests; + +[TestFixture] +public class CredentialInfrastructure { - [TestFixture] - public class CredentialInfrastructure + [SetUp] + public void SetUp() { - Account TestAccount1, TestAccount2; - - Account TestAccount3; + TestAccount1 = new Account + { + refreshToken = "bla", + token = "bla", + serverInfo = new ServerInfo { url = "bla", company = "bla" }, + userInfo = new UserInfo { email = "one@two.com" } + }; - [SetUp] - public void SetUp() + TestAccount2 = new Account { - TestAccount1 = new Account - { - refreshToken = "bla", - token = "bla", - serverInfo = new ServerInfo - { - url = "bla", - company = "bla" - }, - userInfo = new UserInfo - { - email = "one@two.com" - } - }; + refreshToken = "foo", + 
token = "bar", + serverInfo = new ServerInfo { url = "baz", company = "qux" }, + userInfo = new UserInfo { email = "three@four.com" } + }; - TestAccount2 = new Account + TestAccount3 = new Account + { + token = "secret", + serverInfo = new ServerInfo { url = "https://sample.com", name = "qux" }, + userInfo = new UserInfo { - refreshToken = "foo", - token = "bar", - serverInfo = new ServerInfo - { - url = "baz", - company = "qux" - }, - userInfo = new UserInfo - { - email = "three@four.com" - } - }; + email = "six@five.com", + id = "123345", + name = "Test Account 3" + } + }; - TestAccount3 = new Account - { - token = "secret", - serverInfo = new ServerInfo - { - url = "https://sample.com", - name = "qux" - }, - userInfo = new UserInfo - { - email = "six@five.com", - id = "123345", - name = "Test Account 3" - } - }; + Fixtures.UpdateOrSaveAccount(TestAccount1); + Fixtures.UpdateOrSaveAccount(TestAccount2); + Fixtures.SaveLocalAccount(TestAccount3); + } + [TearDown] + public void TearDown() + { + Fixtures.DeleteLocalAccount(TestAccount1.id); + Fixtures.DeleteLocalAccount(TestAccount2.id); + Fixtures.DeleteLocalAccountFile(); + } + private Account TestAccount1, + TestAccount2; - Fixtures.UpdateOrSaveAccount(TestAccount1); - Fixtures.UpdateOrSaveAccount(TestAccount2); - Fixtures.SaveLocalAccount(TestAccount3); - } + private Account TestAccount3; - [TearDown] - public void TearDown() - { - Fixtures.DeleteLocalAccount(TestAccount1.id); - Fixtures.DeleteLocalAccount(TestAccount2.id); - Fixtures.DeleteLocalAccountFile(); - } - - [Test] - public void GetAllAccounts() - { - var accs = AccountManager.GetAccounts(); - Assert.GreaterOrEqual(accs.Count(), 3); // Tests are adding three accounts, you might have extra accounts on your machine when testing :D - } + [Test] + public void GetAllAccounts() + { + var accs = AccountManager.GetAccounts(); + Assert.GreaterOrEqual(accs.Count(), 3); // Tests are adding three accounts, you might have extra accounts on your machine when testing 
:D + } - [Test] - public void GetAccountsForServer() - { - var accs = AccountManager.GetAccounts("baz").ToList(); + [Test] + public void GetAccountsForServer() + { + var accs = AccountManager.GetAccounts("baz").ToList(); - Assert.That(accs.Count, Is.EqualTo(1)); - Assert.That(accs[0].serverInfo.company, Is.EqualTo("qux")); - Assert.That(accs[0].serverInfo.url, Is.EqualTo("baz")); - Assert.That(accs[0].refreshToken, Is.EqualTo("foo")); - } + Assert.That(accs.Count, Is.EqualTo(1)); + Assert.That(accs[0].serverInfo.company, Is.EqualTo("qux")); + Assert.That(accs[0].serverInfo.url, Is.EqualTo("baz")); + Assert.That(accs[0].refreshToken, Is.EqualTo("foo")); + } - [Test] - public void GetLocalAccount() - { - var acc = AccountManager.GetAccounts().Where(x => x.userInfo.id == "123345").FirstOrDefault(); + [Test] + public void GetLocalAccount() + { + var acc = AccountManager.GetAccounts().Where(x => x.userInfo.id == "123345").FirstOrDefault(); - Assert.That(acc.serverInfo.url, Is.EqualTo("https://sample.com")); - Assert.That(acc.token, Is.EqualTo("secret")); - } + Assert.That(acc.serverInfo.url, Is.EqualTo("https://sample.com")); + Assert.That(acc.token, Is.EqualTo("secret")); } } diff --git a/Core/Tests/BaseTests.cs b/Core/Tests/BaseTests.cs index 0103ca3311..8e326b3829 100755 --- a/Core/Tests/BaseTests.cs +++ b/Core/Tests/BaseTests.cs @@ -1,253 +1,264 @@ -using NUnit.Framework; +using NUnit.Framework; using Speckle.Core.Kits; -using Speckle.Core.Logging; using Speckle.Core.Models; -namespace Tests +namespace Tests; + +[TestFixture] +public class BaseTests { - [TestFixture] - public class BaseTests + [Test] + public void CanGetSetDynamicItemProp() { - [Test] - public void CanGetSetDynamicItemProp() - { - var @base = new Base(); - @base["Item"] = "Item"; + var @base = new Base(); + @base["Item"] = "Item"; - Assert.That(@base["Item"], Is.EqualTo("Item")); - } + Assert.That(@base["Item"], Is.EqualTo("Item")); + } - [Test] - public void CanGetSetTypedItemProp() - { - var @base 
= new ObjectWithItemProp(); - @base.Item = "baz"; + [Test] + public void CanGetSetTypedItemProp() + { + var @base = new ObjectWithItemProp(); + @base.Item = "baz"; - Assert.That(@base["Item"], Is.EqualTo("baz")); - Assert.That(@base.Item, Is.EqualTo("baz")); - } + Assert.That(@base["Item"], Is.EqualTo("baz")); + Assert.That(@base.Item, Is.EqualTo("baz")); + } - [Test(Description = "Checks if validation is performed in property names")] - public void CanValidatePropNames() - { - dynamic @base = new Base(); + [Test(Description = "Checks if validation is performed in property names")] + public void CanValidatePropNames() + { + dynamic @base = new Base(); - // Word chars are OK - @base["something"] = "B"; + // Word chars are OK + @base["something"] = "B"; - // Only single leading @ allowed - @base["@something"] = "A"; - Assert.Throws(() => { @base["@@@something"] = "Testing"; }); + // Only single leading @ allowed + @base["@something"] = "A"; + Assert.Throws(() => + { + @base["@@@something"] = "Testing"; + }); - // Invalid chars: ./ - Assert.Throws(() => { @base["some.thing"] = "Testing"; }); - Assert.Throws(() => { @base["some/thing"] = "Testing"; }); + // Invalid chars: ./ + Assert.Throws(() => + { + @base["some.thing"] = "Testing"; + }); + Assert.Throws(() => + { + @base["some/thing"] = "Testing"; + }); - // Trying to change a class member value will throw exceptions. - //Assert.Throws(() => { @base["speckle_type"] = "Testing"; }); - //Assert.Throws(() => { @base["id"] = "Testing"; }); - } + // Trying to change a class member value will throw exceptions. 
+ //Assert.Throws(() => { @base["speckle_type"] = "Testing"; }); + //Assert.Throws(() => { @base["id"] = "Testing"; }); + } - [Test] - public void CountDynamicChunkables() - { - int MAX_NUM = 3000; - var @base = new Base(); - var customChunk = new List(); - var customChunkArr = new double[MAX_NUM]; - - for (int i = 0; i < MAX_NUM; i++) - { - customChunk.Add(i / 2); - customChunkArr[i] = i; - } - - @base["@(1000)cc1"] = customChunk; - @base["@(1000)cc2"] = customChunkArr; - - var num = @base.GetTotalChildrenCount(); - Assert.That(num, Is.EqualTo(MAX_NUM / 1000 * 2 + 1)); - } + [Test] + public void CountDynamicChunkables() + { + int MAX_NUM = 3000; + var @base = new Base(); + var customChunk = new List(); + var customChunkArr = new double[MAX_NUM]; - [Test] - public void CountTypedChunkables() + for (int i = 0; i < MAX_NUM; i++) { - int MAX_NUM = 3000; - var @base = new SampleObject(); - var customChunk = new List(); - var customChunkArr = new double[MAX_NUM]; - - for (int i = 0; i < MAX_NUM; i++) - { - customChunk.Add(i / 2); - customChunkArr[i] = i; - } - - @base.list = customChunk; - @base.arr = customChunkArr; - - var num = @base.GetTotalChildrenCount(); - var actualNum = 1 + MAX_NUM / 300 + MAX_NUM / 1000; - Assert.That(num, Is.EqualTo(actualNum)); + customChunk.Add(i / 2); + customChunkArr[i] = i; } - [Test(Description = "Checks that no ignored or obsolete properties are returned")] - public void CanGetMemberNames() - { - var @base = new SampleObject(); - var dynamicProp = "dynamicProp"; - @base[dynamicProp] = 123; - var names = @base.GetMemberNames(); - Assert.That(names, Has.No.Member(nameof(@base.IgnoredSchemaProp))); - Assert.That(names, Has.No.Member(nameof(@base.ObsoleteSchemaProp))); - Assert.That(names, Has.Member(dynamicProp)); - Assert.That(names, Has.Member(nameof(@base.attachedProp))); - } + @base["@(1000)cc1"] = customChunk; + @base["@(1000)cc2"] = customChunkArr; - [Test(Description = "Checks that no ignored or obsolete properties are returned")] 
- public void CanGetMembers() + var num = @base.GetTotalChildrenCount(); + Assert.That(num, Is.EqualTo(MAX_NUM / 1000 * 2 + 1)); + } + + [Test] + public void CountTypedChunkables() + { + int MAX_NUM = 3000; + var @base = new SampleObject(); + var customChunk = new List(); + var customChunkArr = new double[MAX_NUM]; + + for (int i = 0; i < MAX_NUM; i++) { - var @base = new SampleObject(); - var dynamicProp = "dynamicProp"; - @base[dynamicProp] = 123; - - var names = @base.GetMembers().Keys; - Assert.That(names, Has.No.Member(nameof(@base.IgnoredSchemaProp))); - Assert.That(names, Has.No.Member(nameof(@base.ObsoleteSchemaProp))); - Assert.That(names, Has.Member(dynamicProp)); - Assert.That(names, Has.Member(nameof(@base.attachedProp))); + customChunk.Add(i / 2); + customChunkArr[i] = i; } - [Test(Description = "Checks that only instance properties are returned, excluding obsolete and ignored.")] - public void CanGetMembers_OnlyInstance() - { - var @base = new SampleObject(); - @base["dynamicProp"] = 123; + @base.list = customChunk; + @base.arr = customChunkArr; - var names = @base.GetMembers(DynamicBaseMemberType.Instance).Keys; - Assert.That(names, Has.Member(nameof(@base.attachedProp))); - } + var num = @base.GetTotalChildrenCount(); + var actualNum = 1 + MAX_NUM / 300 + MAX_NUM / 1000; + Assert.That(num, Is.EqualTo(actualNum)); + } - [Test(Description = "Checks that only dynamic properties are returned")] - public void CanGetMembers_OnlyDynamic() - { - var @base = new SampleObject(); - var dynamicProp = "dynamicProp"; - @base[dynamicProp] = 123; + [Test(Description = "Checks that no ignored or obsolete properties are returned")] + public void CanGetMemberNames() + { + var @base = new SampleObject(); + var dynamicProp = "dynamicProp"; + @base[dynamicProp] = 123; + var names = @base.GetMemberNames(); + Assert.That(names, Has.No.Member(nameof(@base.IgnoredSchemaProp))); + Assert.That(names, Has.No.Member(nameof(@base.ObsoleteSchemaProp))); + Assert.That(names, 
Has.Member(dynamicProp)); + Assert.That(names, Has.Member(nameof(@base.attachedProp))); + } - var names = @base.GetMembers(DynamicBaseMemberType.Dynamic).Keys; - Assert.That(names, Has.Member(dynamicProp)); - Assert.That(names.Count, Is.EqualTo(1)); - } + [Test(Description = "Checks that no ignored or obsolete properties are returned")] + public void CanGetMembers() + { + var @base = new SampleObject(); + var dynamicProp = "dynamicProp"; + @base[dynamicProp] = 123; + + var names = @base.GetMembers().Keys; + Assert.That(names, Has.No.Member(nameof(@base.IgnoredSchemaProp))); + Assert.That(names, Has.No.Member(nameof(@base.ObsoleteSchemaProp))); + Assert.That(names, Has.Member(dynamicProp)); + Assert.That(names, Has.Member(nameof(@base.attachedProp))); + } - [Test(Description = "Checks that all typed properties (including ignored ones) are returned")] - public void CanGetMembers_OnlyInstance_IncludeIgnored() - { - var @base = new SampleObject(); - @base["dynamicProp"] = 123; + [Test( + Description = "Checks that only instance properties are returned, excluding obsolete and ignored." 
+ )] + public void CanGetMembers_OnlyInstance() + { + var @base = new SampleObject(); + @base["dynamicProp"] = 123; - var names = @base.GetMembers(DynamicBaseMemberType.Instance | DynamicBaseMemberType.SchemaIgnored).Keys; - Assert.That(names, Has.Member(nameof(@base.IgnoredSchemaProp))); - Assert.That(names, Has.Member(nameof(@base.attachedProp))); - } + var names = @base.GetMembers(DynamicBaseMemberType.Instance).Keys; + Assert.That(names, Has.Member(nameof(@base.attachedProp))); + } - [Test(Description = "Checks that all typed properties (including obsolete ones) are returned")] - public void CanGetMembers_OnlyInstance_IncludeObsolete() - { - var @base = new SampleObject(); - @base["dynamicProp"] = 123; + [Test(Description = "Checks that only dynamic properties are returned")] + public void CanGetMembers_OnlyDynamic() + { + var @base = new SampleObject(); + var dynamicProp = "dynamicProp"; + @base[dynamicProp] = 123; - var names = @base.GetMembers(DynamicBaseMemberType.Instance | DynamicBaseMemberType.Obsolete).Keys; - Assert.That(names, Has.Member(nameof(@base.ObsoleteSchemaProp))); - Assert.That(names, Has.Member(nameof(@base.attachedProp))); - } + var names = @base.GetMembers(DynamicBaseMemberType.Dynamic).Keys; + Assert.That(names, Has.Member(dynamicProp)); + Assert.That(names.Count, Is.EqualTo(1)); + } + [Test(Description = "Checks that all typed properties (including ignored ones) are returned")] + public void CanGetMembers_OnlyInstance_IncludeIgnored() + { + var @base = new SampleObject(); + @base["dynamicProp"] = 123; + + var names = @base + .GetMembers(DynamicBaseMemberType.Instance | DynamicBaseMemberType.SchemaIgnored) + .Keys; + Assert.That(names, Has.Member(nameof(@base.IgnoredSchemaProp))); + Assert.That(names, Has.Member(nameof(@base.attachedProp))); + } - [Test] - public void CanGetDynamicMembers() - { - var @base = new SampleObject(); - var dynamicProp = "dynamicProp"; - @base[dynamicProp] = null; + [Test(Description = "Checks that all typed 
properties (including obsolete ones) are returned")] + public void CanGetMembers_OnlyInstance_IncludeObsolete() + { + var @base = new SampleObject(); + @base["dynamicProp"] = 123; + + var names = @base + .GetMembers(DynamicBaseMemberType.Instance | DynamicBaseMemberType.Obsolete) + .Keys; + Assert.That(names, Has.Member(nameof(@base.ObsoleteSchemaProp))); + Assert.That(names, Has.Member(nameof(@base.attachedProp))); + } - var names = @base.GetDynamicMemberNames(); - Assert.That(names, Has.Member(dynamicProp)); - Assert.Null(@base[dynamicProp]); - } + [Test] + public void CanGetDynamicMembers() + { + var @base = new SampleObject(); + var dynamicProp = "dynamicProp"; + @base[dynamicProp] = null; - [Test] - public void CanSetDynamicMembers() - { - var @base = new SampleObject(); - var key = "dynamicProp"; - var value = "something"; - // Can create a new dynamic member - @base[key] = value; - Assert.That(value, Is.EqualTo((string)@base[key])); - - // Can overwrite existing - value = "some other value"; - @base[key] = value; - Assert.That(value, Is.EqualTo((string)@base[key])); - - // Accepts null values - @base[key] = null; - Assert.IsNull(@base[key]); - } + var names = @base.GetDynamicMemberNames(); + Assert.That(names, Has.Member(dynamicProp)); + Assert.Null(@base[dynamicProp]); + } + + [Test] + public void CanSetDynamicMembers() + { + var @base = new SampleObject(); + var key = "dynamicProp"; + var value = "something"; + // Can create a new dynamic member + @base[key] = value; + Assert.That(value, Is.EqualTo((string)@base[key])); + + // Can overwrite existing + value = "some other value"; + @base[key] = value; + Assert.That(value, Is.EqualTo((string)@base[key])); + + // Accepts null values + @base[key] = null; + Assert.IsNull(@base[key]); + } + + [Test] + public void CanShallowCopy() + { + var sample = new SampleObject(); + var copy = sample.ShallowCopy(); + + var selectedMembers = + DynamicBaseMemberType.Dynamic + | DynamicBaseMemberType.Instance + | 
DynamicBaseMemberType.SchemaIgnored; + var sampleMembers = sample.GetMembers(selectedMembers); + var copyMembers = copy.GetMembers(selectedMembers); - [Test] - public void CanShallowCopy() + foreach (var kvp in copyMembers) { - var sample = new SampleObject(); - var copy = sample.ShallowCopy(); - - var selectedMembers = DynamicBaseMemberType.Dynamic - | DynamicBaseMemberType.Instance - | DynamicBaseMemberType.SchemaIgnored; - var sampleMembers = sample.GetMembers(selectedMembers); - var copyMembers = copy.GetMembers(selectedMembers); - - foreach (var kvp in copyMembers) - { - Assert.Contains(kvp.Key, sampleMembers.Keys); - Assert.That(kvp.Value, Is.EqualTo(sample[kvp.Key])); - } + Assert.Contains(kvp.Key, sampleMembers.Keys); + Assert.That(kvp.Value, Is.EqualTo(sample[kvp.Key])); } + } - public class SampleObject : Base - { - [Chunkable] - [DetachProperty] - public List list { get; set; } = new List(); + public class SampleObject : Base + { + public SampleObject() { } - [Chunkable(300)] - [DetachProperty] - public double[] arr { get; set; } + [Chunkable, DetachProperty] + public List list { get; set; } = new(); - [DetachProperty] - public SampleProp detachedProp { get; set; } + [Chunkable(300), DetachProperty] + public double[] arr { get; set; } - public SampleProp attachedProp { get; set; } + [DetachProperty] + public SampleProp detachedProp { get; set; } - public string @crazyProp { get; set; } + public SampleProp attachedProp { get; set; } - [SchemaIgnore] - public string IgnoredSchemaProp { get; set; } + public string @crazyProp { get; set; } - [Obsolete("Use attached prop")] - public string ObsoleteSchemaProp { get; set; } + [SchemaIgnore] + public string IgnoredSchemaProp { get; set; } - public SampleObject() { } - } + [Obsolete("Use attached prop")] + public string ObsoleteSchemaProp { get; set; } + } - public class SampleProp - { - public string name { get; set; } - } + public class SampleProp + { + public string name { get; set; } + } - public class 
ObjectWithItemProp : Base - { - public string Item { get; set; } = "Item"; - } + public class ObjectWithItemProp : Base + { + public string Item { get; set; } = "Item"; } } diff --git a/Core/Tests/ClosureTests.cs b/Core/Tests/ClosureTests.cs index 9a67973525..4d676c5170 100644 --- a/Core/Tests/ClosureTests.cs +++ b/Core/Tests/ClosureTests.cs @@ -1,122 +1,116 @@ -using Newtonsoft.Json; +using Newtonsoft.Json; using NUnit.Framework; using Speckle.Core.Api; using Speckle.Core.Models; using Speckle.Core.Transports; -namespace Tests +namespace Tests; + +[TestFixture] +public class Closures { - [TestFixture] - public class Closures + [Test(Description = "Checks wether closures are generated correctly by the serialiser.")] + public void CorrectDecompositionTracking() { + var d5 = new Base(); + ((dynamic)d5).name = "depth five"; // end v - [Test(Description = "Checks wether closures are generated correctly by the serialiser.")] - public void CorrectDecompositionTracking() - { - var d5 = new Base(); - ((dynamic)d5).name = "depth five"; // end v + var d4 = new Base(); + ((dynamic)d4).name = "depth four"; + ((dynamic)d4)["@detach"] = d5; - var d4 = new Base(); - ((dynamic)d4).name = "depth four"; - ((dynamic)d4)["@detach"] = d5; + var d3 = new Base(); + ((dynamic)d3).name = "depth three"; + ((dynamic)d3)["@detach"] = d4; - var d3 = new Base(); - ((dynamic)d3).name = "depth three"; - ((dynamic)d3)["@detach"] = d4; + var d2 = new Base(); + ((dynamic)d2).name = "depth two"; + ((dynamic)d2)["@detach"] = d3; + ((dynamic)d2)["@joker"] = new object[] { d5 }; - var d2 = new Base(); - ((dynamic)d2).name = "depth two"; - ((dynamic)d2)["@detach"] = d3; - ((dynamic)d2)["@joker"] = new object[] { d5 }; + var d1 = new Base(); + ((dynamic)d1).name = "depth one"; + ((dynamic)d1)["@detach"] = d2; + ((dynamic)d1)["@joker"] = d5; // consequently, d5 depth in d1 should be 1 - var d1 = new Base(); - ((dynamic)d1).name = "depth one"; - ((dynamic)d1)["@detach"] = d2; - ((dynamic)d1)["@joker"] = d5; 
// consequently, d5 depth in d1 should be 1 + var transport = new MemoryTransport(); - var transport = new MemoryTransport(); + var result = Operations.Send(d1, new List() { transport }, false).Result; - var result = Operations.Send(d1, new List() { transport }, false).Result; + var test = Operations.Receive(result, localTransport: transport).Result; - var test = Operations.Receive(result, localTransport: transport).Result; + Assert.IsNotNull(test.id); + Assert.That(d1.GetId(true), Is.EqualTo(test.id)); - Assert.IsNotNull(test.id); - Assert.That(d1.GetId(true), Is.EqualTo(test.id)); + var d1_ = JsonConvert.DeserializeObject(transport.Objects[d1.GetId(true)]); + var d2_ = JsonConvert.DeserializeObject(transport.Objects[d2.GetId(true)]); + var d3_ = JsonConvert.DeserializeObject(transport.Objects[d3.GetId(true)]); + var d4_ = JsonConvert.DeserializeObject(transport.Objects[d4.GetId(true)]); + var d5_ = JsonConvert.DeserializeObject(transport.Objects[d5.GetId(true)]); - var d1_ = JsonConvert.DeserializeObject(transport.Objects[d1.GetId(true)]); - var d2_ = JsonConvert.DeserializeObject(transport.Objects[d2.GetId(true)]); - var d3_ = JsonConvert.DeserializeObject(transport.Objects[d3.GetId(true)]); - var d4_ = JsonConvert.DeserializeObject(transport.Objects[d4.GetId(true)]); - var d5_ = JsonConvert.DeserializeObject(transport.Objects[d5.GetId(true)]); + var depthOf_d5_in_d1 = int.Parse((string)d1_.__closure[d5.GetId(true)]); + Assert.That(depthOf_d5_in_d1, Is.EqualTo(1)); + var depthOf_d4_in_d1 = int.Parse((string)d1_.__closure[d4.GetId(true)]); + Assert.That(depthOf_d4_in_d1, Is.EqualTo(3)); - var depthOf_d5_in_d1 = int.Parse((string)d1_.__closure[d5.GetId(true)]); - Assert.That(depthOf_d5_in_d1, Is.EqualTo(1)); + var depthOf_d5_in_d3 = int.Parse((string)d3_.__closure[d5.GetId(true)]); + Assert.That(depthOf_d5_in_d3, Is.EqualTo(2)); - var depthOf_d4_in_d1 = int.Parse((string)d1_.__closure[d4.GetId(true)]); - Assert.That(depthOf_d4_in_d1, Is.EqualTo(3)); + var 
depthOf_d4_in_d3 = int.Parse((string)d3_.__closure[d4.GetId(true)]); + Assert.That(depthOf_d4_in_d3, Is.EqualTo(1)); - var depthOf_d5_in_d3 = int.Parse((string)d3_.__closure[d5.GetId(true)]); - Assert.That(depthOf_d5_in_d3, Is.EqualTo(2)); + var depthOf_d5_in_d2 = int.Parse((string)d2_.__closure[d5.GetId(true)]); + Assert.That(depthOf_d5_in_d2, Is.EqualTo(1)); + } - var depthOf_d4_in_d3 = int.Parse((string)d3_.__closure[d4.GetId(true)]); - Assert.That(depthOf_d4_in_d3, Is.EqualTo(1)); + [Test] + public void DescendantsCounting() + { + Base myBase = new(); - var depthOf_d5_in_d2 = int.Parse((string)d2_.__closure[d5.GetId(true)]); - Assert.That(depthOf_d5_in_d2, Is.EqualTo(1)); + var myList = new List(); + // These should be counted! + for (int i = 0; i < 100; i++) + { + var smolBase = new Base(); + smolBase["test"] = i; + myList.Add(smolBase); } - [Test] - public void DescendantsCounting() + // Primitives should not be counted! + for (int i = 0; i < 10; i++) + myList.Add(i); + myList.Add("Hello"); + myList.Add(new { hai = "bai" }); + + myBase["@detachTheList"] = myList; + + var dictionary = new Dictionary(); + for (int i = 0; i < 10; i++) { - Base myBase = new Base(); - - var myList = new List(); - // These should be counted! - for (int i = 0; i < 100; i++) - { - var smolBase = new Base(); - smolBase["test"] = i; - myList.Add(smolBase); - } - - // Primitives should not be counted! 
- for (int i = 0; i < 10; i++) - { - myList.Add(i); - } - myList.Add("Hello"); - myList.Add(new { hai = "bai" }); - - myBase["@detachTheList"] = myList; - - var dictionary = new Dictionary(); - for (int i = 0; i < 10; i++) - { - var smolBase = new Base() { applicationId = i.ToString() }; - dictionary[$"key {i}"] = smolBase; - } - - dictionary["string value"] = "bol"; - dictionary["int value"] = 42; - dictionary["THIS IS RECURSIVE SURPRISE"] = myBase; - - myBase["@detachTheDictionary"] = dictionary; - - var count = myBase.GetTotalChildrenCount(); - Assert.That(count, Is.EqualTo(112)); - - var tableTest = new DiningTable(); - var tableKidsCount = tableTest.GetTotalChildrenCount(); - Assert.That(tableKidsCount, Is.EqualTo(10)); - - // Explicitely test for recurisve references! - var recursiveRef = new Base() { applicationId = "random" }; - recursiveRef["@recursive"] = recursiveRef; - - var supriseCount = recursiveRef.GetTotalChildrenCount(); - Assert.That(supriseCount, Is.EqualTo(2)); + var smolBase = new Base() { applicationId = i.ToString() }; + dictionary[$"key {i}"] = smolBase; } + dictionary["string value"] = "bol"; + dictionary["int value"] = 42; + dictionary["THIS IS RECURSIVE SURPRISE"] = myBase; + + myBase["@detachTheDictionary"] = dictionary; + + var count = myBase.GetTotalChildrenCount(); + Assert.That(count, Is.EqualTo(112)); + + var tableTest = new DiningTable(); + var tableKidsCount = tableTest.GetTotalChildrenCount(); + Assert.That(tableKidsCount, Is.EqualTo(10)); + + // Explicitely test for recurisve references! 
+ var recursiveRef = new Base() { applicationId = "random" }; + recursiveRef["@recursive"] = recursiveRef; + + var supriseCount = recursiveRef.GetTotalChildrenCount(); + Assert.That(supriseCount, Is.EqualTo(2)); } } diff --git a/Core/Tests/ExceptionTests.cs b/Core/Tests/ExceptionTests.cs index 97e35faf77..ba88e21944 100644 --- a/Core/Tests/ExceptionTests.cs +++ b/Core/Tests/ExceptionTests.cs @@ -1,26 +1,23 @@ -using System; using NUnit.Framework; using Speckle.Core.Models.Extensions; -namespace Tests -{ - [TestFixture] - public class ExceptionTests - { - [Test] - public void CanPrintAllInnerExceptions() - { - var ex = new Exception("Some error"); - var exMsg = ex.ToFormattedString(); - var ex2 = new Exception("One or more errors occurred", ex); - var ex2Msg = ex2.ToFormattedString(); +namespace Tests; +[TestFixture] +public class ExceptionTests +{ + [Test] + public void CanPrintAllInnerExceptions() + { + var ex = new Exception("Some error"); + var exMsg = ex.ToFormattedString(); - var ex3 = new AggregateException("One or more errors occurred", ex2); - var ex3Msg = ex3.ToFormattedString(); + var ex2 = new Exception("One or more errors occurred", ex); + var ex2Msg = ex2.ToFormattedString(); - Assert.NotNull(ex3Msg); + var ex3 = new AggregateException("One or more errors occurred", ex2); + var ex3Msg = ex3.ToFormattedString(); - } + Assert.NotNull(ex3Msg); } } diff --git a/Core/Tests/Fixtures.cs b/Core/Tests/Fixtures.cs index cec130ff44..fa2017bc44 100644 --- a/Core/Tests/Fixtures.cs +++ b/Core/Tests/Fixtures.cs @@ -1,61 +1,56 @@ using Newtonsoft.Json; using NUnit.Framework; -using Serilog; +using Serilog.Events; using Speckle.Core.Credentials; using Speckle.Core.Helpers; using Speckle.Core.Logging; using Speckle.Core.Transports; -namespace Tests +namespace Tests; + +[SetUpFixture] +public class SetUp { - [SetUpFixture] - public class SetUp + [OneTimeSetUp] + public void BeforeAll() { - [OneTimeSetUp] - public void BeforeAll() - { - SpeckleLog.Initialize( - "Core", - 
"Testing", - new SpeckleLogConfiguration( - Serilog.Events.LogEventLevel.Debug, - logToConsole: true, - logToFile: false, - logToSeq: false - ) - ); - SpeckleLog.Logger.Information("Initialized logger for testing"); - } + SpeckleLog.Initialize( + "Core", + "Testing", + new SpeckleLogConfiguration(LogEventLevel.Debug, true, logToFile: false, logToSeq: false) + ); + SpeckleLog.Logger.Information("Initialized logger for testing"); } +} - public class Fixtures - { - private static SQLiteTransport AccountStorage = new SQLiteTransport(scope: "Accounts"); - private static string accountPath = Path.Combine( - SpecklePathProvider.AccountsFolderPath, - "TestAccount.json" - ); +public abstract class Fixtures +{ + private static SQLiteTransport AccountStorage = new(scope: "Accounts"); + private static string accountPath = Path.Combine( + SpecklePathProvider.AccountsFolderPath, + "TestAccount.json" + ); - public static void UpdateOrSaveAccount(Account account) - { - AccountStorage.DeleteObject(account.id); - AccountStorage.SaveObjectSync(account.id, JsonConvert.SerializeObject(account)); - } + public static void UpdateOrSaveAccount(Account account) + { + AccountStorage.DeleteObject(account.id); + string serializedObject = JsonConvert.SerializeObject(account); + AccountStorage.SaveObjectSync(account.id, serializedObject); + } - public static void SaveLocalAccount(Account account) - { - var json = JsonConvert.SerializeObject(account); - File.WriteAllText(accountPath, json); - } + public static void SaveLocalAccount(Account account) + { + var json = JsonConvert.SerializeObject(account); + File.WriteAllText(accountPath, json); + } - public static void DeleteLocalAccount(string id) - { - AccountStorage.DeleteObject(id); - } + public static void DeleteLocalAccount(string id) + { + AccountStorage.DeleteObject(id); + } - public static void DeleteLocalAccountFile() - { - File.Delete(accountPath); - } + public static void DeleteLocalAccountFile() + { + File.Delete(accountPath); } } diff 
--git a/Core/Tests/GraphQLClient.cs b/Core/Tests/GraphQLClient.cs index 2e8b31524b..458402ff20 100644 --- a/Core/Tests/GraphQLClient.cs +++ b/Core/Tests/GraphQLClient.cs @@ -1,115 +1,116 @@ +using System.Diagnostics; +using GraphQL; using NUnit.Framework; using Speckle.Core.Api; using Speckle.Core.Credentials; -using GraphQL; -using System.Diagnostics; -namespace Tests -{ - public class GraphQLClientTests - { - private Client _client; +namespace Tests; - [OneTimeSetUp] - public void Setup() - { - _client = new Client( - new Account - { - token = "this is a scam", - serverInfo = new ServerInfo { url = "http://goto.testing" } - } - ); - } +public class GraphQLClientTests +{ + private Client _client; - class FakeGqlResponseModel { } + [OneTimeSetUp] + public void Setup() + { + _client = new Client( + new Account + { + token = "this is a scam", + serverInfo = new ServerInfo { url = "http://goto.testing" } + } + ); + } - private static IEnumerable ErrorCases() - { - yield return new TestCaseData( - typeof(SpeckleGraphQLForbiddenException), - new Map { { "code", "FORBIDDEN" } } - ); - yield return new TestCaseData( - typeof(SpeckleGraphQLForbiddenException), - new Map { { "code", "UNAUTHENTICATED" } } - ); - yield return new TestCaseData( - typeof(SpeckleGraphQLInternalErrorException), - new Map { { "code", "INTERNAL_SERVER_ERROR" } } - ); - yield return new TestCaseData( - typeof(SpeckleGraphQLException), - new Map { { "foo", "bar" } } - ); - } + private static IEnumerable ErrorCases() + { + yield return new TestCaseData( + typeof(SpeckleGraphQLForbiddenException), + new Map { { "code", "FORBIDDEN" } } + ); + yield return new TestCaseData( + typeof(SpeckleGraphQLForbiddenException), + new Map { { "code", "UNAUTHENTICATED" } } + ); + yield return new TestCaseData( + typeof(SpeckleGraphQLInternalErrorException), + new Map { { "code", "INTERNAL_SERVER_ERROR" } } + ); + yield return new TestCaseData( + typeof(SpeckleGraphQLException), + new Map { { "foo", "bar" } } + ); + } 
- [Test] - [TestCaseSource(nameof(ErrorCases))] - public void TestExceptionThrowingFromGraphQLErrors(Type exType, Map extensions) - { - Assert.Throws( - exType, - () => - _client.MaybeThrowFromGraphQLErrors( - new GraphQLRequest(), - new GraphQLResponse - { - Errors = new GraphQLError[] { new GraphQLError { Extensions = extensions } } - } - ) - ); - } + [Test, TestCaseSource(nameof(ErrorCases))] + public void TestExceptionThrowingFromGraphQLErrors(Type exType, Map extensions) + { + Assert.Throws( + exType, + () => + _client.MaybeThrowFromGraphQLErrors( + new GraphQLRequest(), + new GraphQLResponse + { + Errors = new GraphQLError[] { new() { Extensions = extensions } } + } + ) + ); + } - [Test] - public void TestMaybeThrowsDoesntThrowForNoErrors() - { - _client.MaybeThrowFromGraphQLErrors( - new GraphQLRequest(), - new GraphQLResponse() - ); - // We're just checking that the prev function didn't throw - Assert.True(true); - } + [Test] + public void TestMaybeThrowsDoesntThrowForNoErrors() + { + _client.MaybeThrowFromGraphQLErrors( + new GraphQLRequest(), + new GraphQLResponse() + ); + // We're just checking that the prev function didn't throw + Assert.True(true); + } - [Test] - public async Task TestExecuteWithResiliencePoliciesDoesntRetryTaskCancellation() + [Test] + public async Task TestExecuteWithResiliencePoliciesDoesntRetryTaskCancellation() + { + var timer = new Stopwatch(); + timer.Start(); + Assert.ThrowsAsync(async () => { - var timer = new Stopwatch(); - timer.Start(); - Assert.ThrowsAsync(async () => - { - var tokenSource = new CancellationTokenSource(); - tokenSource.Cancel(); - await _client.ExecuteWithResiliencePolicies( + var tokenSource = new CancellationTokenSource(); + tokenSource.Cancel(); + await _client + .ExecuteWithResiliencePolicies( async () => await Task.Run( - async () => - { - await Task.Delay(1000); - return "foo"; - }, - tokenSource.Token - ) - ); - }); - timer.Stop(); - var elapsed = timer.ElapsedMilliseconds; + async () => + { + await 
Task.Delay(1000).ConfigureAwait(false); + return "foo"; + }, + tokenSource.Token + ) + .ConfigureAwait(false) + ) + .ConfigureAwait(false); + }); + timer.Stop(); + var elapsed = timer.ElapsedMilliseconds; - // the default retry policy would retry 5 times with 1 second jitter backoff each - // if the elapsed is less than a second, this was def not retried - Assert.Less(elapsed, 1000); - } + // the default retry policy would retry 5 times with 1 second jitter backoff each + // if the elapsed is less than a second, this was def not retried + Assert.Less(elapsed, 1000); + } - [Test] - public async Task TestExecuteWithResiliencePoliciesRetry() - { - var counter = 0; - var maxRetryCount = 5; - var expectedResult = "finally it finishes"; - var timer = new Stopwatch(); - timer.Start(); - var result = await _client.ExecuteWithResiliencePolicies(async () => + [Test] + public async Task TestExecuteWithResiliencePoliciesRetry() + { + var counter = 0; + var maxRetryCount = 5; + var expectedResult = "finally it finishes"; + var timer = new Stopwatch(); + timer.Start(); + var result = await _client + .ExecuteWithResiliencePolicies(async () => { counter++; if (counter < maxRetryCount) @@ -118,11 +119,13 @@ public async Task TestExecuteWithResiliencePoliciesRetry() new GraphQLResponse() ); return expectedResult; - }); - timer.Stop(); - // The baseline for wait is 1 seconds between the jittered retry - Assert.GreaterOrEqual(timer.ElapsedMilliseconds, 5000); - Assert.AreEqual(maxRetryCount, counter); - } + }) + .ConfigureAwait(false); + timer.Stop(); + // The baseline for wait is 1 seconds between the jittered retry + Assert.GreaterOrEqual(timer.ElapsedMilliseconds, 5000); + Assert.That(counter, Is.EqualTo(maxRetryCount)); } -} \ No newline at end of file + + private class FakeGqlResponseModel { } +} diff --git a/Core/Tests/Hashing.cs b/Core/Tests/Hashing.cs index 932e2858a6..1c6fb35150 100644 --- a/Core/Tests/Hashing.cs +++ b/Core/Tests/Hashing.cs @@ -1,77 +1,90 @@ -using 
System.Diagnostics; +using System.Diagnostics; using NUnit.Framework; -namespace Tests +namespace Tests; + +[TestFixture] +public class Hashing { - [TestFixture] - public class Hashing + [Test(Description = "Checks that hashing (as represented by object ids) actually works.")] + public void HashChangeCheck() { + var table = new DiningTable(); + var secondTable = new DiningTable(); - [Test(Description = "Checks that hashing (as represented by object ids) actually works.")] - public void HashChangeCheck() - { - var table = new DiningTable(); - var secondTable = new DiningTable(); - - Assert.That(secondTable.GetId(), Is.EqualTo(table.GetId())); - - ((dynamic)secondTable).testProp = "wonderful"; + Assert.That(secondTable.GetId(), Is.EqualTo(table.GetId())); - Assert.That(secondTable.GetId(), Is.Not.EqualTo(table.GetId())); - } + ((dynamic)secondTable).testProp = "wonderful"; - [Test(Description = "Tests the convention that dynamic properties that have key names prepended with '__' are ignored.")] - public void IgnoredDynamicPropertiesCheck() - { - var table = new DiningTable(); - var originalHash = table.GetId(); - - ((dynamic)table).__testProp = "wonderful"; + Assert.That(secondTable.GetId(), Is.Not.EqualTo(table.GetId())); + } - Assert.That(table.GetId(), Is.EqualTo(originalHash)); - } + [Test( + Description = "Tests the convention that dynamic properties that have key names prepended with '__' are ignored." 
+ )] + public void IgnoredDynamicPropertiesCheck() + { + var table = new DiningTable(); + var originalHash = table.GetId(); - [Test(Description = "Rather stupid test as results vary wildly even on one machine.")] - public void HashingPerformance() - { - var polyline = new Polyline(); + ((dynamic)table).__testProp = "wonderful"; - for (int i = 0; i < 1000; i++) - polyline.Points.Add(new Point() { X = i * 2, Y = i % 2 }); + Assert.That(table.GetId(), Is.EqualTo(originalHash)); + } - var stopWatch = new Stopwatch(); - stopWatch.Start(); + [Test(Description = "Rather stupid test as results vary wildly even on one machine.")] + public void HashingPerformance() + { + var polyline = new Polyline(); - // Warm-up: first hashing always takes longer due to json serialisation init - var h1 = polyline.GetId(); - var stopWatchStep = stopWatch.ElapsedMilliseconds; + for (int i = 0; i < 1000; i++) + polyline.Points.Add(new Point() { X = i * 2, Y = i % 2 }); - stopWatchStep = stopWatch.ElapsedMilliseconds; - var h2 = polyline.GetId(); + var stopWatch = new Stopwatch(); + stopWatch.Start(); - var diff1 = stopWatch.ElapsedMilliseconds - stopWatchStep; - Assert.True(diff1 < 300, $"Hashing shouldn't take that long ({diff1} ms) for the test object used."); - Console.WriteLine($"Big obj hash duration: {diff1} ms"); + // Warm-up: first hashing always takes longer due to json serialisation init + var h1 = polyline.GetId(); + var stopWatchStep = stopWatch.ElapsedMilliseconds; - var pt = new Point() { X = 10, Y = 12, Z = 30 }; - stopWatchStep = stopWatch.ElapsedMilliseconds; - var h3 = pt.GetId(); + stopWatchStep = stopWatch.ElapsedMilliseconds; + var h2 = polyline.GetId(); - var diff2 = stopWatch.ElapsedMilliseconds - stopWatchStep; - Assert.True(diff2 < 10, $"Hashing shouldn't take that long ({diff2} ms)for the point object used."); - Console.WriteLine($"Small obj hash duration: {diff2} ms"); - } + var diff1 = stopWatch.ElapsedMilliseconds - stopWatchStep; + Assert.True( + diff1 < 300, + 
$"Hashing shouldn't take that long ({diff1} ms) for the test object used." + ); + Console.WriteLine($"Big obj hash duration: {diff1} ms"); - [Test(Description = "The hash of a decomposed object is different that that of a non-decomposed object.")] - public void DecompositionHashes() + var pt = new Point() { - var table = new DiningTable(); - ((dynamic)table)["@decomposeMePlease"] = new Point(); + X = 10, + Y = 12, + Z = 30 + }; + stopWatchStep = stopWatch.ElapsedMilliseconds; + var h3 = pt.GetId(); + + var diff2 = stopWatch.ElapsedMilliseconds - stopWatchStep; + Assert.True( + diff2 < 10, + $"Hashing shouldn't take that long ({diff2} ms)for the point object used." + ); + Console.WriteLine($"Small obj hash duration: {diff2} ms"); + } + + [Test( + Description = "The hash of a decomposed object is different that that of a non-decomposed object." + )] + public void DecompositionHashes() + { + var table = new DiningTable(); + ((dynamic)table)["@decomposeMePlease"] = new Point(); - var hash1 = table.GetId(); - var hash2 = table.GetId(true); + var hash1 = table.GetId(); + var hash2 = table.GetId(true); - Assert.That(hash2, Is.Not.EqualTo(hash1)); - } + Assert.That(hash2, Is.Not.EqualTo(hash1)); } } diff --git a/Core/Tests/Helpers.cs b/Core/Tests/Helpers.cs index 002640eb85..c9f7b16a3c 100644 --- a/Core/Tests/Helpers.cs +++ b/Core/Tests/Helpers.cs @@ -1,34 +1,35 @@ -using NUnit.Framework; +using NUnit.Framework; -namespace Tests +namespace Tests; + +[TestFixture] +public class Helpers { - [TestFixture] - public class Helpers + [ + Test, + TestCase(30, "just now"), + TestCase(60, "1 minute ago"), + TestCase(60 * 2, "2 minutes ago"), + TestCase(60 * 60 * 1, "1 hour ago"), + TestCase(60 * 60 * 2, "2 hours ago"), + TestCase(60 * 60 * 24 * 1, "1 day ago"), + TestCase(60 * 60 * 24 * 2, "2 days ago"), + TestCase(60 * 60 * 24 * 7 * 1, "1 week ago"), + TestCase(60 * 60 * 24 * 7 * 2, "2 weeks ago"), + TestCase(60 * 60 * 24 * 31 * 1, "1 month ago"), + TestCase(60 * 60 * 24 * 31 * 2, 
"2 months ago"), + TestCase(60 * 60 * 24 * 365 * 1, "1 year ago"), + TestCase(60 * 60 * 24 * 365 * 2, "2 years ago") + ] + public void TimeAgo_DisplaysTextCorrectly(int secondsAgo, string expectedText) { - [Test] - [TestCase(30, "just now")] - [TestCase(60, "1 minute ago")] - [TestCase(60 * 2, "2 minutes ago")] - [TestCase(60 * 60 * 1, "1 hour ago")] - [TestCase(60 * 60 * 2, "2 hours ago")] - [TestCase(60 * 60 * 24 * 1, "1 day ago")] - [TestCase(60 * 60 * 24 * 2, "2 days ago")] - [TestCase(60 * 60 * 24 * 7 * 1, "1 week ago")] - [TestCase(60 * 60 * 24 * 7 * 2, "2 weeks ago")] - [TestCase(60 * 60 * 24 * 31 * 1, "1 month ago")] - [TestCase(60 * 60 * 24 * 31 * 2, "2 months ago")] - [TestCase(60 * 60 * 24 * 365 * 1, "1 year ago")] - [TestCase(60 * 60 * 24 * 365 * 2, "2 years ago")] - public void TimeAgo_DisplaysTextCorrectly(int secondsAgo, string expectedText) - { - // Get current time and substract the input amount - var dateTime = DateTime.Now; - dateTime = dateTime.Subtract(new TimeSpan(0, 0, secondsAgo)); + // Get current time and substract the input amount + var dateTime = DateTime.Now; + dateTime = dateTime.Subtract(new TimeSpan(0, 0, secondsAgo)); - // Get the timeAgo text representation - var actual = Speckle.Core.Api.Helpers.TimeAgo(dateTime); + // Get the timeAgo text representation + var actual = Speckle.Core.Api.Helpers.TimeAgo(dateTime); - Assert.That(actual, Is.EqualTo(expectedText)); - } + Assert.That(actual, Is.EqualTo(expectedText)); } } diff --git a/Core/Tests/Kits.cs b/Core/Tests/Kits.cs index ff46c283c4..3ca5a3352a 100644 --- a/Core/Tests/Kits.cs +++ b/Core/Tests/Kits.cs @@ -1,20 +1,18 @@ -using NUnit.Framework; +using NUnit.Framework; using Speckle.Core.Kits; -namespace Tests +namespace Tests; + +[TestFixture] +public class Kits { - [TestFixture] - public class Kits + [Test] + public void KitsExist() { - [Test] - public void KitsExist() - { - var kits = KitManager.Kits; - Assert.Greater(kits.Count(), 0); - - var types = KitManager.Types; - 
Assert.Greater(types.Count(), 0); - } + var kits = KitManager.Kits; + Assert.Greater(kits.Count(), 0); + var types = KitManager.Types; + Assert.Greater(types.Count(), 0); } } diff --git a/Core/Tests/ObjectTraversal/GraphTraversalTests.cs b/Core/Tests/ObjectTraversal/GraphTraversalTests.cs index 791c786231..5c0ca211ce 100644 --- a/Core/Tests/ObjectTraversal/GraphTraversalTests.cs +++ b/Core/Tests/ObjectTraversal/GraphTraversalTests.cs @@ -1,4 +1,4 @@ -using System.Collections; +using System.Collections; using NUnit.Framework; using Speckle.Core.Models; using Speckle.Core.Models.GraphTraversal; @@ -8,8 +8,10 @@ namespace TestsUnit.ObjectTraversal; [TestFixture, TestOf(typeof(GraphTraversal))] public class GraphTraversalTests { - - private static IEnumerable Traverse(Base testCase, params ITraversalRule[] rules) + private static IEnumerable Traverse( + Base testCase, + params ITraversalRule[] rules + ) { var sut = new GraphTraversal(rules); return sut.Traverse(testCase); @@ -18,22 +20,26 @@ private static IEnumerable Traverse(Base testCase, params ITra [Test] public void Traverse_TraversesListMembers() { - var traverseListsRule = TraversalRule.NewTraversalRule() + var traverseListsRule = TraversalRule + .NewTraversalRule() .When(_ => true) - .ContinueTraversing(x => x.GetMembers(DynamicBaseMemberType.All) - .Where(p => p.Value is IList) - .Select(kvp => kvp.Key) + .ContinueTraversing( + x => + x.GetMembers(DynamicBaseMemberType.All) + .Where(p => p.Value is IList) + .Select(kvp => kvp.Key) ); var expectTraverse = new Base() { id = "List Member" }; var expectIgnored = new Base() { id = "Not List Member" }; - TraversalMock testCase = new TraversalMock() - { - ListChildren = new List() { expectTraverse }, - DictChildren = new Dictionary() { ["myprop"] = expectIgnored }, - Child = expectIgnored, - }; + TraversalMock testCase = + new() + { + ListChildren = new List() { expectTraverse }, + DictChildren = new Dictionary() { ["myprop"] = expectIgnored }, + Child = 
expectIgnored + }; var ret = Traverse(testCase, traverseListsRule).Select(b => b.current).ToList(); @@ -49,22 +55,26 @@ public void Traverse_TraversesListMembers() [Test] public void Traverse_TraversesDictMembers() { - var traverseListsRule = TraversalRule.NewTraversalRule() + var traverseListsRule = TraversalRule + .NewTraversalRule() .When(_ => true) - .ContinueTraversing(x => x.GetMembers(DynamicBaseMemberType.All) - .Where(p => p.Value is IDictionary) - .Select(kvp => kvp.Key) + .ContinueTraversing( + x => + x.GetMembers(DynamicBaseMemberType.All) + .Where(p => p.Value is IDictionary) + .Select(kvp => kvp.Key) ); var expectTraverse = new Base() { id = "Dict Member" }; var expectIgnored = new Base() { id = "Not Dict Member" }; - TraversalMock testCase = new TraversalMock() - { - ListChildren = new List() { expectIgnored }, - DictChildren = new Dictionary() { ["myprop"] = expectTraverse }, - Child = expectIgnored, - }; + TraversalMock testCase = + new() + { + ListChildren = new List() { expectIgnored }, + DictChildren = new Dictionary() { ["myprop"] = expectTraverse }, + Child = expectIgnored + }; var ret = Traverse(testCase, traverseListsRule).Select(b => b.current).ToList(); @@ -80,21 +90,21 @@ public void Traverse_TraversesDictMembers() [Test] public void Traverse_TraversesDynamic() { - var traverseListsRule = TraversalRule.NewTraversalRule() + var traverseListsRule = TraversalRule + .NewTraversalRule() .When(_ => true) - .ContinueTraversing(x => x.GetMembers(DynamicBaseMemberType.Dynamic) - .Select(kvp => kvp.Key) - ); + .ContinueTraversing(x => x.GetMembers(DynamicBaseMemberType.Dynamic).Select(kvp => kvp.Key)); var expectTraverse = new Base() { id = "List Member" }; var expectIgnored = new Base() { id = "Not List Member" }; - TraversalMock testCase = new TraversalMock() - { - Child = expectIgnored, - ["dynamicChild"] = expectTraverse, - ["dynamicListChild"] = new List { expectTraverse } - }; + TraversalMock testCase = + new() + { + Child = expectIgnored, + 
["dynamicChild"] = expectTraverse, + ["dynamicListChild"] = new List { expectTraverse } + }; var ret = Traverse(testCase, traverseListsRule).Select(b => b.current).ToList(); @@ -107,26 +117,24 @@ public void Traverse_TraversesDynamic() Assert.That(ret, Has.Count.EqualTo(3)); } - [Test] public void Traverse_ExclusiveRule() { var expectTraverse = new Base() { id = "List Member" }; var expectIgnored = new Base() { id = "Not List Member" }; - var traverseListsRule = TraversalRule.NewTraversalRule() + var traverseListsRule = TraversalRule + .NewTraversalRule() .When(_ => true) - .ContinueTraversing(x => x.GetMembers(DynamicBaseMemberType.Dynamic) - .Select(kvp => kvp.Key) - ); - - - TraversalMock testCase = new TraversalMock() - { - Child = expectIgnored, - ["dynamicChild"] = expectTraverse, - ["dynamicListChild"] = new List { expectTraverse } - }; + .ContinueTraversing(x => x.GetMembers(DynamicBaseMemberType.Dynamic).Select(kvp => kvp.Key)); + + TraversalMock testCase = + new() + { + Child = expectIgnored, + ["dynamicChild"] = expectTraverse, + ["dynamicListChild"] = new List { expectTraverse } + }; var ret = Traverse(testCase, traverseListsRule).Select(b => b.current).ToList(); diff --git a/Core/Tests/ObjectTraversal/TraversalMockObjects.cs b/Core/Tests/ObjectTraversal/TraversalMockObjects.cs index 2c97291568..5ee6f62e94 100644 --- a/Core/Tests/ObjectTraversal/TraversalMockObjects.cs +++ b/Core/Tests/ObjectTraversal/TraversalMockObjects.cs @@ -1,4 +1,4 @@ -using Speckle.Core.Models; +using Speckle.Core.Models; namespace TestsUnit.ObjectTraversal; @@ -7,10 +7,10 @@ public class TraversalMock : Base public Base? 
Child { get; set; } public object ObjectChild { get; set; } - - public List ListChildren { get; set; } = new (); - - public List> NestedListChildren { get; set; } = new (); - - public Dictionary DictChildren { get; set; } = new (); + + public List ListChildren { get; set; } = new(); + + public List> NestedListChildren { get; set; } = new(); + + public Dictionary DictChildren { get; set; } = new(); } diff --git a/Core/Tests/ObjectTraversal/TraversalRuleTests.cs b/Core/Tests/ObjectTraversal/TraversalRuleTests.cs index 4b1748654b..fca14f7801 100644 --- a/Core/Tests/ObjectTraversal/TraversalRuleTests.cs +++ b/Core/Tests/ObjectTraversal/TraversalRuleTests.cs @@ -1,10 +1,7 @@ -using NUnit.Framework; +using NUnit.Framework; using Speckle.Core.Models.GraphTraversal; namespace TestsUnit.ObjectTraversal; [TestFixture, TestOf(typeof(TraversalRule))] -public class TraversalTests -{ - -} +public class TraversalTests { } diff --git a/Core/Tests/Path.cs b/Core/Tests/Path.cs index dc0d7e872c..841e353ddd 100644 --- a/Core/Tests/Path.cs +++ b/Core/Tests/Path.cs @@ -1,100 +1,88 @@ -using System.Runtime.InteropServices; +using System.Runtime.InteropServices; using System.Text.RegularExpressions; using NUnit.Framework; using Speckle.Core.Helpers; -namespace Tests +namespace Tests; + +[TestFixture] +public class SpecklePaths { - [TestFixture] - public class SpecklePaths + [Test] + public void TestUserApplicationDataPath() { - [Test] - public void TestUserApplicationDataPath() - { - var userPath = SpecklePathProvider.UserApplicationDataPath(); - string pattern; - - if (String.IsNullOrEmpty(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData))) - { - pattern = @"\/root"; - } - if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - { - pattern = @"C:\\Users\\.*\\AppData\\Roaming"; - } - else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) - { - pattern = @"\/Users\/.*\/\.config"; - } - else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) - { - // if running 
under root user, the .config folder is in another location... - if (userPath.StartsWith("/root")) - { - pattern = @"\/root/\.config"; - } - else - { - pattern = @"\/home/.*/\.config"; + var userPath = SpecklePathProvider.UserApplicationDataPath(); + string pattern; - } - } + if (string.IsNullOrEmpty(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData))) + pattern = @"\/root"; + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + pattern = @"C:\\Users\\.*\\AppData\\Roaming"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + pattern = @"\/Users\/.*\/\.config"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + // if running under root user, the .config folder is in another location... + if (userPath.StartsWith("/root")) + pattern = @"\/root/\.config"; else - { - throw new NotImplementedException("Your OS platform is not supported"); - } - var regex = new Regex(pattern); - var match = regex.Match(userPath); - Assert.True(match.Success, $"{userPath} did not match on {pattern}"); + pattern = @"\/home/.*/\.config"; } - - [Test] - public void TestUserApplicationDataPathOverride() + else { - var newPath = Path.GetTempPath(); - SpecklePathProvider.OverrideApplicationDataPath(newPath); - Assert.That(SpecklePathProvider.UserApplicationDataPath(), Is.EqualTo(newPath)); - SpecklePathProvider.OverrideApplicationDataPath(null); + throw new NotImplementedException("Your OS platform is not supported"); } + var regex = new Regex(pattern); + var match = regex.Match(userPath); + Assert.True(match.Success, $"{userPath} did not match on {pattern}"); + } - [Test] - public void TestInstallApplicationDataPath() - { - var installPath = SpecklePathProvider.InstallApplicationDataPath; - string pattern; + [Test] + public void TestUserApplicationDataPathOverride() + { + var newPath = Path.GetTempPath(); + SpecklePathProvider.OverrideApplicationDataPath(newPath); + Assert.That(SpecklePathProvider.UserApplicationDataPath(), 
Is.EqualTo(newPath)); + SpecklePathProvider.OverrideApplicationDataPath(null); + } + + [Test] + public void TestInstallApplicationDataPath() + { + var installPath = SpecklePathProvider.InstallApplicationDataPath; + string pattern; - if (String.IsNullOrEmpty(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData))) - { - pattern = @"\/root"; - } - else if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) - { - // this will prob fail on windows - pattern = @"C:\\Users\\.*\\AppData\\Roaming"; - } - else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) - { - pattern = @"\/Users\/.*\/\.config"; - } - else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) - { - // if running under root user, the .config folder is in another location... - if (installPath.StartsWith("/root")) - { - pattern = @"\/root/\.config"; - } - else - { - pattern = @"\/home/.*/\.config"; - } - } + if (string.IsNullOrEmpty(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData))) + { + pattern = @"\/root"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + // this will prob fail on windows + pattern = @"C:\\Users\\.*\\AppData\\Roaming"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + pattern = @"\/Users\/.*\/\.config"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + // if running under root user, the .config folder is in another location... 
+ if (installPath.StartsWith("/root")) + pattern = @"\/root/\.config"; else - { - throw new NotImplementedException("Your OS platform is not supported"); - } - var regex = new Regex(pattern); - var match = regex.Match(installPath); - Assert.True(match.Success, $"{installPath} did not match on {pattern}"); + pattern = @"\/home/.*/\.config"; + } + else + { + throw new NotImplementedException("Your OS platform is not supported"); } + var regex = new Regex(pattern); + var match = regex.Match(installPath); + Assert.True(match.Success, $"{installPath} did not match on {pattern}"); } } diff --git a/Core/Tests/SendReceiveLocal.cs b/Core/Tests/SendReceiveLocal.cs index 368811b5b8..1bff0e5f15 100644 --- a/Core/Tests/SendReceiveLocal.cs +++ b/Core/Tests/SendReceiveLocal.cs @@ -1,269 +1,292 @@ -using System; using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; using NUnit.Framework; using Speckle.Core.Api; using Speckle.Core.Models; using Speckle.Core.Transports; -namespace Tests +namespace Tests; + +[TestFixture] +public class SendReceiveLocal { - [TestFixture] - public class SendReceiveLocal + private string objId_01, + commitId_02; + + private int numObjects = 3001; + + [Test(Description = "Pushing a commit locally"), Order(1)] + public void LocalUpload() { - string objId_01, commitId_02; - int numObjects = 3001; + var myObject = new Base(); + var rand = new Random(); + myObject["@items"] = new List(); - [Test(Description = "Pushing a commit locally"), Order(1)] - public void LocalUpload() - { - var myObject = new Base(); - var rand = new Random(); + for (int i = 0; i < numObjects; i++) + ((List)myObject["@items"]).Add( + new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-___/---" } + ); - myObject["@items"] = new List(); + objId_01 = Operations.Send(myObject).Result; - for (int i = 0; i < numObjects; i++) - { - ((List)myObject["@items"]).Add(new Point(i, i, i + rand.NextDouble()) { 
applicationId = i + "-___/---" }); - } + Assert.NotNull(objId_01); + TestContext.Out.WriteLine($"Written {numObjects + 1} objects. Commit id is {objId_01}"); + } - objId_01 = Operations.Send(myObject).Result; + [Test(Description = "Pulling a commit locally"), Order(2)] + public void LocalDownload() + { + var commitPulled = Operations.Receive(objId_01).Result; - Assert.NotNull(objId_01); - TestContext.Out.WriteLine($"Written {numObjects + 1} objects. Commit id is {objId_01}"); + Assert.That(typeof(Point), Is.EqualTo(((List)commitPulled["@items"])[0].GetType())); + Assert.That(numObjects, Is.EqualTo(((List)commitPulled["@items"]).Count)); + } - } + [Test(Description = "Pushing and Pulling a commit locally")] + public void LocalUploadDownload() + { + var myObject = new Base(); + myObject["@items"] = new List(); - [Test(Description = "Pulling a commit locally"), Order(2)] - public void LocalDownload() - { - var commitPulled = Operations.Receive(objId_01).Result; + var rand = new Random(); - Assert.That(typeof(Point), Is.EqualTo(((List)commitPulled["@items"])[0].GetType())); - Assert.That(numObjects, Is.EqualTo(((List)commitPulled["@items"]).Count)); - } + for (int i = 0; i < numObjects; i++) + ((List)myObject["@items"]).Add( + new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-___/---" } + ); - [Test(Description = "Pushing and Pulling a commit locally")] - public void LocalUploadDownload() - { - var myObject = new Base(); - myObject["@items"] = new List(); + objId_01 = Operations.Send(myObject).Result; - var rand = new Random(); + var commitPulled = Operations.Receive(objId_01).Result; - for (int i = 0; i < numObjects; i++) - { - ((List)myObject["@items"]).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-___/---" }); - } + Assert.That(typeof(Point), Is.EqualTo(((List)commitPulled["@items"])[0].GetType())); + Assert.That(numObjects, Is.EqualTo(((List)commitPulled["@items"]).Count)); + } - objId_01 = Operations.Send(myObject).Result; + 
[Test(Description = "Pushing and pulling a commit locally"), Order(3)] + public async Task LocalUploadDownloadSmall() + { + var myObject = new Base(); + myObject["@items"] = new List(); - var commitPulled = Operations.Receive(objId_01).Result; + var rand = new Random(); - Assert.That(typeof(Point), Is.EqualTo(((List)commitPulled["@items"])[0].GetType())); - Assert.That(numObjects, Is.EqualTo(((List)commitPulled["@items"]).Count)); - } + for (int i = 0; i < 30; i++) + ((List)myObject["@items"]).Add( + new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-ugh/---" } + ); - [Test(Description = "Pushing and pulling a commit locally"), Order(3)] - public async Task LocalUploadDownloadSmall() - { - var myObject = new Base(); - myObject["@items"] = new List(); + objId_01 = await Operations.Send(myObject).ConfigureAwait(false); - var rand = new Random(); + Assert.NotNull(objId_01); + TestContext.Out.WriteLine($"Written {numObjects + 1} objects. Commit id is {objId_01}"); - for (int i = 0; i < 30; i++) - { - ((List)myObject["@items"]).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-ugh/---" }); - } + var objsPulled = await Operations.Receive(objId_01).ConfigureAwait(false); + Assert.That(((List)objsPulled["@items"]).Count, Is.EqualTo(30)); + } - objId_01 = await Operations.Send(myObject); + [Test(Description = "Pushing and pulling a commit locally"), Order(3)] + public async Task LocalUploadDownloadListDic() + { + var myList = new List { 1, 2, 3, "ciao" }; + var myDic = new Dictionary + { + { "a", myList }, + { "b", 2 }, + { "c", "ciao" } + }; - Assert.NotNull(objId_01); - TestContext.Out.WriteLine($"Written {numObjects + 1} objects. 
Commit id is {objId_01}"); + var myObject = new Base(); + myObject["@dictionary"] = myDic; + myObject["@list"] = myList; - var objsPulled = await Operations.Receive(objId_01); - Assert.That(((List)objsPulled["@items"]).Count, Is.EqualTo(30)); - } + objId_01 = await Operations.Send(myObject).ConfigureAwait(false); - [Test(Description = "Pushing and pulling a commit locally"), Order(3)] - public async Task LocalUploadDownloadListDic() - { + Assert.NotNull(objId_01); - var myList = new List { 1, 2, 3, "ciao" }; - var myDic = new Dictionary { { "a", myList }, { "b", 2 }, { "c", "ciao" } }; + var objsPulled = await Operations.Receive(objId_01).ConfigureAwait(false); + Assert.That( + ((List)((Dictionary)objsPulled["@dictionary"])["a"]).First(), + Is.EqualTo(1) + ); + Assert.That(((List)objsPulled["@list"]).Last(), Is.EqualTo("ciao")); + } - var myObject = new Base(); - myObject["@dictionary"] = myDic; - myObject["@list"] = myList; + [Test(Description = "Pushing and pulling a random object, with our without detachment"), Order(3)] + public async Task UploadDownloadNonCommitObject() + { + var obj = new Base(); + // Here we are creating a "non-standard" object to act as a base for our multiple objects. + ((dynamic)obj).LayerA = new List(); // Layer a and b will be stored "in" the parent object, + ((dynamic)obj).LayerB = new List(); + ((dynamic)obj)["@LayerC"] = new List(); // whereas this "layer" will be stored as references only. 
+ ((dynamic)obj)["@LayerD"] = new Point[] { new(), new(12, 3, 4) }; + var rand = new Random(); + + for (int i = 0; i < 30; i++) + ((List)((dynamic)obj).LayerA).Add( + new Point(i, i, i + rand.NextDouble()) { applicationId = i + "foo" } + ); + + for (int i = 0; i < 30; i++) + ((List)((dynamic)obj).LayerB).Add( + new Point(i, i, i + rand.NextDouble()) { applicationId = i + "bar" } + ); + + for (int i = 0; i < 30; i++) + ((List)((dynamic)obj)["@LayerC"]).Add( + new Point(i, i, i + rand.NextDouble()) { applicationId = i + "baz" } + ); + + objId_01 = await Operations.Send(obj).ConfigureAwait(false); + + Assert.NotNull(objId_01); + TestContext.Out.WriteLine($"Written {numObjects + 1} objects. Commit id is {objId_01}"); + + var objPulled = await Operations.Receive(objId_01).ConfigureAwait(false); + + Assert.That(typeof(Base), Is.EqualTo(objPulled.GetType())); + + // Note: even if the layers were originally declared as lists of "Base" objects, on deserialisation we cannot know that, + // as it's a dynamic property. Dynamic properties, if their content value is ambigous, will default to a common-sense standard. + // This specifically manifests in the case of lists and dictionaries: List will become List, and + // Dictionary will deserialize to Dictionary. 
+ var layerA = ((dynamic)objPulled)["LayerA"] as List; + Assert.That(layerA.Count, Is.EqualTo(30)); + + var layerC = ((dynamic)objPulled)["@LayerC"] as List; + Assert.That(layerC.Count, Is.EqualTo(30)); + Assert.That(typeof(Point), Is.EqualTo(layerC[0].GetType())); + + var layerD = ((dynamic)objPulled)["@LayerD"] as List; + Assert.That(layerD.Count, Is.EqualTo(2)); + } - objId_01 = await Operations.Send(myObject); + [Test(Description = "Should show progress!"), Order(4)] + public async Task UploadProgressReports() + { + var myObject = new Base(); + myObject["items"] = new List(); + var rand = new Random(); + + for (int i = 0; i < 30; i++) + ((List)myObject["items"]).Add( + new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-fab/---" } + ); + + ConcurrentDictionary progress = null; + commitId_02 = await Operations + .Send( + myObject, + onProgressAction: (dict) => + { + progress = dict; + } + ) + .ConfigureAwait(false); + + Assert.NotNull(progress); + Assert.GreaterOrEqual(progress.Keys.Count, 1); + } - Assert.NotNull(objId_01); + [Test(Description = "Should show progress!"), Order(5)] + public async Task DownloadProgressReports() + { + ConcurrentDictionary progress = null; + var pulledCommit = await Operations + .Receive( + commitId_02, + onProgressAction: (dict) => + { + progress = dict; + } + ) + .ConfigureAwait(false); + Assert.NotNull(progress); + Assert.GreaterOrEqual(progress.Keys.Count, 1); + } - var objsPulled = await Operations.Receive(objId_01); - Assert.That(((List)((Dictionary)objsPulled["@dictionary"])["a"]).First(), Is.EqualTo(1)); - Assert.That(((List)objsPulled["@list"]).Last(), Is.EqualTo("ciao")); - } + [Test( + Description = "Should dispose of transports after a send or receive operation if so specified." 
+ )] + public async Task ShouldDisposeTransports() + { + var @base = new Base(); + @base["test"] = "the best"; - [Test(Description = "Pushing and pulling a random object, with our without detachment"), Order(3)] - public async Task UploadDownloadNonCommitObject() - { - var obj = new Base(); - // Here we are creating a "non-standard" object to act as a base for our multiple objects. - ((dynamic)obj).LayerA = new List(); // Layer a and b will be stored "in" the parent object, - ((dynamic)obj).LayerB = new List(); - ((dynamic)obj)["@LayerC"] = new List(); // whereas this "layer" will be stored as references only. - ((dynamic)obj)["@LayerD"] = new Point[] { new Point(), new Point(12, 3, 4) }; - var rand = new Random(); - - for (int i = 0; i < 30; i++) - { - ((List)((dynamic)obj).LayerA).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "foo" }); - } - - for (int i = 0; i < 30; i++) - { - ((List)((dynamic)obj).LayerB).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "bar" }); - } - - for (int i = 0; i < 30; i++) - { - ((List)((dynamic)obj)["@LayerC"]).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "baz" }); - } - - objId_01 = await Operations.Send(obj); - - Assert.NotNull(objId_01); - TestContext.Out.WriteLine($"Written {numObjects + 1} objects. Commit id is {objId_01}"); - - var objPulled = await Operations.Receive(objId_01); - - Assert.That(typeof(Base), Is.EqualTo(objPulled.GetType())); - - // Note: even if the layers were originally declared as lists of "Base" objects, on deserialisation we cannot know that, - // as it's a dynamic property. Dynamic properties, if their content value is ambigous, will default to a common-sense standard. - // This specifically manifests in the case of lists and dictionaries: List will become List, and - // Dictionary will deserialize to Dictionary. 
- var layerA = ((dynamic)objPulled)["LayerA"] as List; - Assert.That(layerA.Count, Is.EqualTo(30)); - - var layerC = ((dynamic)objPulled)["@LayerC"] as List; - Assert.That(layerC.Count, Is.EqualTo(30)); - Assert.That(typeof(Point), Is.EqualTo(layerC[0].GetType())); - - var layerD = ((dynamic)objPulled)["@LayerD"] as List; - Assert.That(layerD.Count, Is.EqualTo(2)); - } + var myLocalTransport = new SQLiteTransport(); + var id = await Operations + .Send(@base, new List() { myLocalTransport }, false, disposeTransports: true) + .ConfigureAwait(false); - [Test(Description = "Should show progress!"), Order(4)] - public async Task UploadProgressReports() + // Send + try { - var myObject = new Base(); - myObject["items"] = new List(); - var rand = new Random(); - - for (int i = 0; i < 30; i++) - { - ((List)myObject["items"]).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-fab/---" }); - } - - ConcurrentDictionary progress = null; - commitId_02 = await Operations.Send(myObject, onProgressAction: (dict) => - { - progress = dict; - }); - - Assert.NotNull(progress); - Assert.GreaterOrEqual(progress.Keys.Count, 1); + await Operations + .Send(@base, new List() { myLocalTransport }, false, disposeTransports: true) + .ConfigureAwait(false); + Assert.Fail("Send operation did not dispose of transport."); } - - [Test(Description = "Should show progress!"), Order(5)] - public async Task DownloadProgressReports() + catch (Exception) { - ConcurrentDictionary progress = null; - var pulledCommit = await Operations.Receive(commitId_02, onProgressAction: (dict) => - { - progress = dict; - }); - Assert.NotNull(progress); - Assert.GreaterOrEqual(progress.Keys.Count, 1); + // Pass } - [Test(Description = "Should dispose of transports after a send or receive operation if so specified.")] - public async Task ShouldDisposeTransports() + myLocalTransport = myLocalTransport.Clone() as SQLiteTransport; + var obj = await Operations + .Receive(id, null, myLocalTransport, 
disposeTransports: true) + .ConfigureAwait(false); + + try { - var @base = new Base(); - @base["test"] = "the best"; - - var myLocalTransport = new SQLiteTransport(); - var id = await Operations.Send(@base, new List() { myLocalTransport }, false, disposeTransports: true); - - // Send - try - { - await Operations.Send(@base, new List() { myLocalTransport }, false, disposeTransports: true); - Assert.Fail("Send operation did not dispose of transport."); - } - catch (Exception) - { - // Pass - } - - myLocalTransport = myLocalTransport.Clone() as SQLiteTransport; - var obj = await Operations.Receive(id, null, myLocalTransport, disposeTransports: true); - - try - { - await Operations.Receive(id, null, myLocalTransport); - Assert.Fail("Receive operation did not dispose of transport."); - } - catch - { - // Pass - } + await Operations.Receive(id, null, myLocalTransport).ConfigureAwait(false); + Assert.Fail("Receive operation did not dispose of transport."); } - - [Test(Description = "Should not dispose of transports if so specified.")] - public async Task ShouldNotDisposeTransports() + catch { - var @base = new Base(); - @base["test"] = "the best"; - - var myLocalTransport = new SQLiteTransport(); - var id = await Operations.Send(@base, new List() { myLocalTransport }, false); - await Operations.Send(@base, new List() { myLocalTransport }, false); - - var obj = await Operations.Receive(id, null, myLocalTransport); - await Operations.Receive(id, null, myLocalTransport); + // Pass } + } - //[Test] - //public async Task DiskTransportTest() - //{ - // var myObject = new Base(); - // myObject["@items"] = new List(); - // myObject["test"] = "random"; + [Test(Description = "Should not dispose of transports if so specified.")] + public async Task ShouldNotDisposeTransports() + { + var @base = new Base(); + @base["test"] = "the best"; + + var myLocalTransport = new SQLiteTransport(); + var id = await Operations + .Send(@base, new List() { myLocalTransport }, false) + 
.ConfigureAwait(false); + await Operations + .Send(@base, new List() { myLocalTransport }, false) + .ConfigureAwait(false); + + var obj = await Operations.Receive(id, null, myLocalTransport).ConfigureAwait(false); + await Operations.Receive(id, null, myLocalTransport).ConfigureAwait(false); + } - // var rand = new Random(); + //[Test] + //public async Task DiskTransportTest() + //{ + // var myObject = new Base(); + // myObject["@items"] = new List(); + // myObject["test"] = "random"; - // for (int i = 0; i < 100; i++) - // { - // ((List)myObject["@items"]).Add(new Point(i, i, i) { applicationId = i + "-___/---" }); - // } + // var rand = new Random(); - // var dt = new DiskTransport.DiskTransport(); - // var id = await Operations.Send(myObject, new List() { dt }, false); + // for (int i = 0; i < 100; i++) + // { + // ((List)myObject["@items"]).Add(new Point(i, i, i) { applicationId = i + "-___/---" }); + // } - // Assert.IsNotNull(id); + // var dt = new DiskTransport.DiskTransport(); + // var id = await Operations.Send(myObject, new List() { dt }, false); - // var rebase = await Operations.Receive(id, dt); + // Assert.IsNotNull(id); - // Assert.AreEqual(rebase.GetId(true), id); - //} + // var rebase = await Operations.Receive(id, dt); - } + // Assert.AreEqual(rebase.GetId(true), id); + //} } diff --git a/Core/Tests/SerializationTests.cs b/Core/Tests/SerializationTests.cs index 22ad5affb3..d99effcdec 100644 --- a/Core/Tests/SerializationTests.cs +++ b/Core/Tests/SerializationTests.cs @@ -1,287 +1,295 @@ -using System.Drawing; +using System.Drawing; using NUnit.Framework; using Speckle.Core.Api; using Speckle.Core.Models; -namespace Tests +namespace Tests; + +[TestFixture] +public class Serialization { - [TestFixture] - public class Serialization + [Test] + public void SimpleSerialization() { + var table = new DiningTable(); + ((dynamic)table)["@strangeVariable_NAme3"] = new TableLegFixture(); - [Test] - public void SimpleSerialization() - { - var table = new 
DiningTable(); - ((dynamic)table)["@strangeVariable_NAme3"] = new TableLegFixture(); + var result = Operations.Serialize(table); + var test = Operations.Deserialize(result); - var result = Operations.Serialize(table); - var test = Operations.Deserialize(result); + Assert.That(table.GetId(), Is.EqualTo(test.GetId())); - Assert.That(table.GetId(), Is.EqualTo(test.GetId())); + var polyline = new Polyline(); + for (int i = 0; i < 100; i++) + polyline.Points.Add(new Point() { X = i * 2, Y = i % 2 }); - var polyline = new Polyline(); - for (int i = 0; i < 100; i++) - { - polyline.Points.Add(new Point() { X = i * 2, Y = i % 2 }); - } + var strPoly = Operations.Serialize(polyline); + var dePoly = Operations.Deserialize(strPoly); - var strPoly = Operations.Serialize(polyline); - var dePoly = Operations.Deserialize(strPoly); + Assert.That(dePoly.GetId(), Is.EqualTo(polyline.GetId())); + } - Assert.That(dePoly.GetId(), Is.EqualTo(polyline.GetId())); - } + [Test] + public void DictionarySerialisation() + { + // TODO + var dict = new Dictionary(); + for (int i = 0; i < 10; i++) + dict[$"key{i}"] = new Point(i, i, i); - [Test] - public void DictionarySerialisation() - { - // TODO - var dict = new Dictionary(); - for (int i = 0; i < 10; i++) - { - dict[$"key{i}"] = new Point(i, i, i); - } + var result = Operations.Serialize(dict); + var test = Operations.DeserializeDictionary(result); - var result = Operations.Serialize(dict); - var test = Operations.DeserializeDictionary(result); + Assert.That(dict.Keys, Is.EqualTo(test.Keys)); + } - Assert.That(dict.Keys, Is.EqualTo(test.Keys)); - } + [Test] + public void IgnoreCircularReferences() + { + var pt = new Point(1, 2, 3); + pt["circle"] = pt; - [Test] - public void IgnoreCircularReferences() - { - var pt = new Point(1, 2, 3); - pt["circle"] = pt; + var test = Operations.Serialize(pt); - var test = Operations.Serialize(pt); + var result = Operations.Deserialize(test); + var circle = result["circle"]; + Assert.Null(circle); + } - var 
result = Operations.Deserialize(test); - var circle = result["circle"]; - Assert.Null(circle); - } + [Test] + public void InterfacePropHandling() + { + var cat = new PolygonalFeline(); - [Test] - public void InterfacePropHandling() - { - var cat = new PolygonalFeline(); + cat.Tail = new Line() { Start = new Point(0, 0, 0), End = new Point(42, 42, 42) }; - cat.Tail = new Line() + for (int i = 0; i < 10; i++) + { + cat.Claws[$"Claw number {i}"] = new Line { - Start = new Point(0, 0, 0), - End = new Point(42, 42, 42) + Start = new Point(i, i, i), + End = new Point(i + 3.14, i + 3.14, i + 3.14) }; - for (int i = 0; i < 10; i++) + if (i % 2 == 0) + { + cat.Whiskers.Add( + new Line + { + Start = new Point(i / 2, i / 2, i / 2), + End = new Point(i + 3.14, i + 3.14, i + 3.14) + } + ); + } + else { - cat.Claws[$"Claw number {i}"] = new Line { Start = new Point(i, i, i), End = new Point(i + 3.14, i + 3.14, i + 3.14) }; - - if (i % 2 == 0) - { - cat.Whiskers.Add(new Line { Start = new Point(i / 2, i / 2, i / 2), End = new Point(i + 3.14, i + 3.14, i + 3.14) }); - } - else - { - var brokenWhisker = new Polyline(); - brokenWhisker.Points.Add(new Point(-i, 0, 0)); - brokenWhisker.Points.Add(new Point(0, 0, 0)); - brokenWhisker.Points.Add(new Point(i, 0, 0)); - cat.Whiskers.Add(brokenWhisker); - } - - cat.Fur[i] = new Line { Start = new Point(i, i, i), End = new Point(i + 3.14, i + 3.14, i + 3.14) }; + var brokenWhisker = new Polyline(); + brokenWhisker.Points.Add(new Point(-i, 0, 0)); + brokenWhisker.Points.Add(new Point(0, 0, 0)); + brokenWhisker.Points.Add(new Point(i, 0, 0)); + cat.Whiskers.Add(brokenWhisker); } - var result = Operations.Serialize(cat); + cat.Fur[i] = new Line + { + Start = new Point(i, i, i), + End = new Point(i + 3.14, i + 3.14, i + 3.14) + }; + } + + var result = Operations.Serialize(cat); - var deserialisedFeline = Operations.Deserialize(result); + var deserialisedFeline = Operations.Deserialize(result); - Assert.That(deserialisedFeline.GetId(), 
Is.EqualTo(cat.GetId())); // If we're getting the same hash... we're probably fine! - } + Assert.That(deserialisedFeline.GetId(), Is.EqualTo(cat.GetId())); // If we're getting the same hash... we're probably fine! + } - [Test] - public void InheritanceTests() + [Test] + public void InheritanceTests() + { + var superPoint = new SuperPoint() { - var superPoint = new SuperPoint() { X = 10, Y = 10, Z = 10, W = 42 }; + X = 10, + Y = 10, + Z = 10, + W = 42 + }; - var str = Operations.Serialize(superPoint); - var sstr = Operations.Deserialize(str); + var str = Operations.Serialize(superPoint); + var sstr = Operations.Deserialize(str); - Assert.That(sstr.speckle_type, Is.EqualTo(superPoint.speckle_type)); - } + Assert.That(sstr.speckle_type, Is.EqualTo(superPoint.speckle_type)); + } - [Test] - public void ListDynamicProp() - { - var point = new Point(); - var test = new List(); + [Test] + public void ListDynamicProp() + { + var point = new Point(); + var test = new List(); - for (var i = 0; i < 100; i++) - { - test.Add(new SuperPoint { W = i }); - } + for (var i = 0; i < 100; i++) + test.Add(new SuperPoint { W = i }); - point["test"] = test; + point["test"] = test; - var str = Operations.Serialize(point); - var dsrls = Operations.Deserialize(str); + var str = Operations.Serialize(point); + var dsrls = Operations.Deserialize(str); - var list = dsrls["test"] as List; // NOTE: on dynamically added lists, we cannot infer the inner type and we always fall back to a generic list. - Assert.That(list.Count, Is.EqualTo(100)); - } + var list = dsrls["test"] as List; // NOTE: on dynamically added lists, we cannot infer the inner type and we always fall back to a generic list. 
+ Assert.That(list.Count, Is.EqualTo(100)); + } - [Test] - public void ChunkSerialisation() - { - var baseBasedChunk = new DataChunk(); - for (var i = 0; i < 200; i++) - { - baseBasedChunk.data.Add(new SuperPoint { W = i }); - } + [Test] + public void ChunkSerialisation() + { + var baseBasedChunk = new DataChunk(); + for (var i = 0; i < 200; i++) + baseBasedChunk.data.Add(new SuperPoint { W = i }); - var stringBasedChunk = new DataChunk(); - for (var i = 0; i < 200; i++) - { - stringBasedChunk.data.Add(i + "_hai"); - } + var stringBasedChunk = new DataChunk(); + for (var i = 0; i < 200; i++) + stringBasedChunk.data.Add(i + "_hai"); - var doubleBasedChunk = new DataChunk(); - for (var i = 0; i < 200; i++) - { - doubleBasedChunk.data.Add(i + 0.33); - } + var doubleBasedChunk = new DataChunk(); + for (var i = 0; i < 200; i++) + doubleBasedChunk.data.Add(i + 0.33); - var baseChunkString = Operations.Serialize(baseBasedChunk); - var stringChunkString = Operations.Serialize(stringBasedChunk); - var doubleChunkString = Operations.Serialize(doubleBasedChunk); + var baseChunkString = Operations.Serialize(baseBasedChunk); + var stringChunkString = Operations.Serialize(stringBasedChunk); + var doubleChunkString = Operations.Serialize(doubleBasedChunk); - var baseChunkDeserialised = (DataChunk)Operations.Deserialize(baseChunkString); - var stringChunkDeserialised = (DataChunk)Operations.Deserialize(stringChunkString); - var doubleChunkDeserialised = (DataChunk)Operations.Deserialize(doubleChunkString); + var baseChunkDeserialised = (DataChunk)Operations.Deserialize(baseChunkString); + var stringChunkDeserialised = (DataChunk)Operations.Deserialize(stringChunkString); + var doubleChunkDeserialised = (DataChunk)Operations.Deserialize(doubleChunkString); - Assert.That(baseChunkDeserialised.data.Count, Is.EqualTo(baseBasedChunk.data.Count)); - Assert.That(stringChunkDeserialised.data.Count, Is.EqualTo(stringBasedChunk.data.Count)); - Assert.That(doubleChunkDeserialised.data.Count, 
Is.EqualTo(doubleBasedChunk.data.Count)); - } + Assert.That(baseChunkDeserialised.data.Count, Is.EqualTo(baseBasedChunk.data.Count)); + Assert.That(stringChunkDeserialised.data.Count, Is.EqualTo(stringBasedChunk.data.Count)); + Assert.That(doubleChunkDeserialised.data.Count, Is.EqualTo(doubleBasedChunk.data.Count)); + } - [Test] - public void ObjectWithChunksSerialisation() + [Test] + public void ObjectWithChunksSerialisation() + { + int MAX_NUM = 2020; + var mesh = new FakeMesh(); + + mesh.ArrayOfDoubles = new double[MAX_NUM]; + mesh.ArrayOfLegs = new TableLeg[MAX_NUM]; + var customChunk = new List(); + var defaultChunk = new List(); + + for (int i = 0; i < MAX_NUM; i++) { - int MAX_NUM = 2020; - var mesh = new FakeMesh(); + mesh.Vertices.Add(i / 2); + customChunk.Add(i / 2); + defaultChunk.Add(i / 2); + mesh.Tables.Add(new Tabletop { length = 2000 }); + mesh.ArrayOfDoubles[i] = i * 3.3; + mesh.ArrayOfLegs[i] = new TableLeg { height = 2 + i }; + } - mesh.ArrayOfDoubles = new double[MAX_NUM]; - mesh.ArrayOfLegs = new TableLeg[MAX_NUM]; - var customChunk = new List(); - var defaultChunk = new List(); + mesh["@(800)CustomChunk"] = customChunk; + mesh["@()DefaultChunk"] = defaultChunk; - for (int i = 0; i < MAX_NUM; i++) - { - mesh.Vertices.Add(i / 2); - customChunk.Add(i / 2); - defaultChunk.Add(i / 2); - mesh.Tables.Add(new Tabletop { length = 2000 }); - mesh.ArrayOfDoubles[i] = i * 3.3; - mesh.ArrayOfLegs[i] = new TableLeg { height = 2 + i }; - } + var serialised = Operations.Serialize(mesh); + var deserialised = Operations.Deserialize(serialised); - mesh["@(800)CustomChunk"] = customChunk; - mesh["@()DefaultChunk"] = defaultChunk; + Assert.That(mesh.GetId(), Is.EqualTo(deserialised.GetId())); + } - var serialised = Operations.Serialize(mesh); - var deserialised = Operations.Deserialize(serialised); + [Test] + public void EmptyListSerialisationTests() + { + // NOTE: expected behaviour is that empty lists should serialize as empty lists. 
Don't ask why, it's complicated. + // Regarding chunkable empty lists, to prevent empty chunks, the expected behaviour is to have an empty lists, with no chunks inside. + var test = new Base(); - Assert.That(mesh.GetId(), Is.EqualTo(deserialised.GetId())); - } + test["@(5)emptyChunks"] = new List(); + test["emptyList"] = new List(); + test["@emptyDetachableList"] = new List(); - [Test] - public void EmptyListSerialisationTests() + // Note: nested empty lists should be preserved. + test["nestedList"] = new List() { new List() { new List() } }; + test["@nestedDetachableList"] = new List() { - // NOTE: expected behaviour is that empty lists should serialize as empty lists. Don't ask why, it's complicated. - // Regarding chunkable empty lists, to prevent empty chunks, the expected behaviour is to have an empty lists, with no chunks inside. - var test = new Base(); - - test["@(5)emptyChunks"] = new List(); - test["emptyList"] = new List(); - test["@emptyDetachableList"] = new List(); - - // Note: nested empty lists should be preserved. 
- test["nestedList"] = new List() { new List() { new List() } }; - test["@nestedDetachableList"] = new List() { new List() { new List() } }; - - var serialised = Operations.Serialize(test); - var isCorrect = - serialised.Contains("\"@(5)emptyChunks\":[]") && - serialised.Contains("\"emptyList\":[]") && - serialised.Contains("\"@emptyDetachableList\":[]") && - serialised.Contains("\"nestedList\":[[[]]]") && - serialised.Contains("\"@nestedDetachableList\":[[[]]]"); - - Assert.That(isCorrect, Is.EqualTo(true)); - } + new List() { new List() } + }; + + var serialised = Operations.Serialize(test); + var isCorrect = + serialised.Contains("\"@(5)emptyChunks\":[]") + && serialised.Contains("\"emptyList\":[]") + && serialised.Contains("\"@emptyDetachableList\":[]") + && serialised.Contains("\"nestedList\":[[[]]]") + && serialised.Contains("\"@nestedDetachableList\":[[[]]]"); + + Assert.That(isCorrect, Is.EqualTo(true)); + } + private class DateMock : Base + { + public DateTime TestField { get; set; } + } - private class DateMock : Base - { - public DateTime TestField { get; set; } - } - [Test] - public void DateSerialisation() - { - var date = new DateTime(2020, 1, 14); - var mockBase = new DateMock { TestField = date }; + [Test] + public void DateSerialisation() + { + var date = new DateTime(2020, 1, 14); + var mockBase = new DateMock { TestField = date }; - var result = Operations.Serialize(mockBase); - var test = (DateMock)Operations.Deserialize(result); + var result = Operations.Serialize(mockBase); + var test = (DateMock)Operations.Deserialize(result); - Assert.That(test.TestField, Is.EqualTo(date)); - } + Assert.That(test.TestField, Is.EqualTo(date)); + } - private class GUIDMock : Base - { - public Guid TestField { get; set; } - } - [Test] - public void GuidSerialisation() - { - var guid = Guid.NewGuid(); - var mockBase = new GUIDMock { TestField = guid }; + private class GUIDMock : Base + { + public Guid TestField { get; set; } + } - var result = 
Operations.Serialize(mockBase); - var test = (GUIDMock)Operations.Deserialize(result); + [Test] + public void GuidSerialisation() + { + var guid = Guid.NewGuid(); + var mockBase = new GUIDMock { TestField = guid }; - Assert.That(test.TestField, Is.EqualTo(guid)); - } + var result = Operations.Serialize(mockBase); + var test = (GUIDMock)Operations.Deserialize(result); - private class ColorMock : Base - { - public Color TestField { get; set; } - } - [Test] - public void ColorSerialisation() - { - var color = Color.FromArgb(255, 4, 126, 251); - var mockBase = new ColorMock { TestField = color }; + Assert.That(test.TestField, Is.EqualTo(guid)); + } - var result = Operations.Serialize(mockBase); - var test = (ColorMock)Operations.Deserialize(result); + private class ColorMock : Base + { + public Color TestField { get; set; } + } - Assert.That(test.TestField, Is.EqualTo(color)); - } + [Test] + public void ColorSerialisation() + { + var color = Color.FromArgb(255, 4, 126, 251); + var mockBase = new ColorMock { TestField = color }; - private class StringDateTimeRegressionMock : Base - { - public String TestField { get; set; } - } - [Test] - public void StringDateTimeRegression() - { - var mockBase = new StringDateTimeRegressionMock { TestField = "2021-11-12T11:32:01" }; + var result = Operations.Serialize(mockBase); + var test = (ColorMock)Operations.Deserialize(result); - var result = Operations.Serialize(mockBase); - var test = (StringDateTimeRegressionMock)Operations.Deserialize(result); + Assert.That(test.TestField, Is.EqualTo(color)); + } - Assert.That(test.TestField, Is.EqualTo(mockBase.TestField)); - } + private class StringDateTimeRegressionMock : Base + { + public string TestField { get; set; } + } + + [Test] + public void StringDateTimeRegression() + { + var mockBase = new StringDateTimeRegressionMock { TestField = "2021-11-12T11:32:01" }; + + var result = Operations.Serialize(mockBase); + var test = (StringDateTimeRegressionMock)Operations.Deserialize(result); + 
+ Assert.That(test.TestField, Is.EqualTo(mockBase.TestField)); } } diff --git a/Core/Tests/SerializerNonBreakingChanges.cs b/Core/Tests/SerializerNonBreakingChanges.cs index f4dc30e525..de2a70de88 100644 --- a/Core/Tests/SerializerNonBreakingChanges.cs +++ b/Core/Tests/SerializerNonBreakingChanges.cs @@ -1,4 +1,4 @@ -using System.Drawing; +using System.Drawing; using NUnit.Framework; using Speckle.Core.Api; using Speckle.Core.Models; @@ -10,13 +10,15 @@ namespace Tests.Models; /// This doesn't guarantee things work this way for SpecklePy /// Nor does it encompass other tricks (like deserialize callback, or computed json ignored properties) /// -[TestFixture, Description("For certain types, changing property from one type to another should be implicitly backwards compatible")] +[ + TestFixture, + Description( + "For certain types, changing property from one type to another should be implicitly backwards compatible" + ) +] public class SerializerNonBreakingChanges : PrimitiveTestFixture { - - [Test] - [TestCaseSource(nameof(Int8TestCases))] - [TestCaseSource(nameof(Int32TestCases))] + [Test, TestCaseSource(nameof(Int8TestCases)), TestCaseSource(nameof(Int32TestCases))] public void IntToColor(int argb) { var from = new IntValueMock { value = argb }; @@ -24,10 +26,8 @@ public void IntToColor(int argb) var res = from.SerializeAsTAndDeserialize(); Assert.That(res.value.ToArgb(), Is.EqualTo(argb)); } - - [Test] - [TestCaseSource(nameof(Int8TestCases))] - [TestCaseSource(nameof(Int32TestCases))] + + [Test, TestCaseSource(nameof(Int8TestCases)), TestCaseSource(nameof(Int32TestCases))] public void ColorToInt(int argb) { var from = new ColorValueMock { value = Color.FromArgb(argb) }; @@ -36,10 +36,12 @@ public void ColorToInt(int argb) Assert.That(res.value, Is.EqualTo(argb)); } - [Test] - [TestCaseSource(nameof(Int8TestCases))] - [TestCaseSource(nameof(Int32TestCases))] - [TestCaseSource(nameof(Int64TestCases))] + [ + Test, + TestCaseSource(nameof(Int8TestCases)), + 
TestCaseSource(nameof(Int32TestCases)), + TestCaseSource(nameof(Int64TestCases)) + ] public void IntToDouble(long testCase) { var from = new IntValueMock { value = testCase }; @@ -47,11 +49,13 @@ public void IntToDouble(long testCase) var res = from.SerializeAsTAndDeserialize(); Assert.That(res.value, Is.EqualTo(testCase)); } - - [Test] - [TestCaseSource(nameof(Int8TestCases))] - [TestCaseSource(nameof(Int32TestCases))] - [TestCaseSource(nameof(Int64TestCases))] + + [ + Test, + TestCaseSource(nameof(Int8TestCases)), + TestCaseSource(nameof(Int32TestCases)), + TestCaseSource(nameof(Int64TestCases)) + ] public void IntToString(long testCase) { var from = new IntValueMock { value = testCase }; @@ -63,22 +67,20 @@ public void IntToString(long testCase) private static double[][] ArrayTestCases = { new double[] { }, - new double[] { 0, 1, int.MaxValue, int.MinValue, }, - new double[] { default, double.Epsilon, double.MaxValue, double.MinValue }, + new double[] { 0, 1, int.MaxValue, int.MinValue }, + new double[] { default, double.Epsilon, double.MaxValue, double.MinValue } }; - [Test] - [TestCaseSource(nameof(ArrayTestCases))] + [Test, TestCaseSource(nameof(ArrayTestCases))] public void ArrayToList(double[] testCase) { - var from = new ArrayDoubleValueMock { value = testCase }; + var from = new ArrayDoubleValueMock { value = testCase }; var res = from.SerializeAsTAndDeserialize(); Assert.That(res.value, Is.EquivalentTo(testCase)); } - - [Test] - [TestCaseSource(nameof(ArrayTestCases))] + + [Test, TestCaseSource(nameof(ArrayTestCases))] public void ListToArray(double[] testCase) { var from = new ListDoubleValueMock { value = testCase.ToList() }; @@ -86,25 +88,24 @@ public void ListToArray(double[] testCase) var res = from.SerializeAsTAndDeserialize(); Assert.That(res.value, Is.EquivalentTo(testCase)); } - + [Test, TestCaseSource(nameof(MyEnums))] public void EnumToInt(MyEnum testCase) { - var from = new EnumValueMock{ value = testCase }; + var from = new EnumValueMock { 
value = testCase }; var res = from.SerializeAsTAndDeserialize(); Assert.That(res.value, Is.EqualTo((int)testCase)); } - + [Test, TestCaseSource(nameof(MyEnums))] public void IntToEnum(MyEnum testCase) { - var from = new IntValueMock { value = (int)testCase}; + var from = new IntValueMock { value = (int)testCase }; var res = from.SerializeAsTAndDeserialize(); Assert.That(res.value, Is.EqualTo(testCase)); } - } /// @@ -112,30 +113,22 @@ public void IntToEnum(MyEnum testCase) /// This doesn't guarantee things work this way for SpecklePy /// Nor does it encompass other tricks (like deserialize callback, or computed json ignored properties) /// -[TestFixture, Description("For certain types, changing property from one type to another is a breaking change, and not backwards/forwards compatible")] +[ + TestFixture, + Description( + "For certain types, changing property from one type to another is a breaking change, and not backwards/forwards compatible" + ) +] public class SerializerBreakingChanges : PrimitiveTestFixture { - [Test, Description("Deserialization of a JTokenType.Float to a .NET short/int/long should throw exception")] - [TestCaseSource(nameof(Float64TestCases))] - [TestCase(1e+30)] - public void DoubleToInt_ShouldThrow(double testCase) - { - var from = new DoubleValueMock { value = testCase }; - Assert.Throws( - () => from.SerializeAsTAndDeserialize() - ); - } - [Test] public void StringToInt_ShouldThrow() { var from = new StringValueMock(); from.value = "testValue"; - Assert.Throws( - () => from.SerializeAsTAndDeserialize() - ); + Assert.Throws(() => from.SerializeAsTAndDeserialize()); } - + [Test, TestCaseSource(nameof(MyEnums))] public void StringToEnum_ShouldThrow(MyEnum testCase) { @@ -146,8 +139,20 @@ public void StringToEnum_ShouldThrow(MyEnum testCase) var res = from.SerializeAsTAndDeserialize(); }); } - + [ + Test, + Description( + "Deserialization of a JTokenType.Float to a .NET short/int/long should throw exception" + ), + 
TestCaseSource(nameof(Float64TestCases)), + TestCase(1e+30) + ] + public void DoubleToInt_ShouldThrow(double testCase) + { + var from = new DoubleValueMock { value = testCase }; + Assert.Throws(() => from.SerializeAsTAndDeserialize()); + } } public class TValueMock : SerializerMock @@ -192,35 +197,37 @@ public class EnumValueMock : SerializerMock public enum MyEnum { - Zero, - One, - Two, + Zero, + One, + Two, Three, Neg = -1, Min = int.MinValue, - Max = int.MaxValue, + Max = int.MaxValue } public abstract class SerializerMock : Base { public string _speckle_type; - public SerializerMock() + protected SerializerMock() { _speckle_type = base.speckle_type; } - public void SerializeAs() where T : Base, new() + public override string speckle_type => _speckle_type; + + public void SerializeAs() + where T : Base, new() { - T target = new T(); + T target = new(); _speckle_type = target.speckle_type; } - public override string speckle_type => _speckle_type; - - internal TTo SerializeAsTAndDeserialize() where TTo : Base, new() + internal TTo SerializeAsTAndDeserialize() + where TTo : Base, new() { - this.SerializeAs(); + SerializeAs(); var json = Operations.Serialize(this); @@ -231,17 +238,29 @@ public SerializerMock() } } -public class PrimitiveTestFixture +public abstract class PrimitiveTestFixture { - public static SByte[] Int8TestCases = { default, sbyte.MaxValue, sbyte.MinValue }; - public static Int16[] Int16TestCases = { short.MaxValue, short.MinValue }; - public static Int32[] Int32TestCases = { int.MinValue, int.MaxValue }; - public static Int64[] Int64TestCases = { long.MaxValue, long.MinValue }; - - public static Double[] Float64TestCases = { default, double.Epsilon, double.MaxValue, double.MinValue }; - public static Single[] Float32TestCases = { default, float.Epsilon, float.MaxValue, float.MinValue }; + public static sbyte[] Int8TestCases = { default, sbyte.MaxValue, sbyte.MinValue }; + public static short[] Int16TestCases = { short.MaxValue, short.MinValue }; 
+ public static int[] Int32TestCases = { int.MinValue, int.MaxValue }; + public static long[] Int64TestCases = { long.MaxValue, long.MinValue }; + + public static double[] Float64TestCases = + { + default, + double.Epsilon, + double.MaxValue, + double.MinValue + }; + public static float[] Float32TestCases = + { + default, + float.Epsilon, + float.MaxValue, + float.MinValue + }; public static Half[] Float16TestCases = { default, Half.Epsilon, Half.MaxValue, Half.MinValue }; - public static Single[] FloatIntegralTestCases = { 0, 1, int.MaxValue, int.MinValue }; - + public static float[] FloatIntegralTestCases = { 0, 1, int.MaxValue, int.MinValue }; + public static MyEnum[] MyEnums => Enum.GetValues(typeof(MyEnum)).Cast().ToArray(); } diff --git a/Core/Tests/SpeckleType.cs b/Core/Tests/SpeckleType.cs index ddc56f664b..905270ae20 100644 --- a/Core/Tests/SpeckleType.cs +++ b/Core/Tests/SpeckleType.cs @@ -1,27 +1,28 @@ using NUnit.Framework; using Speckle.Core.Models; -namespace Tests +namespace Tests; + +[TestFixture] +public class SpeckleTypeTests { - [TestFixture] - public class SpeckleTypeTests + [Test, TestCaseSource(nameof(Cases))] + public void SpeckleTypeIsProperlyBuilt(Base foo, string expected_type) + { + Assert.That(expected_type, Is.EqualTo(foo.speckle_type)); + } + + private static object[] Cases = { - [Test] - [TestCaseSource(nameof(Cases))] - public void SpeckleTypeIsProperlyBuilt(Base foo, string expected_type) - { - Assert.AreEqual(foo.speckle_type, expected_type); - } + new object[] { new Base(), "Base" }, + new object[] { new Foo(), "Tests.Foo" }, + new object[] { new Bar(), "Tests.Foo:Tests.Bar" }, + new object[] { new Baz(), "Tests.Foo:Tests.Bar:Tests.Baz" } + }; +} - static object[] Cases = { - new object[] {new Base(), "Base"}, - new object[] {new Foo(), "Tests.Foo"}, - new object[] {new Bar(), "Tests.Foo:Tests.Bar"}, - new object[] {new Baz(), "Tests.Foo:Tests.Bar:Tests.Baz"}, - }; +public class Foo : Base { } - } - public class Foo: Base {} - 
public class Bar: Foo {} - public class Baz: Bar {} -} \ No newline at end of file +public class Bar : Foo { } + +public class Baz : Bar { } diff --git a/Core/Tests/TestKit.cs b/Core/Tests/TestKit.cs index 02b8424674..accc529857 100644 --- a/Core/Tests/TestKit.cs +++ b/Core/Tests/TestKit.cs @@ -1,235 +1,232 @@ -using Newtonsoft.Json; +using Newtonsoft.Json; using Speckle.Core.Kits; using Speckle.Core.Models; -namespace Tests -{ - /// - /// Simple speckle kit (no conversions) used in tests. - /// - public class TestKit : ISpeckleKit - { - public IEnumerable Types => GetType().Assembly.GetTypes().Where(type => type.IsSubclassOf(typeof(Base))); +namespace Tests; - public string Description => "Simple object model for with some types for tests."; +/// +/// Simple speckle kit (no conversions) used in tests. +/// +public class TestKit : ISpeckleKit +{ + public TestKit() { } - public string Name => nameof(TestKit); + public IEnumerable Types => + GetType().Assembly.GetTypes().Where(type => type.IsSubclassOf(typeof(Base))); - public string Author => "Dimitrie"; + public string Description => "Simple object model for with some types for tests."; - public string WebsiteOrEmail => "hello@Speckle.Core.works"; + public string Name => nameof(TestKit); - public IEnumerable Converters { get => new List(); } + public string Author => "Dimitrie"; - public TestKit() { } + public string WebsiteOrEmail => "hello@Speckle.Core.works"; - public Base ToSpeckle(object @object) - { - throw new NotImplementedException(); - } + public IEnumerable Converters => new List(); - public bool CanConvertToSpeckle(object @object) - { - throw new NotImplementedException(); - } + public ISpeckleConverter LoadConverter(string app) + { + return null; + } - public object ToNative(Base @object) - { - throw new NotImplementedException(); - } + public Base ToSpeckle(object @object) + { + throw new NotImplementedException(); + } - public bool CanConvertToNative(Base @object) - { - throw new 
NotImplementedException(); - } + public bool CanConvertToSpeckle(object @object) + { + throw new NotImplementedException(); + } - public IEnumerable GetServicedApplications() - { - throw new NotImplementedException(); - } + public object ToNative(Base @object) + { + throw new NotImplementedException(); + } - public void SetContextDocument(object @object) - { - throw new NotImplementedException(); - } + public bool CanConvertToNative(Base @object) + { + throw new NotImplementedException(); + } - public ISpeckleConverter LoadConverter(string app) - { - return null; - } + public IEnumerable GetServicedApplications() + { + throw new NotImplementedException(); } - public class FakeMesh : Base + public void SetContextDocument(object @object) { - [DetachProperty] - [Chunkable] - public List Vertices { get; set; } = new List(); + throw new NotImplementedException(); + } +} - [DetachProperty] - [Chunkable(1000)] - public double[] ArrayOfDoubles { get; set; } +public class FakeMesh : Base +{ + public FakeMesh() { } - [DetachProperty] - [Chunkable(1000)] - public TableLeg[] ArrayOfLegs { get; set; } + [DetachProperty, Chunkable] + public List Vertices { get; set; } = new(); - [DetachProperty] - [Chunkable(2500)] - public List Tables { get; set; } = new List(); + [DetachProperty, Chunkable(1000)] + public double[] ArrayOfDoubles { get; set; } - public FakeMesh() { } - } + [DetachProperty, Chunkable(1000)] + public TableLeg[] ArrayOfLegs { get; set; } - public class DiningTable : Base + [DetachProperty, Chunkable(2500)] + public List Tables { get; set; } = new(); +} + +public class DiningTable : Base +{ + public DiningTable() { - [DetachProperty] - public TableLeg LegOne { get; set; } + LegOne = new TableLeg() { height = 2 * 3, radius = 10 }; + LegTwo = new TableLeg() { height = 1, radius = 5 }; - [DetachProperty] - public TableLeg LegTwo { get; set; } + MoreLegs.Add(new TableLeg() { height = 4 }); + MoreLegs.Add(new TableLeg() { height = 10 }); - [DetachProperty] - public List 
MoreLegs { get; set; } = new List(); + Tabletop = new Tabletop() + { + length = 200, + width = 12, + thickness = 3 + }; + } - [DetachProperty] - public Tabletop Tabletop { get; set; } + [DetachProperty] + public TableLeg LegOne { get; set; } - public string TableModel { get; set; } = "Sample Table"; + [DetachProperty] + public TableLeg LegTwo { get; set; } - public DiningTable() - { - LegOne = new TableLeg() { height = 2 * 3, radius = 10 }; - LegTwo = new TableLeg() { height = 1, radius = 5 }; + [DetachProperty] + public List MoreLegs { get; set; } = new(); - MoreLegs.Add(new TableLeg() { height = 4 }); - MoreLegs.Add(new TableLeg() { height = 10 }); + [DetachProperty] + public Tabletop Tabletop { get; set; } - Tabletop = new Tabletop() { length = 200, width = 12, thickness = 3 }; - } - } + public string TableModel { get; set; } = "Sample Table"; +} - public class Tabletop : Base - { - public double length { get; set; } - public double width { get; set; } - public double thickness { get; set; } +public class Tabletop : Base +{ + public Tabletop() { } - public Tabletop() { } - } + public double length { get; set; } + public double width { get; set; } + public double thickness { get; set; } +} - public class TableLeg : Base - { - public double height { get; set; } - public double radius { get; set; } +public class TableLeg : Base +{ + public TableLeg() { } - [DetachProperty] - public TableLegFixture fixture { get; set; } = new TableLegFixture(); + public double height { get; set; } + public double radius { get; set; } - public TableLeg() { } - } + [DetachProperty] + public TableLegFixture fixture { get; set; } = new(); +} - public class TableLegFixture : Base - { - public string nails { get; set; } = "MANY NAILS WOW "; +public class TableLegFixture : Base +{ + public TableLegFixture() { } - public TableLegFixture() { } - } + public string nails { get; set; } = "MANY NAILS WOW "; +} + +public class Point : Base +{ + public Point() { } - public class Point : Base + 
public Point(double X, double Y, double Z) { - public double X { get; set; } - public double Y { get; set; } - public double Z { get; set; } + this.X = X; + this.Y = Y; + this.Z = Z; + } - public Point() { } + public double X { get; set; } + public double Y { get; set; } + public double Z { get; set; } +} - public Point(double X, double Y, double Z) - { - this.X = X; - this.Y = Y; - this.Z = Z; - } - } +public class SuperPoint : Point +{ + public SuperPoint() { } - public class SuperPoint : Point - { - public double W { get; set; } + public double W { get; set; } +} - public SuperPoint() { } - } +public class Mesh : Base +{ + public List Faces = new(); - public class Mesh : Base - { - [JsonIgnore] - public List Points = new List(); + [JsonIgnore] + public List Points = new(); + + public Mesh() { } - public List Vertices + public List Vertices + { + get => Points.SelectMany(pt => new List() { pt.X, pt.Y, pt.Z }).ToList(); + set { - get => Points.SelectMany(pt => new List() { pt.X, pt.Y, pt.Z }).ToList(); - set - { - for (int i = 0; i < value.Count; i += 3) - { - Points.Add(new Point(value[i], value[i + 1], value[i + 2])); - } - } + for (int i = 0; i < value.Count; i += 3) + Points.Add(new Point(value[i], value[i + 1], value[i + 2])); } + } +} - public List Faces = new List(); +public interface ICurve +{ + // Just for fun +} - public Mesh() { } - } +/// +/// Store individual points in a list structure for developer ergonomics. Nevertheless, for performance reasons (hashing, serialisation & storage) expose the same list of points as a typed array. +/// +public class Polyline : Base, ICurve +{ + [JsonIgnore] + public List Points = new(); - public interface ICurve - { - // Just for fun - } + public Polyline() { } - /// - /// Store individual points in a list structure for developer ergonomics. Nevertheless, for performance reasons (hashing, serialisation & storage) expose the same list of points as a typed array. 
- /// - public class Polyline : Base, ICurve + public List Vertices { - [JsonIgnore] - public List Points = new List(); - - public List Vertices + get => Points.SelectMany(pt => new List() { pt.X, pt.Y, pt.Z }).ToList(); + set { - get => Points.SelectMany(pt => new List() { pt.X, pt.Y, pt.Z }).ToList(); - set - { - for (int i = 0; i < value.Count; i += 3) - { - Points.Add(new Point(value[i], value[i + 1], value[i + 2])); - } - } + for (int i = 0; i < value.Count; i += 3) + Points.Add(new Point(value[i], value[i + 1], value[i + 2])); } - - public Polyline() { } } +} - public class Line : Base, ICurve - { - public Point Start { get; set; } - public Point End { get; set; } +public class Line : Base, ICurve +{ + public Line() { } - public Line() { } - } + public Point Start { get; set; } + public Point End { get; set; } +} - /// - /// This class exists to purely test some weird cases in which Intefaces might trash serialisation. - /// - public class PolygonalFeline : Base - { - public List Whiskers { get; set; } = new List(); +/// +/// This class exists to purely test some weird cases in which Intefaces might trash serialisation. 
+/// +public class PolygonalFeline : Base +{ + public PolygonalFeline() { } - public Dictionary Claws { get; set; } = new Dictionary(); + public List Whiskers { get; set; } = new(); - [DetachProperty] - public ICurve Tail { get; set; } + public Dictionary Claws { get; set; } = new(); - public ICurve[] Fur { get; set; } = new ICurve[1000]; + [DetachProperty] + public ICurve Tail { get; set; } - public PolygonalFeline() { } - } + public ICurve[] Fur { get; set; } = new ICurve[1000]; } diff --git a/Core/Tests/TraversalTests.cs b/Core/Tests/TraversalTests.cs index b2764252d7..974c22cd64 100644 --- a/Core/Tests/TraversalTests.cs +++ b/Core/Tests/TraversalTests.cs @@ -1,19 +1,18 @@ -using NUnit.Framework; +using NUnit.Framework; using Speckle.Core.Models; using Speckle.Core.Models.Extensions; -namespace Tests.Models +namespace Tests.Models; + +[TestFixture, TestOf(typeof(BaseExtensions))] +public class TraversalTests { - [TestFixture, TestOf(typeof(BaseExtensions))] - public class TraversalTests + [Test, Description("Tests that provided breaker rules are respected")] + public void TestFlattenWithBreaker() { - - [Test] - [Description("Tests that provided breaker rules are respected")] - public void TestFlattenWithBreaker() - { - //Setup - Base root = new Base() + //Setup + Base root = + new() { id = "root", ["child"] = new Base() @@ -22,99 +21,94 @@ public void TestFlattenWithBreaker() ["child"] = new Base() { id = "break on me, go no further", - ["child"] = new Base() - { - id = "should have ignored me" - } + ["child"] = new Base() { id = "should have ignored me" } } } }; - bool BreakRule(Base b) - { - return b.id.Contains("break on me"); - } - - //Flatten - var ret = root.Flatten(BreakRule).ToList(); + bool BreakRule(Base b) => b.id.Contains("break on me"); - //Test - Assert.That(ret, Has.Count.EqualTo(3)); - Assert.That(ret, Is.Unique); - Assert.That(ret.Where(BreakRule), Is.Not.Empty); - Assert.That(ret.Where(x => x.id.Contains("should have ignored me")), Is.Empty); - 
} + //Flatten + var ret = root.Flatten(BreakRule).ToList(); + //Test + Assert.That(ret, Has.Count.EqualTo(3)); + Assert.That(ret, Is.Unique); + Assert.That(ret.Where(BreakRule), Is.Not.Empty); + Assert.That(ret.Where(x => x.id.Contains("should have ignored me")), Is.Empty); + } - [Test] - [TestCase(5, 5)] - [TestCase(5, 10)] - [TestCase(10, 5)] - [Description("Tests breaking after a fixed number of items")] - public void TestBreakerFixed(int nestDepth, int flattenDepth) + [ + Test, + TestCase(5, 5), + TestCase(5, 10), + TestCase(10, 5), + Description("Tests breaking after a fixed number of items") + ] + public void TestBreakerFixed(int nestDepth, int flattenDepth) + { + //Setup + Base rootObject = new() { id = "0" }; + Base lastNode = rootObject; + for (int i = 1; i < nestDepth; i++) { - //Setup - Base rootObject = new Base() { id = "0" }; - Base lastNode = rootObject; - for (int i = 1; i < nestDepth; i++) - { - Base newNode = new Base() { id = $"{i}" }; - lastNode["child"] = newNode; - lastNode = newNode; - } - - //Flatten - int counter = 0; - var ret = rootObject.Flatten(b => ++counter >= flattenDepth).ToList(); ; - - //Test - Assert.That(ret, Has.Count.EqualTo(Math.Min(flattenDepth, nestDepth))); - Assert.That(ret, Is.Unique); + Base newNode = new() { id = $"{i}" }; + lastNode["child"] = newNode; + lastNode = newNode; } + //Flatten + int counter = 0; + var ret = rootObject.Flatten(b => ++counter >= flattenDepth).ToList(); + ; - [Test, Timeout(2000)] - [Description("Tests that the flatten function does not get stuck on circular references")] - public void TestCircularReference() - { - //Setup - Base objectA = new Base() { id = "a" }; - Base objectB = new Base() { id = "b" }; - Base objectC = new Base() { id = "c" }; - - objectA["child"] = objectB; - objectB["child"] = objectC; - objectC["child"] = objectA; - - - //Flatten - var ret = objectA.Flatten().ToList(); + //Test + Assert.That(ret, Has.Count.EqualTo(Math.Min(flattenDepth, nestDepth))); + Assert.That(ret, 
Is.Unique); + } - //Test - Assert.That(ret, Is.Unique); - Assert.That(ret, Is.EquivalentTo(new[] { objectA, objectB, objectC })); - Assert.That(ret, Has.Count.EqualTo(3)); - } + [ + Test, + Timeout(2000), + Description("Tests that the flatten function does not get stuck on circular references") + ] + public void TestCircularReference() + { + //Setup + Base objectA = new() { id = "a" }; + Base objectB = new() { id = "b" }; + Base objectC = new() { id = "c" }; + + objectA["child"] = objectB; + objectB["child"] = objectC; + objectC["child"] = objectA; + + //Flatten + var ret = objectA.Flatten().ToList(); + + //Test + Assert.That(ret, Is.Unique); + Assert.That(ret, Is.EquivalentTo(new[] { objectA, objectB, objectC })); + Assert.That(ret, Has.Count.EqualTo(3)); + } - [Test] - [Description("Tests that the flatten function correctly handles (non circular) duplicates")] - public void TestDuplicates() - { - //Setup - Base objectA = new Base() { id = "a" }; - Base objectB = new Base() { id = "b" }; + [Test, Description("Tests that the flatten function correctly handles (non circular) duplicates")] + public void TestDuplicates() + { + //Setup + Base objectA = new() { id = "a" }; + Base objectB = new() { id = "b" }; - objectA["child1"] = objectB; - objectA["child2"] = objectB; + objectA["child1"] = objectB; + objectA["child2"] = objectB; - //Flatten - var ret = objectA.Flatten().ToList(); ; + //Flatten + var ret = objectA.Flatten().ToList(); + ; - //Test - Assert.That(ret, Is.Unique); - Assert.That(ret, Is.EquivalentTo(new[] { objectA, objectB })); - Assert.That(ret, Has.Count.EqualTo(2)); - } + //Test + Assert.That(ret, Is.Unique); + Assert.That(ret, Is.EquivalentTo(new[] { objectA, objectB })); + Assert.That(ret, Has.Count.EqualTo(2)); } - } diff --git a/Core/Tests/Wrapper.cs b/Core/Tests/Wrapper.cs index d2a7d7a530..1861f06efa 100644 --- a/Core/Tests/Wrapper.cs +++ b/Core/Tests/Wrapper.cs @@ -1,56 +1,65 @@ -using NUnit.Framework; +using NUnit.Framework; using 
Speckle.Core.Credentials; -namespace Tests +namespace Tests; + +[TestFixture] +public class WrapperTests { - [TestFixture] - public class WrapperTests + [Test] + public void ParseStream() + { + var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/a75ab4f10f"); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Stream)); + } + + [Test] + public void ParseBranch() + { + var wrapperCrazy = new StreamWrapper( + "https://testing.speckle.dev/streams/4c3ce1459c/branches/%F0%9F%8D%95%E2%AC%85%F0%9F%8C%9F%20you%20wat%3F" + ); + Assert.That(wrapperCrazy.BranchName, Is.EqualTo("🍕⬅🌟 you wat?")); + Assert.That(wrapperCrazy.Type, Is.EqualTo(StreamWrapperType.Branch)); + + wrapperCrazy = new StreamWrapper( + "https://testing.speckle.dev/streams/4c3ce1459c/branches/next%20level" + ); + Assert.That(wrapperCrazy.BranchName, Is.EqualTo("next level")); + Assert.That(wrapperCrazy.Type, Is.EqualTo(StreamWrapperType.Branch)); + } + + [Test] + public void ParseObject() + { + var wrapper = new StreamWrapper( + "https://testing.speckle.dev/streams/a75ab4f10f/objects/5530363e6d51c904903dafc3ea1d2ec6" + ); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Object)); + } + + [Test] + public void ParseCommit() + { + var wrapper = new StreamWrapper( + "https://testing.speckle.dev/streams/4c3ce1459c/commits/8b9b831792" + ); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Commit)); + } + + [Test] + public void ParseGlobalAsBranch() + { + var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/0c6ad366c4/globals/"); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Branch)); + } + + [Test] + public void ParseGlobalAsCommit() { - [Test] - public void ParseStream() - { - var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/a75ab4f10f"); - Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Stream)); - } - - [Test] - public void ParseBranch() - { - var wrapperCrazy = new 
StreamWrapper("https://testing.speckle.dev/streams/4c3ce1459c/branches/%F0%9F%8D%95%E2%AC%85%F0%9F%8C%9F%20you%20wat%3F"); - Assert.That(wrapperCrazy.BranchName, Is.EqualTo("🍕⬅🌟 you wat?")); - Assert.That(wrapperCrazy.Type, Is.EqualTo(StreamWrapperType.Branch)); - - wrapperCrazy = new StreamWrapper("https://testing.speckle.dev/streams/4c3ce1459c/branches/next%20level"); - Assert.That(wrapperCrazy.BranchName, Is.EqualTo("next level")); - Assert.That(wrapperCrazy.Type, Is.EqualTo(StreamWrapperType.Branch)); - } - - [Test] - public void ParseObject() - { - var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/a75ab4f10f/objects/5530363e6d51c904903dafc3ea1d2ec6"); - Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Object)); - } - - [Test] - public void ParseCommit() - { - var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/4c3ce1459c/commits/8b9b831792"); - Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Commit)); - } - - [Test] - public void ParseGlobalAsBranch() - { - var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/0c6ad366c4/globals/"); - Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Branch)); - } - - [Test] - public void ParseGlobalAsCommit() - { - var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/0c6ad366c4/globals/abd3787893"); - Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Commit)); - } + var wrapper = new StreamWrapper( + "https://testing.speckle.dev/streams/0c6ad366c4/globals/abd3787893" + ); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Commit)); } } diff --git a/Core/Transports/DiskTransport/DiskTransport.cs b/Core/Transports/DiskTransport/DiskTransport.cs index e667227be1..7648350469 100644 --- a/Core/Transports/DiskTransport/DiskTransport.cs +++ b/Core/Transports/DiskTransport/DiskTransport.cs @@ -1,4 +1,4 @@ -using System; +using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; @@ -9,141 +9,157 @@ using 
Speckle.Core.Transports; using Speckle.Newtonsoft.Json; -namespace DiskTransport +namespace DiskTransport; + +/// +/// Writes speckle objects to disk. +/// +public class DiskTransport : ICloneable, ITransport { - /// - /// Writes speckle objects to disk. - /// - public class DiskTransport : ICloneable, ITransport + public DiskTransport(string basePath = null) { - public string TransportName { get; set; } = "Disk"; - public Dictionary TransportContext => - new Dictionary - { - { "name", TransportName }, - { "type", this.GetType().Name }, - { "basePath", RootPath }, - }; - - public CancellationToken CancellationToken { get; set; } + if (basePath == null) + basePath = Path.Combine(SpecklePathProvider.UserSpeckleFolderPath, "DiskTransportFiles"); - public Action OnProgressAction { get; set; } + RootPath = Path.Combine(basePath); - public Action OnErrorAction { get; set; } + Directory.CreateDirectory(RootPath); + } - public string RootPath { get; set; } + public string RootPath { get; set; } - public int SavedObjectCount { get; private set; } = 0; + public object Clone() + { + return new DiskTransport() + { + RootPath = RootPath, + CancellationToken = CancellationToken, + OnErrorAction = OnErrorAction, + OnProgressAction = OnProgressAction, + TransportName = TransportName + }; + } - public TimeSpan Elapsed { get; set; } = TimeSpan.Zero; + public string TransportName { get; set; } = "Disk"; - public DiskTransport(string basePath = null) + public Dictionary TransportContext => + new() { - if (basePath == null) - basePath = Path.Combine(SpecklePathProvider.UserSpeckleFolderPath, "DiskTransportFiles"); + { "name", TransportName }, + { "type", GetType().Name }, + { "basePath", RootPath } + }; - RootPath = Path.Combine(basePath); + public CancellationToken CancellationToken { get; set; } - Directory.CreateDirectory(RootPath); - } + public Action OnProgressAction { get; set; } - public void BeginWrite() - { - SavedObjectCount = 0; - } + public Action OnErrorAction { get; set; 
} - public void EndWrite() { } - - public string GetObject(string id) - { - if (CancellationToken.IsCancellationRequested) return null; // Check for cancellation + public int SavedObjectCount { get; private set; } = 0; - var filePath = Path.Combine(RootPath, id); - if (File.Exists(filePath)) - { - return File.ReadAllText(filePath, Encoding.UTF8); - } + public TimeSpan Elapsed { get; set; } = TimeSpan.Zero; - return null; - } + public void BeginWrite() + { + SavedObjectCount = 0; + } - public void SaveObject(string id, string serializedObject) - { - var stopwatch = Stopwatch.StartNew(); - if (CancellationToken.IsCancellationRequested) return; // Check for cancellation + public void EndWrite() { } - var filePath = Path.Combine(RootPath, id); - if (File.Exists(filePath)) return; + public string GetObject(string id) + { + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - File.WriteAllText(filePath, serializedObject, Encoding.UTF8); - SavedObjectCount++; - OnProgressAction?.Invoke(TransportName, SavedObjectCount); - stopwatch.Stop(); - Elapsed += stopwatch.Elapsed; - } + var filePath = Path.Combine(RootPath, id); + if (File.Exists(filePath)) + return File.ReadAllText(filePath, Encoding.UTF8); - public void SaveObject(string id, ITransport sourceTransport) - { - if (CancellationToken.IsCancellationRequested) return; // Check for cancellation + return null; + } - var serializedObject = sourceTransport.GetObject(id); - SaveObject(id, serializedObject); - } + public void SaveObject(string id, string serializedObject) + { + var stopwatch = Stopwatch.StartNew(); + if (CancellationToken.IsCancellationRequested) + return; // Check for cancellation - public async Task WriteComplete() - { + var filePath = Path.Combine(RootPath, id); + if (File.Exists(filePath)) return; - } - public async Task CopyObjectAndChildren(string id, ITransport targetTransport, Action onTotalChildrenCountKnown = null) - { - if 
(CancellationToken.IsCancellationRequested) return null; // Check for cancellation + File.WriteAllText(filePath, serializedObject, Encoding.UTF8); + SavedObjectCount++; + OnProgressAction?.Invoke(TransportName, SavedObjectCount); + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + } - var parent = GetObject(id); + public void SaveObject(string id, ITransport sourceTransport) + { + if (CancellationToken.IsCancellationRequested) + return; // Check for cancellation - targetTransport.SaveObject(id, parent); + var serializedObject = sourceTransport.GetObject(id); + SaveObject(id, serializedObject); + } - var partial = JsonConvert.DeserializeObject(parent); + public async Task WriteComplete() + { + return; + } + + public async Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action onTotalChildrenCountKnown = null + ) + { + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - if (partial.__closure == null || partial.__closure.Count == 0) return parent; + var parent = GetObject(id); - int i = 0; - foreach (var kvp in partial.__closure) - { - if (CancellationToken.IsCancellationRequested) return null; // Check for cancellation + targetTransport.SaveObject(id, parent); - var child = GetObject(kvp.Key); - targetTransport.SaveObject(kvp.Key, child); - OnProgressAction?.Invoke($"{TransportName}", i++); - } + var partial = JsonConvert.DeserializeObject(parent); + if (partial.__closure == null || partial.__closure.Count == 0) return parent; - } - public override string ToString() + int i = 0; + foreach (var kvp in partial.__closure) { - return $"Disk Transport @{RootPath}"; - } + if (CancellationToken.IsCancellationRequested) + return null; // Check for cancellation - public async Task> HasObjects(List objectIds) - { - Dictionary ret = new Dictionary(); - foreach (string objectId in objectIds) - { - var filePath = Path.Combine(RootPath, objectId); - ret[objectId] = File.Exists(filePath); - } - return ret; + var child 
= GetObject(kvp.Key); + targetTransport.SaveObject(kvp.Key, child); + OnProgressAction?.Invoke($"{TransportName}", i++); } - public object Clone() - { - return new DiskTransport() { RootPath = RootPath, CancellationToken = CancellationToken, OnErrorAction = OnErrorAction, OnProgressAction = OnProgressAction, TransportName = TransportName }; - } + return parent; + } - class Placeholder + public async Task> HasObjects(List objectIds) + { + Dictionary ret = new(); + foreach (string objectId in objectIds) { - public Dictionary __closure { get; set; } = new Dictionary(); + var filePath = Path.Combine(RootPath, objectId); + ret[objectId] = File.Exists(filePath); } + return ret; + } + + public override string ToString() + { + return $"Disk Transport @{RootPath}"; + } + + private class Placeholder + { + public Dictionary __closure { get; set; } = new(); } } diff --git a/Core/Transports/MongoDBTransport/MongoDB.cs b/Core/Transports/MongoDBTransport/MongoDB.cs index 7e1751c0e4..d309685e6d 100644 --- a/Core/Transports/MongoDBTransport/MongoDB.cs +++ b/Core/Transports/MongoDBTransport/MongoDB.cs @@ -1,278 +1,265 @@ -using System; +using System; using System.Collections.Concurrent; using System.Collections.Generic; +using System.Text; using System.Threading; using System.Threading.Tasks; using System.Timers; using MongoDB.Bson; using MongoDB.Driver; -using Serilog; using Speckle.Core.Logging; +using Timer = System.Timers.Timer; -namespace Speckle.Core.Transports +namespace Speckle.Core.Transports; + +// If data storage accessed by transports will always use the hash and content field names, move this enum to ITransport instead. +public enum Field { - // If data storage accessed by transports will always use the hash and content field names, move this enum to ITransport instead. - public enum Field - { - hash, - content - } + hash, + content +} - // Question: the benefit of noSQL is the use of unstructured collections of variable documents. 
- // Explore storing partially serialized Speckle objects with dynamically generated fields instead of just a content string? - public class MongoDBTransport : IDisposable, ITransport +// Question: the benefit of noSQL is the use of unstructured collections of variable documents. +// Explore storing partially serialized Speckle objects with dynamically generated fields instead of just a content string? +public class MongoDBTransport : IDisposable, ITransport +{ + private bool IS_WRITING = false; + private int MAX_TRANSACTION_SIZE = 1000; + private int PollInterval = 500; + + private ConcurrentQueue<(string, string, int)> Queue = new(); + + /// + /// Timer that ensures queue is consumed if less than MAX_TRANSACTION_SIZE objects are being sent. + /// + /// Is this to prevent requests to read an object before it is written, or to handle read/write locks? + /// If this is can differ per transport, better to use Database.currentOp() to determine if write operations are waiting for a lock. + private Timer WriteTimer; + + public MongoDBTransport( + string connectionString = "mongodb://localhost:27017", + string applicationName = "Speckle", + string scope = "Objects" + ) { - public string TransportName { get; set; } = "MongoTransport"; + SpeckleLog.Logger.Information("Creating new MongoDB Transport"); - public Dictionary TransportContext => - new Dictionary - { - { "name", TransportName }, - { "type", this.GetType().Name }, - }; + ConnectionString = connectionString; + Client = new MongoClient(ConnectionString); + Database = (MongoDatabaseBase)Client.GetDatabase(applicationName); + Collection = Database.GetCollection(scope); - public CancellationToken CancellationToken { get; set; } + Initialize(); - public string ConnectionString { get; set; } + WriteTimer = new Timer() + { + AutoReset = true, + Enabled = false, + Interval = PollInterval + }; + WriteTimer.Elapsed += WriteTimerElapsed; + } - private MongoClient Client { get; set; } - private IMongoDatabase Database { get; 
set; } - private IMongoCollection Collection { get; set; } + public string ConnectionString { get; set; } - private ConcurrentQueue<(string, string, int)> Queue = - new ConcurrentQueue<(string, string, int)>(); + private MongoClient Client { get; set; } + private IMongoDatabase Database { get; set; } + private IMongoCollection Collection { get; set; } - public Action OnProgressAction { get; set; } + public void Dispose() + { + // MongoDB collection connection should dispose automatically - public Action OnErrorAction { get; set; } - public int SavedObjectCount { get; private set; } + // Time out locking could be added if an expected use case is multiple clients writing to the same server + } - // not implementing this properly - public TimeSpan Elapsed => TimeSpan.Zero; + public string TransportName { get; set; } = "MongoTransport"; - /// - /// Timer that ensures queue is consumed if less than MAX_TRANSACTION_SIZE objects are being sent. - /// - /// Is this to prevent requests to read an object before it is written, or to handle read/write locks? - /// If this is can differ per transport, better to use Database.currentOp() to determine if write operations are waiting for a lock. 
- private System.Timers.Timer WriteTimer; - private int PollInterval = 500; + public Dictionary TransportContext => + new() { { "name", TransportName }, { "type", GetType().Name } }; - private bool IS_WRITING = false; - private int MAX_TRANSACTION_SIZE = 1000; + public CancellationToken CancellationToken { get; set; } - public MongoDBTransport( - string connectionString = "mongodb://localhost:27017", - string applicationName = "Speckle", - string scope = "Objects" - ) - { - SpeckleLog.Logger.Information("Creating new MongoDB Transport"); + public Action OnProgressAction { get; set; } - ConnectionString = connectionString; - Client = new MongoClient(ConnectionString); - Database = (MongoDatabaseBase)Client.GetDatabase(applicationName); - Collection = Database.GetCollection(scope); + public Action OnErrorAction { get; set; } + public int SavedObjectCount { get; private set; } - Initialize(); + // not implementing this properly + public TimeSpan Elapsed => TimeSpan.Zero; - WriteTimer = new System.Timers.Timer() - { - AutoReset = true, - Enabled = false, - Interval = PollInterval - }; - WriteTimer.Elapsed += WriteTimerElapsed; - } + public void BeginWrite() + { + SavedObjectCount = 0; + } - private void Initialize() - { - // Assumes mongoDB server is running - // Mongo database and collection should be created automatically if it doesn't already exist + public void EndWrite() { } - // Check if the connection is successful - bool isMongoLive = Database.RunCommandAsync((Command)"{ping:1}").Wait(1000); - if (!isMongoLive) - { - OnErrorAction(TransportName, new Exception("The Mongo database could not be reached.")); - } - } + public Task> HasObjects(List objectIds) + { + throw new NotImplementedException(); + } - public void BeginWrite() - { - SavedObjectCount = 0; - } + private void Initialize() + { + // Assumes mongoDB server is running + // Mongo database and collection should be created automatically if it doesn't already exist - public void EndWrite() { } + // Check 
if the connection is successful + bool isMongoLive = Database.RunCommandAsync((Command)"{ping:1}").Wait(1000); + if (!isMongoLive) + OnErrorAction(TransportName, new Exception("The Mongo database could not be reached.")); + } - #region Writes + /// + /// Returns all the objects in the store. + /// + /// + internal IEnumerable GetAllObjects() + { + var documents = Collection.Find(new BsonDocument()).ToList(); + List documentContents = new(); + foreach (BsonDocument document in documents) + documentContents.Add(document[Field.content.ToString()].AsString); + return documentContents; + } - /// - /// Awaits until write completion (ie, the current queue is fully consumed). - /// - /// - public async Task WriteComplete() - { - await Utilities.WaitUntil( + /// + /// Deletes an object. Note: do not use for any speckle object transport, as it will corrupt the database. + /// + /// + internal void DeleteObject(string hash) + { + var filter = Builders.Filter.Eq(Field.hash.ToString(), hash); + Collection.DeleteOne(filter); + } + + #region Writes + + /// + /// Awaits until write completion (ie, the current queue is fully consumed). + /// + /// + public async Task WriteComplete() + { + await Utilities + .WaitUntil( () => { return GetWriteCompletionStatus(); }, 500 - ); - } - - /// - /// Returns true if the current write queue is empty and committed. - /// - /// - /// - /// Mongo has intent shared and intent exclusive client operations. - /// Each category shares a lock, with intent exclusive operations prioritized. 
- /// Would change to Database.currentOp() to determine if write operations are waiting for a lock, if the WriteTimer is deprecated - /// - public bool GetWriteCompletionStatus() - { - Console.WriteLine($"write completion {Queue.Count == 0 && !IS_WRITING}"); - return Queue.Count == 0 && !IS_WRITING; - } - - private void WriteTimerElapsed(object sender, ElapsedEventArgs e) - { - WriteTimer.Enabled = false; - if (!IS_WRITING && Queue.Count != 0) - ConsumeQueue(); - } - - private void ConsumeQueue() - { - IS_WRITING = true; - var i = 0; - ValueTuple result; - - while (i < MAX_TRANSACTION_SIZE && Queue.TryPeek(out result)) - { - Queue.TryDequeue(out result); - var document = new BsonDocument - { - { Field.hash.ToString(), result.Item1 }, - { Field.content.ToString(), result.Item2 } - }; - Collection.InsertOne(document); - } - - if (Queue.Count > 0) - ConsumeQueue(); - - IS_WRITING = false; - } + ) + .ConfigureAwait(false); + } - /// - /// Adds an object to the saving queue. - /// - /// - /// - public void SaveObject(string hash, string serializedObject) - { - Queue.Enqueue( - (hash, serializedObject, System.Text.Encoding.UTF8.GetByteCount(serializedObject)) - ); + /// + /// Returns true if the current write queue is empty and committed. + /// + /// + /// + /// Mongo has intent shared and intent exclusive client operations. + /// Each category shares a lock, with intent exclusive operations prioritized. 
+ /// Would change to Database.currentOp() to determine if write operations are waiting for a lock, if the WriteTimer is deprecated + /// + public bool GetWriteCompletionStatus() + { + Console.WriteLine($"write completion {Queue.Count == 0 && !IS_WRITING}"); + return Queue.Count == 0 && !IS_WRITING; + } - WriteTimer.Enabled = true; - WriteTimer.Start(); - } + private void WriteTimerElapsed(object sender, ElapsedEventArgs e) + { + WriteTimer.Enabled = false; + if (!IS_WRITING && Queue.Count != 0) + ConsumeQueue(); + } - public void SaveObject(string hash, ITransport sourceTransport) - { - var serializedObject = sourceTransport.GetObject(hash); - Queue.Enqueue( - (hash, serializedObject, System.Text.Encoding.UTF8.GetByteCount(serializedObject)) - ); - } + private void ConsumeQueue() + { + IS_WRITING = true; + var i = 0; + ValueTuple result; - /// - /// Directly saves the object in the db. - /// - /// - /// - public void SaveObjectSync(string hash, string serializedObject) + while (i < MAX_TRANSACTION_SIZE && Queue.TryPeek(out result)) { + Queue.TryDequeue(out result); var document = new BsonDocument { - { Field.hash.ToString(), hash }, - { Field.content.ToString(), serializedObject } + { Field.hash.ToString(), result.Item1 }, + { Field.content.ToString(), result.Item2 } }; Collection.InsertOne(document); } - #endregion + if (Queue.Count > 0) + ConsumeQueue(); - #region Reads - - /// - /// Gets an object. - /// - /// - /// - public string GetObject(string hash) - { - var filter = Builders.Filter.Eq(Field.hash.ToString(), hash); - BsonDocument objectDocument = Collection.Find(filter).FirstOrDefault(); - if (objectDocument != null) - { - return objectDocument[Field.content.ToString()].AsString; - } + IS_WRITING = false; + } - // pass on the duty of null checks to consumers - return null; - } + /// + /// Adds an object to the saving queue. 
+ /// + /// + /// + public void SaveObject(string hash, string serializedObject) + { + Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); - public async Task CopyObjectAndChildren( - string hash, - ITransport targetTransport, - Action onTotalChildrenCountKnown = null - ) - { - throw new NotImplementedException(); - } + WriteTimer.Enabled = true; + WriteTimer.Start(); + } - #endregion + public void SaveObject(string hash, ITransport sourceTransport) + { + var serializedObject = sourceTransport.GetObject(hash); + Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + } - /// - /// Returns all the objects in the store. - /// - /// - internal IEnumerable GetAllObjects() + /// + /// Directly saves the object in the db. + /// + /// + /// + public void SaveObjectSync(string hash, string serializedObject) + { + var document = new BsonDocument { - var documents = Collection.Find(new BsonDocument()).ToList(); - List documentContents = new List(); - foreach (BsonDocument document in documents) - { - documentContents.Add(document[Field.content.ToString()].AsString); - } - return documentContents; - } + { Field.hash.ToString(), hash }, + { Field.content.ToString(), serializedObject } + }; + Collection.InsertOne(document); + } - /// - /// Deletes an object. Note: do not use for any speckle object transport, as it will corrupt the database. - /// - /// - internal void DeleteObject(string hash) - { - var filter = Builders.Filter.Eq(Field.hash.ToString(), hash); - Collection.DeleteOne(filter); - } + #endregion - public void Dispose() - { - // MongoDB collection connection should dispose automatically + #region Reads - // Time out locking could be added if an expected use case is multiple clients writing to the same server - } + /// + /// Gets an object. 
+ /// + /// + /// + public string GetObject(string hash) + { + var filter = Builders.Filter.Eq(Field.hash.ToString(), hash); + BsonDocument objectDocument = Collection.Find(filter).FirstOrDefault(); + if (objectDocument != null) + return objectDocument[Field.content.ToString()].AsString; - public Task> HasObjects(List objectIds) - { - throw new NotImplementedException(); - } + // pass on the duty of null checks to consumers + return null; } + + public async Task CopyObjectAndChildren( + string hash, + ITransport targetTransport, + Action onTotalChildrenCountKnown = null + ) + { + throw new NotImplementedException(); + } + + #endregion } diff --git a/Directory.Build.targets b/Directory.Build.targets index 762da98295..3a20134bf9 100644 --- a/Directory.Build.targets +++ b/Directory.Build.targets @@ -11,7 +11,7 @@ repository. Such as, copying a converter to the Kit folder, or cleaning up the entire monorepo. --> - + diff --git a/Objects/Tests/Tests.csproj b/Objects/Tests/Tests.csproj index 00b13a3bf0..f07a608ec2 100644 --- a/Objects/Tests/Tests.csproj +++ b/Objects/Tests/Tests.csproj @@ -1,25 +1,25 @@ - - net6.0 - false - Objects.Tests - + + net6.0 + false + Objects.Tests + - - - - - - - all - runtime; build; native; contentfiles; analyzers; buildtransitive - - - + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + - - - + + + \ No newline at end of file diff --git a/SDK.slnf b/SDK.slnf index 1bbffca40b..ff81da5312 100644 --- a/SDK.slnf +++ b/SDK.slnf @@ -2,14 +2,14 @@ "solution": { "path": "All.sln", "projects": [ - "Core/Core/Core.csproj", - "Core/IntegrationTests/TestsIntegration.csproj", - "Core/Tests/TestsUnit.csproj", - "Core/Transports/DiskTransport/DiskTransport.csproj", - "Core/Transports/MongoDBTransport/MongoDBTransport.csproj", - "DesktopUI2/DesktopUI2/DesktopUI2.csproj", - "Objects/Objects/Objects.csproj", - "Objects/Tests/Tests.csproj" + "Core\\Core\\Core.csproj", + "Core\\IntegrationTests\\TestsIntegration.csproj", + 
"Core\\Tests\\TestsUnit.csproj", + "Core\\Transports\\DiskTransport\\DiskTransport.csproj", + "Core\\Transports\\MongoDBTransport\\MongoDBTransport.csproj", + "DesktopUI2\\DesktopUI2\\DesktopUI2.csproj", + "Objects\\Objects\\Objects.csproj", + "Objects\\Tests\\Tests.csproj" ] } }