From afde379cdace8bb7863b5bade926e213c12a85e0 Mon Sep 17 00:00:00 2001 From: Adam Hathcock Date: Fri, 21 Jun 2024 08:42:42 +0100 Subject: [PATCH] trying to move core --- .csharpierrc.yaml | 7 + .editorconfig | 305 ++++++ .gitattributes | 5 + .gitignore | 21 + CONTRIBUTING.md | 50 + Directory.Build.props | 30 + Directory.Packages.props | 28 + ISSUE_TEMPLATE.md | 17 + LICENSE | 201 ++++ README.md | 112 ++- Speckle.Sdk.sln | 32 + global.json | 6 + notes/sqlite-performance.md | 356 +++++++ src/Directory.Build.props | 12 + src/MongoDBTransport/MongoDB.cs | 272 ++++++ src/MongoDBTransport/MongoDBTransport.csproj | 26 + src/Objects/.editorconfig | 10 + .../BuiltElements/AdvanceSteel/AsteelBeam.cs | 36 + .../BuiltElements/AdvanceSteel/AsteelBolt.cs | 27 + .../AdvanceSteel/AsteelGrating.cs | 18 + .../BuiltElements/AdvanceSteel/AsteelPlate.cs | 27 + .../AdvanceSteel/AsteelPolyBeam.cs | 7 + .../AdvanceSteel/AsteelSectionProfile.cs | 12 + .../AdvanceSteel/AsteelSectionProfileDB.cs | 5 + .../BuiltElements/AdvanceSteel/AsteelSlab.cs | 27 + .../AdvanceSteel/AsteelSpecialPart.cs | 18 + .../AdvanceSteel/AsteelStraightBeam.cs | 7 + .../BuiltElements/AdvanceSteel/Enums.cs | 1 + .../AdvanceSteel/IAsteelObject.cs | 10 + src/Objects/BuiltElements/Alignment.cs | 38 + .../BuiltElements/Archicad/ArchicadBeam.cs | 134 +++ .../BuiltElements/Archicad/ArchicadColumn.cs | 144 +++ .../BuiltElements/Archicad/ArchicadFloor.cs | 99 ++ .../BuiltElements/Archicad/ArchicadLevel.cs | 24 + .../BuiltElements/Archicad/ArchicadRoof.cs | 277 ++++++ .../BuiltElements/Archicad/ArchicadRoom.cs | 32 + .../BuiltElements/Archicad/ArchicadWall.cs | 123 +++ .../BuiltElements/Archicad/AssemblySegment.cs | 61 ++ .../BuiltElements/Archicad/Classification.cs | 21 + .../Archicad/ComponentProperties.cs | 50 + .../BuiltElements/Archicad/DirectShape.cs | 27 + .../BuiltElements/Archicad/ElementShape.cs | 55 ++ .../BuiltElements/Archicad/Fenestration.cs | 77 ++ .../BuiltElements/Archicad/Property.cs | 54 ++ .../BuiltElements/Archicad/PropertyGroup.cs | 48 + src/Objects/BuiltElements/Area.cs | 38 + src/Objects/BuiltElements/Beam.cs | 26 + src/Objects/BuiltElements/Brace.cs | 24 + src/Objects/BuiltElements/CableTray.cs | 18 + src/Objects/BuiltElements/Ceiling.cs | 34 + .../BuiltElements/Civil/CivilAlignment.cs | 17 + .../BuiltElements/Civil/CivilProfile.cs | 23 + src/Objects/BuiltElements/Column.cs | 26 + src/Objects/BuiltElements/Conduit.cs | 17 + src/Objects/BuiltElements/Duct.cs | 66 ++ src/Objects/BuiltElements/Featureline.cs | 31 + src/Objects/BuiltElements/Floor.cs | 36 + src/Objects/BuiltElements/GridLine.cs | 36 + src/Objects/BuiltElements/Level.cs | 29 + src/Objects/BuiltElements/Network.cs | 88 ++ src/Objects/BuiltElements/Opening.cs | 19 + src/Objects/BuiltElements/Pipe.cs | 36 + src/Objects/BuiltElements/Profile.cs | 21 + src/Objects/BuiltElements/Rebar.cs | 158 ++++ .../BuiltElements/Revit/AdaptiveComponent.cs | 40 + .../BuiltElements/Revit/BuildingPad.cs | 25 + .../BuiltElements/Revit/Curve/ModelCurves.cs | 81 ++ .../BuiltElements/Revit/DirectShape.cs | 61 ++ src/Objects/BuiltElements/Revit/Enums.cs | 180 ++++ .../BuiltElements/Revit/FamilyInstance.cs | 55 ++ .../BuiltElements/Revit/FreeformElement.cs | 142 +++ .../Revit/Interfaces/IHasMEPConnectors.cs | 8 + .../BuiltElements/Revit/MEPFamilyInstance.cs | 15 + src/Objects/BuiltElements/Revit/Parameter.cs | 46 + .../BuiltElements/Revit/ParameterUpdater.cs | 21 + .../BuiltElements/Revit/ProjectInfo.cs | 11 + src/Objects/BuiltElements/Revit/RevitBeam.cs | 38 + 
src/Objects/BuiltElements/Revit/RevitBrace.cs | 33 + .../BuiltElements/Revit/RevitCableTray.cs | 15 + .../BuiltElements/Revit/RevitCeiling.cs | 74 ++ .../BuiltElements/Revit/RevitColumn.cs | 114 +++ .../BuiltElements/Revit/RevitConduit.cs | 19 + .../Revit/RevitCurtainWallPanel.cs | 3 + src/Objects/BuiltElements/Revit/RevitDuct.cs | 163 ++++ .../BuiltElements/Revit/RevitElement.cs | 20 + .../BuiltElements/Revit/RevitElementType.cs | 40 + src/Objects/BuiltElements/Revit/RevitFloor.cs | 53 ++ src/Objects/BuiltElements/Revit/RevitLevel.cs | 57 ++ .../BuiltElements/Revit/RevitMEPConnector.cs | 17 + .../BuiltElements/Revit/RevitNetwork.cs | 76 ++ .../BuiltElements/Revit/RevitOpening.cs | 106 +++ src/Objects/BuiltElements/Revit/RevitPipe.cs | 78 ++ .../BuiltElements/Revit/RevitRailing.cs | 49 + src/Objects/BuiltElements/Revit/RevitRebar.cs | 85 ++ .../Revit/RevitRoof/RevitRoof.cs | 96 ++ src/Objects/BuiltElements/Revit/RevitStair.cs | 79 ++ .../BuiltElements/Revit/RevitTopography.cs | 22 + .../BuiltElements/Revit/RevitToposolid.cs | 43 + src/Objects/BuiltElements/Revit/RevitWall.cs | 243 +++++ src/Objects/BuiltElements/Revit/RevitWire.cs | 40 + src/Objects/BuiltElements/Revit/RevitZone.cs | 15 + .../Revit/StructuralConnectionHandler.cs | 18 + src/Objects/BuiltElements/Roof.cs | 31 + src/Objects/BuiltElements/Room.cs | 62 ++ src/Objects/BuiltElements/Space.cs | 77 ++ src/Objects/BuiltElements/Station.cs | 13 + src/Objects/BuiltElements/Structure.cs | 16 + .../TeklaStructures/BeamPosition.cs | 13 + .../BuiltElements/TeklaStructures/Bolts.cs | 43 + .../BuiltElements/TeklaStructures/Enums.cs | 86 ++ .../BuiltElements/TeklaStructures/Fitting.cs | 8 + .../TeklaStructures/TeklaBeam.cs | 59 ++ .../TeklaStructures/TeklaContourPlate.cs | 68 ++ .../TeklaStructures/TeklaModel.cs | 13 + .../TeklaStructures/TeklaOpening.cs | 18 + .../TeklaStructures/TeklaRebar.cs | 45 + .../BuiltElements/TeklaStructures/Welds.cs | 30 + src/Objects/BuiltElements/Topography.cs | 28 + src/Objects/BuiltElements/View.cs | 27 + src/Objects/BuiltElements/Wall.cs | 43 + src/Objects/BuiltElements/Wire.cs | 20 + src/Objects/BuiltElements/Zone.cs | 24 + src/Objects/EncodingOptimisations.cs | 102 ++ src/Objects/GIS/CRS.cs | 14 + src/Objects/GIS/GisFeature.cs | 43 + src/Objects/GIS/GisMultipatchGeometry.cs | 18 + src/Objects/GIS/GisPolygonGeometry.cs | 17 + src/Objects/GIS/GisPolygonGeometry3d.cs | 3 + src/Objects/GIS/GisTopography.cs | 3 + src/Objects/GIS/NonGeometryElement.cs | 10 + src/Objects/GIS/PolygonElement.cs | 13 + src/Objects/GIS/RasterElement.cs | 52 ++ src/Objects/GIS/RasterLayer.cs | 18 + src/Objects/GIS/VectorLayer.cs | 20 + src/Objects/Geometry/Arc.cs | 293 ++++++ .../Geometry/Autocad/AutocadPolycurve.cs | 86 ++ src/Objects/Geometry/Box.cs | 77 ++ src/Objects/Geometry/Brep.cs | 710 ++++++++++++++ src/Objects/Geometry/BrepEdge.cs | 58 ++ src/Objects/Geometry/BrepFace.cs | 40 + src/Objects/Geometry/BrepLoop.cs | 35 + src/Objects/Geometry/BrepTrim.cs | 65 ++ src/Objects/Geometry/Circle.cs | 98 ++ src/Objects/Geometry/ControlPoint.cs | 76 ++ src/Objects/Geometry/Curve.cs | 203 ++++ src/Objects/Geometry/Ellipse.cs | 121 +++ src/Objects/Geometry/Extrusion.cs | 41 + src/Objects/Geometry/Line.cs | 133 +++ src/Objects/Geometry/Mesh.cs | 254 +++++ src/Objects/Geometry/Plane.cs | 133 +++ src/Objects/Geometry/Point.cs | 242 +++++ src/Objects/Geometry/Pointcloud.cs | 108 +++ src/Objects/Geometry/Polycurve.cs | 166 ++++ src/Objects/Geometry/Polyline.cs | 266 ++++++ src/Objects/Geometry/PolylineExtensions.cs | 22 + 
src/Objects/Geometry/Spiral.cs | 40 + src/Objects/Geometry/Surface.cs | 258 +++++ src/Objects/Geometry/Vector.cs | 261 ++++++ src/Objects/Interfaces.cs | 114 +++ src/Objects/Objects.csproj | 36 + src/Objects/ObjectsKit.cs | 152 +++ src/Objects/Organization/DataTable.cs | 45 + .../Organization/Deprecated/Collection.cs | 11 + src/Objects/Organization/Model.cs | 72 ++ src/Objects/Other/Block.cs | 45 + src/Objects/Other/Civil/CivilDataField.cs | 20 + src/Objects/Other/DataField.cs | 27 + src/Objects/Other/Dimension.cs | 97 ++ src/Objects/Other/DisplayStyle.cs | 23 + src/Objects/Other/Hatch.cs | 43 + src/Objects/Other/Instance.cs | 164 ++++ src/Objects/Other/MappedBlockWrapper.cs | 19 + src/Objects/Other/Material.cs | 20 + src/Objects/Other/MaterialQuantity.cs | 33 + src/Objects/Other/RenderMaterial.cs | 60 ++ src/Objects/Other/Revit/RevitInstance.cs | 70 ++ src/Objects/Other/Revit/RevitMaterial.cs | 44 + src/Objects/Other/Text.cs | 19 + src/Objects/Other/Transform.cs | 465 ++++++++++ src/Objects/Primitive/Chunk.cs | 9 + src/Objects/Primitive/Interval.cs | 27 + src/Objects/Primitive/Interval2d.cs | 23 + src/Objects/Structural/Analysis/Model.cs | 63 ++ src/Objects/Structural/Analysis/ModelInfo.cs | 52 ++ .../Structural/Analysis/ModelSettings.cs | 48 + src/Objects/Structural/Analysis/ModelUnits.cs | 107 +++ src/Objects/Structural/Analysis/UnitTypes.cs | 7 + src/Objects/Structural/Axis.cs | 15 + .../Structural/CSI/Analysis/CSIStories.cs | 47 + .../Structural/CSI/Analysis/ETABSAnalysis.cs | 93 ++ .../Structural/CSI/Analysis/ETABSAreaType.cs | 37 + .../CSI/Analysis/ETABSLoadingType.cs | 7 + .../Structural/CSI/Geometry/CSIElement1D.cs | 123 +++ .../Structural/CSI/Geometry/CSIElement2D.cs | 51 + .../Structural/CSI/Geometry/CSIGridLines.cs | 16 + .../Structural/CSI/Geometry/CSINode.cs | 62 ++ .../Structural/CSI/Geometry/CSIPier.cs | 65 ++ .../Structural/CSI/Geometry/CSISpandrel.cs | 68 ++ .../Structural/CSI/Geometry/CSITendon.cs | 22 + .../Structural/CSI/Loading/CSIWindLoading.cs | 11 + .../Structural/CSI/Materials/CSIConcrete.cs | 14 + .../Structural/CSI/Materials/CSIRebar.cs | 5 + .../Structural/CSI/Materials/CSISteel.cs | 13 + .../Structural/CSI/Properties/CSIDiaphragm.cs | 19 + .../CSI/Properties/CSILinkProperty.cs | 45 + .../CSI/Properties/CSIProperty2D.cs | 265 ++++++ .../CSI/Properties/CSISpringProperty.cs | 103 ++ .../CSI/Properties/CSITendonProperty.cs | 9 + .../CSI/Properties/ETABSProperty.cs | 38 + .../GSA/Analysis/GSAAnalysisCase.cs | 44 + .../Structural/GSA/Analysis/GSAStage.cs | 43 + .../Structural/GSA/Analysis/GSATask.cs | 53 ++ .../Structural/GSA/Bridge/GSAAlignment.cs | 46 + .../Structural/GSA/Bridge/GSAInfluence.cs | 31 + .../Structural/GSA/Bridge/GSAInfluenceBeam.cs | 41 + .../Structural/GSA/Bridge/GSAInfluenceNode.cs | 42 + src/Objects/Structural/GSA/Bridge/GSAPath.cs | 62 ++ .../Structural/GSA/Bridge/GSAUserVehicle.cs | 43 + .../Structural/GSA/Geometry/GSAAssembly.cs | 67 ++ .../Structural/GSA/Geometry/GSAElement1D.cs | 81 ++ .../Structural/GSA/Geometry/GSAElement2D.cs | 43 + .../Structural/GSA/Geometry/GSAElement3D.cs | 40 + .../GSA/Geometry/GSAGeneralisedRestraint.cs | 45 + .../Structural/GSA/Geometry/GSAGridLine.cs | 19 + .../Structural/GSA/Geometry/GSAGridPlane.cs | 27 + .../Structural/GSA/Geometry/GSAGridSurface.cs | 62 ++ .../Structural/GSA/Geometry/GSAMember1D.cs | 78 ++ .../Structural/GSA/Geometry/GSAMember2D.cs | 43 + .../Structural/GSA/Geometry/GSANode.cs | 56 ++ .../GSA/Geometry/GSARigidConstraint.cs | 88 ++ .../Structural/GSA/Geometry/GSAStorey.cs | 34 + 
.../Structural/GSA/Loading/GSALoadBeam.cs | 79 ++ .../Structural/GSA/Loading/GSALoadCase.cs | 38 + .../GSA/Loading/GSALoadCombination.cs | 35 + .../Structural/GSA/Loading/GSALoadFace.cs | 43 + .../Structural/GSA/Loading/GSALoadGravity.cs | 73 ++ .../Structural/GSA/Loading/GSALoadGrid.cs | 23 + .../Structural/GSA/Loading/GSALoadGridArea.cs | 34 + .../Structural/GSA/Loading/GSALoadGridLine.cs | 35 + .../GSA/Loading/GSALoadGridPoint.cs | 31 + .../Structural/GSA/Loading/GSALoadNode.cs | 58 ++ .../GSA/Loading/GSALoadThermal2d.cs | 35 + .../Structural/GSA/Loading/GSAPolyline.cs | 36 + .../Structural/GSA/Materials/GSAConcrete.cs | 88 ++ .../Structural/GSA/Materials/GSAMaterial.cs | 48 + .../Structural/GSA/Materials/GSASteel.cs | 63 ++ .../GSA/Properties/GSAProperty1D.cs | 40 + .../GSA/Properties/GSAProperty2D.cs | 30 + src/Objects/Structural/Geometry/Axis.cs | 22 + src/Objects/Structural/Geometry/Element1D.cs | 138 +++ src/Objects/Structural/Geometry/Element2D.cs | 52 ++ src/Objects/Structural/Geometry/Element3D.cs | 50 + .../Structural/Geometry/ElementType.cs | 35 + src/Objects/Structural/Geometry/MemberType.cs | 13 + .../Structural/Geometry/MemberType1D.cs | 17 + src/Objects/Structural/Geometry/Node.cs | 71 ++ src/Objects/Structural/Geometry/Restraint.cs | 88 ++ .../Structural/Geometry/RestraintType.cs | 24 + src/Objects/Structural/Geometry/Storey.cs | 26 + src/Objects/Structural/Loading/Load.cs | 26 + src/Objects/Structural/Loading/LoadBeam.cs | 114 +++ src/Objects/Structural/Loading/LoadCase.cs | 39 + .../Structural/Loading/LoadCombination.cs | 45 + src/Objects/Structural/Loading/LoadFace.cs | 114 +++ src/Objects/Structural/Loading/LoadGravity.cs | 88 ++ src/Objects/Structural/Loading/LoadNode.cs | 70 ++ src/Objects/Structural/Loading/Loads.cs | 74 ++ src/Objects/Structural/MaterialType.cs | 17 + src/Objects/Structural/Materials/Concrete.cs | 62 ++ src/Objects/Structural/Materials/Steel.cs | 50 + .../Materials/StructuralMaterial.cs | 81 ++ src/Objects/Structural/Materials/Timber.cs | 47 + .../Properties/Profiles/SectionProfile.cs | 251 +++++ src/Objects/Structural/Properties/Property.cs | 17 + .../Structural/Properties/Property1D.cs | 38 + .../Structural/Properties/Property2D.cs | 42 + .../Structural/Properties/Property3D.cs | 33 + .../Structural/Properties/PropertyDamper.cs | 49 + .../Structural/Properties/PropertyMass.cs | 56 ++ .../Structural/Properties/PropertySpring.cs | 107 +++ src/Objects/Structural/PropertyType.cs | 79 ++ .../Structural/Results/AnalyticalResults.cs | 13 + src/Objects/Structural/Results/Result.cs | 27 + src/Objects/Structural/Results/Result1D.cs | 170 ++++ src/Objects/Structural/Results/Result2D.cs | 210 +++++ src/Objects/Structural/Results/Result3D.cs | 110 +++ src/Objects/Structural/Results/ResultAll.cs | 45 + .../Structural/Results/ResultGlobal.cs | 156 ++++ src/Objects/Structural/Results/ResultNode.cs | 211 +++++ src/Objects/Utils/MeshTriangulationHelper.cs | 273 ++++++ src/Objects/Utils/Parameters.cs | 37 + src/Speckle.Core.Transports/DiskTransport.cs | 159 ++++ .../Speckle.Core.Transports.csproj | 18 + .../packages.lock.json | 460 +++++++++ src/Speckle.Core/Api/Exceptions.cs | 104 +++ src/Speckle.Core/Api/GraphQL/.editorconfig | 10 + .../Client.ActivityOperations.cs | 67 ++ .../Client.BranchOperations.cs | 222 +++++ .../Client.CommentOperations.cs | 103 ++ .../Client.CommitOperations.cs | 161 ++++ .../Client.ObjectOperations.cs | 71 ++ .../Client.ObsoleteOperations.cs | 250 +++++ .../Client.ServerOperations.cs | 41 + .../Client.StreamOperations.cs | 513 
++++++++++ .../Client.UserOperations.cs | 99 ++ .../Client.Subscriptions.Branch.cs | 87 ++ .../Client.Subscriptions.Commit.cs | 87 ++ .../Client.Subscriptions.Stream.cs | 110 +++ src/Speckle.Core/Api/GraphQL/Client.cs | 385 ++++++++ .../GraphQL/GraphQLHttpClientExtensions.cs | 56 ++ src/Speckle.Core/Api/GraphQL/Models.cs | 583 ++++++++++++ .../Serializer/ConstantCaseEnumConverter.cs | 43 + .../Api/GraphQL/Serializer/MapConverter.cs | 88 ++ .../Serializer/NewtonsoftJsonSerializer.cs | 84 ++ .../Api/GraphQL/SubscriptionModels.cs | 91 ++ src/Speckle.Core/Api/Helpers.cs | 308 ++++++ .../Operations/Operations.Receive.Obsolete.cs | 533 +++++++++++ .../Api/Operations/Operations.Receive.cs | 206 ++++ .../Operations/Operations.Send.Obsolete.cs | 241 +++++ .../Api/Operations/Operations.Send.cs | 168 ++++ .../Api/Operations/Operations.Serialize.cs | 151 +++ src/Speckle.Core/Api/Operations/Operations.cs | 62 ++ src/Speckle.Core/Api/ServerLimits.cs | 14 + src/Speckle.Core/Credentials/Account.cs | 125 +++ .../Credentials/AccountManager.cs | 878 ++++++++++++++++++ src/Speckle.Core/Credentials/Exceptions.cs | 26 + src/Speckle.Core/Credentials/Responses.cs | 64 ++ src/Speckle.Core/Credentials/StreamWrapper.cs | 504 ++++++++++ .../Credentials/graphql.config.yml | 2 + src/Speckle.Core/Helpers/Constants.cs | 26 + src/Speckle.Core/Helpers/Crypt.cs | 56 ++ src/Speckle.Core/Helpers/Http.cs | 251 +++++ src/Speckle.Core/Helpers/Path.cs | 175 ++++ src/Speckle.Core/Helpers/State.cs | 89 ++ src/Speckle.Core/Kits/Applications.cs | 264 ++++++ src/Speckle.Core/Kits/Attributes.cs | 66 ++ .../Kits/ConverterInterfaces/IFinalizable.cs | 6 + src/Speckle.Core/Kits/Exceptions.cs | 140 +++ src/Speckle.Core/Kits/ISpeckleConverter.cs | 152 +++ src/Speckle.Core/Kits/ISpeckleKit.cs | 48 + src/Speckle.Core/Kits/KitDeclaration.cs | 30 + src/Speckle.Core/Kits/KitManager.cs | 339 +++++++ src/Speckle.Core/Kits/Units.cs | 323 +++++++ src/Speckle.Core/Logging/Analytics.cs | 347 +++++++ src/Speckle.Core/Logging/CumulativeTimer.cs | 85 ++ src/Speckle.Core/Logging/ExceptionHelpers.cs | 47 + src/Speckle.Core/Logging/LoggingHelpers.cs | 16 + src/Speckle.Core/Logging/Setup.cs | 92 ++ src/Speckle.Core/Logging/SpeckleException.cs | 38 + src/Speckle.Core/Logging/SpeckleLog.cs | 331 +++++++ .../Logging/SpeckleNonUserFacingException.cs | 17 + src/Speckle.Core/Models/ApplicationObject.cs | 140 +++ src/Speckle.Core/Models/Attributes.cs | 41 + src/Speckle.Core/Models/Base.cs | 308 ++++++ src/Speckle.Core/Models/Blob.cs | 74 ++ src/Speckle.Core/Models/Collection.cs | 48 + .../Models/CommitObjectBuilder.cs | 185 ++++ src/Speckle.Core/Models/DynamicBase.cs | 339 +++++++ .../Models/DynamicBaseMemberType.cs | 45 + .../Models/Extensions/BaseExtensions.cs | 292 ++++++ src/Speckle.Core/Models/Extras.cs | 304 ++++++ .../Models/GraphTraversal/DefaultTraversal.cs | 212 +++++ .../Models/GraphTraversal/GraphTraversal.cs | 151 +++ .../Models/GraphTraversal/ITraversalRule.cs | 52 ++ .../Models/GraphTraversal/RuleBuilder.cs | 105 +++ .../TraversalContextExtensions.cs | 59 ++ .../GraphTraversal/TraversalContexts.cs | 57 ++ .../Models/InvalidPropNameException.cs | 18 + .../Models/NestingInstructions.cs | 19 + src/Speckle.Core/Models/Utilities.cs | 249 +++++ .../Serialisation/BaseObjectDeserializerV2.cs | 403 ++++++++ .../Serialisation/BaseObjectSerializer.cs | 719 ++++++++++++++ .../Serialisation/BaseObjectSerializerV2.cs | 535 +++++++++++ .../BaseObjectSerializationUtilities.cs | 370 ++++++++ .../SerializationUtilities/CallsiteCache.cs | 45 + 
.../DeserializationWorkerThreads.cs | 108 +++ .../SerializationUtilities/OperationTask.cs | 87 ++ .../SerializationUtilities/ValueConverter.cs | 314 +++++++ .../SpeckleSerializerException.cs | 26 + src/Speckle.Core/Speckle.Core.csproj | 40 + src/Speckle.Core/Transports/Exceptions.cs | 23 + src/Speckle.Core/Transports/ITransport.cs | 124 +++ src/Speckle.Core/Transports/Memory.cs | 142 +++ src/Speckle.Core/Transports/SQLite.cs | 478 ++++++++++ src/Speckle.Core/Transports/Server.cs | 634 +++++++++++++ .../Transports/ServerUtils/GzipContent.cs | 62 ++ .../Transports/ServerUtils/IServerApi.cs | 22 + .../ServerUtils/ParallelServerAPI.cs | 305 ++++++ .../Transports/ServerUtils/ServerAPI.cs | 509 ++++++++++ src/Speckle.Core/Transports/ServerV2.cs | 445 +++++++++ .../Transports/TransportHelpers.cs | 84 ++ src/Speckle.Core/Transports/Utilities.cs | 31 + src/Speckle.Core/packages.lock.json | 440 +++++++++ tests/Speckle.Core.Tests.Integration/Api.cs | 498 ++++++++++ .../Credentials/UserServerInfoTests.cs | 87 ++ .../Fixtures.cs | 141 +++ .../GraphQLCLient.cs | 64 ++ .../ServerTransportTests.cs | 167 ++++ .../Speckle.Core.Tests.Integration.csproj | 43 + .../Subscriptions/Branches.cs | 122 +++ .../Subscriptions/Commits.cs | 161 ++++ .../Subscriptions/Streams.cs | 113 +++ .../Speckle.Core.Tests.Integration/Usings.cs | 1 + .../Api/Operations/ReceiveFromSQLite.cs | 44 + .../Api/Operations/TraverseCommit.cs | 38 + .../Speckle.Core.Tests.Performance/Program.cs | 11 + .../RegressionTestConfig.cs | 61 ++ .../DeserializationWorkerThreads.cs | 43 + .../Speckle.Core.Tests.Performance.csproj | 23 + .../TestDataHelper.cs | 54 ++ .../Api/GraphQLClient.cs | 124 +++ .../Api/HelpersTests.cs | 35 + .../Api/Operations/ClosureTests.cs | 120 +++ .../OperationsReceiveTests.Exceptional.cs | 41 + .../Api/Operations/OperationsReceiveTests.cs | 84 ++ .../Api/Operations/SendReceiveLocal.cs | 278 ++++++ .../Api/Operations/SerializationTests.cs | 279 ++++++ .../AccountServerMigrationTests.cs | 90 ++ .../Credentials/Accounts.cs | 121 +++ .../Credentials/FE2WrapperTests.cs | 68 ++ .../Credentials/StreamWrapperTests.cs | 82 ++ tests/Speckle.Core.Tests.Unit/Fixtures.cs | 55 ++ tests/Speckle.Core.Tests.Unit/Helpers/Path.cs | 92 ++ .../Kits/KitManagerTests.cs | 19 + tests/Speckle.Core.Tests.Unit/Kits/TestKit.cs | 185 ++++ .../Logging/SpeckleLogTests.cs | 130 +++ .../Models/BaseTests.cs | 238 +++++ .../Models/Extensions/BaseExtensionsTests.cs | 63 ++ .../Models/Extensions/ExceptionTests.cs | 26 + .../GraphTraversal/GraphTraversalTests.cs | 140 +++ .../TraversalContextExtensionsTests.cs | 60 ++ .../GraphTraversal/TraversalMockObjects.cs | 16 + .../Speckle.Core.Tests.Unit/Models/Hashing.cs | 85 ++ .../Models/SpeckleType.cs | 34 + .../Models/TraversalTests.cs | 102 ++ .../Models/UtilitiesTests.cs | 71 ++ .../ObjectModelDeprecationTests.cs | 35 + .../SerializerBreakingChanges.cs | 47 + .../SerializerNonBreakingChanges.cs | 292 ++++++ .../Speckle.Core.Tests.Unit.csproj | 24 + .../Transports/DiskTransportTests.cs | 36 + .../Transports/MemoryTransportTests.cs | 20 + .../Transports/SQLiteTransportTests.cs | 164 ++++ .../Transports/TransportTests.cs | 182 ++++ .../packages.lock.json | 428 +++++++++ .../Tests/Objects.Tests.Unit/GenericTests.cs | 54 ++ .../Objects.Tests.Unit/Geometry/ArcTests.cs | 23 + .../Objects.Tests.Unit/Geometry/MeshTests.cs | 44 + .../Objects.Tests.Unit/Geometry/PointTests.cs | 53 ++ .../Geometry/TransformTests.cs | 129 +++ .../Objects.Tests.Unit/NUnit_Fixtures.cs | 22 + .../Objects.Tests.Unit.csproj | 40 + 
.../Utils/MeshTriangulationHelperTests.cs | 92 ++ .../Utils/ShallowCopyTests.cs | 26 + 454 files changed, 43777 insertions(+), 2 deletions(-) create mode 100644 .csharpierrc.yaml create mode 100644 .editorconfig create mode 100644 .gitattributes create mode 100644 .gitignore create mode 100644 CONTRIBUTING.md create mode 100644 Directory.Build.props create mode 100644 Directory.Packages.props create mode 100644 ISSUE_TEMPLATE.md create mode 100644 LICENSE create mode 100644 Speckle.Sdk.sln create mode 100644 global.json create mode 100644 notes/sqlite-performance.md create mode 100644 src/Directory.Build.props create mode 100644 src/MongoDBTransport/MongoDB.cs create mode 100644 src/MongoDBTransport/MongoDBTransport.csproj create mode 100644 src/Objects/.editorconfig create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelBeam.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelBolt.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelGrating.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelPlate.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelPolyBeam.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelSectionProfile.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelSectionProfileDB.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelSlab.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelSpecialPart.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/AsteelStraightBeam.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/Enums.cs create mode 100644 src/Objects/BuiltElements/AdvanceSteel/IAsteelObject.cs create mode 100644 src/Objects/BuiltElements/Alignment.cs create mode 100644 src/Objects/BuiltElements/Archicad/ArchicadBeam.cs create mode 100644 src/Objects/BuiltElements/Archicad/ArchicadColumn.cs create mode 100644 src/Objects/BuiltElements/Archicad/ArchicadFloor.cs create mode 100644 src/Objects/BuiltElements/Archicad/ArchicadLevel.cs create mode 100644 src/Objects/BuiltElements/Archicad/ArchicadRoof.cs create mode 100644 src/Objects/BuiltElements/Archicad/ArchicadRoom.cs create mode 100644 src/Objects/BuiltElements/Archicad/ArchicadWall.cs create mode 100644 src/Objects/BuiltElements/Archicad/AssemblySegment.cs create mode 100644 src/Objects/BuiltElements/Archicad/Classification.cs create mode 100644 src/Objects/BuiltElements/Archicad/ComponentProperties.cs create mode 100644 src/Objects/BuiltElements/Archicad/DirectShape.cs create mode 100644 src/Objects/BuiltElements/Archicad/ElementShape.cs create mode 100644 src/Objects/BuiltElements/Archicad/Fenestration.cs create mode 100644 src/Objects/BuiltElements/Archicad/Property.cs create mode 100644 src/Objects/BuiltElements/Archicad/PropertyGroup.cs create mode 100644 src/Objects/BuiltElements/Area.cs create mode 100644 src/Objects/BuiltElements/Beam.cs create mode 100644 src/Objects/BuiltElements/Brace.cs create mode 100644 src/Objects/BuiltElements/CableTray.cs create mode 100644 src/Objects/BuiltElements/Ceiling.cs create mode 100644 src/Objects/BuiltElements/Civil/CivilAlignment.cs create mode 100644 src/Objects/BuiltElements/Civil/CivilProfile.cs create mode 100644 src/Objects/BuiltElements/Column.cs create mode 100644 src/Objects/BuiltElements/Conduit.cs create mode 100644 src/Objects/BuiltElements/Duct.cs create mode 100644 src/Objects/BuiltElements/Featureline.cs create mode 100644 src/Objects/BuiltElements/Floor.cs create mode 100644 src/Objects/BuiltElements/GridLine.cs 
create mode 100644 src/Objects/BuiltElements/Level.cs create mode 100644 src/Objects/BuiltElements/Network.cs create mode 100644 src/Objects/BuiltElements/Opening.cs create mode 100644 src/Objects/BuiltElements/Pipe.cs create mode 100644 src/Objects/BuiltElements/Profile.cs create mode 100644 src/Objects/BuiltElements/Rebar.cs create mode 100644 src/Objects/BuiltElements/Revit/AdaptiveComponent.cs create mode 100644 src/Objects/BuiltElements/Revit/BuildingPad.cs create mode 100644 src/Objects/BuiltElements/Revit/Curve/ModelCurves.cs create mode 100644 src/Objects/BuiltElements/Revit/DirectShape.cs create mode 100644 src/Objects/BuiltElements/Revit/Enums.cs create mode 100644 src/Objects/BuiltElements/Revit/FamilyInstance.cs create mode 100644 src/Objects/BuiltElements/Revit/FreeformElement.cs create mode 100644 src/Objects/BuiltElements/Revit/Interfaces/IHasMEPConnectors.cs create mode 100644 src/Objects/BuiltElements/Revit/MEPFamilyInstance.cs create mode 100644 src/Objects/BuiltElements/Revit/Parameter.cs create mode 100644 src/Objects/BuiltElements/Revit/ParameterUpdater.cs create mode 100644 src/Objects/BuiltElements/Revit/ProjectInfo.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitBeam.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitBrace.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitCableTray.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitCeiling.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitColumn.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitConduit.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitCurtainWallPanel.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitDuct.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitElement.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitElementType.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitFloor.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitLevel.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitMEPConnector.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitNetwork.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitOpening.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitPipe.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitRailing.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitRebar.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitRoof/RevitRoof.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitStair.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitTopography.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitToposolid.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitWall.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitWire.cs create mode 100644 src/Objects/BuiltElements/Revit/RevitZone.cs create mode 100644 src/Objects/BuiltElements/Revit/StructuralConnectionHandler.cs create mode 100644 src/Objects/BuiltElements/Roof.cs create mode 100644 src/Objects/BuiltElements/Room.cs create mode 100644 src/Objects/BuiltElements/Space.cs create mode 100644 src/Objects/BuiltElements/Station.cs create mode 100644 src/Objects/BuiltElements/Structure.cs create mode 100644 src/Objects/BuiltElements/TeklaStructures/BeamPosition.cs create mode 100644 src/Objects/BuiltElements/TeklaStructures/Bolts.cs create mode 100644 src/Objects/BuiltElements/TeklaStructures/Enums.cs create mode 100644 src/Objects/BuiltElements/TeklaStructures/Fitting.cs create mode 100644 
src/Objects/BuiltElements/TeklaStructures/TeklaBeam.cs create mode 100644 src/Objects/BuiltElements/TeklaStructures/TeklaContourPlate.cs create mode 100644 src/Objects/BuiltElements/TeklaStructures/TeklaModel.cs create mode 100644 src/Objects/BuiltElements/TeklaStructures/TeklaOpening.cs create mode 100644 src/Objects/BuiltElements/TeklaStructures/TeklaRebar.cs create mode 100644 src/Objects/BuiltElements/TeklaStructures/Welds.cs create mode 100644 src/Objects/BuiltElements/Topography.cs create mode 100644 src/Objects/BuiltElements/View.cs create mode 100644 src/Objects/BuiltElements/Wall.cs create mode 100644 src/Objects/BuiltElements/Wire.cs create mode 100644 src/Objects/BuiltElements/Zone.cs create mode 100644 src/Objects/EncodingOptimisations.cs create mode 100644 src/Objects/GIS/CRS.cs create mode 100644 src/Objects/GIS/GisFeature.cs create mode 100644 src/Objects/GIS/GisMultipatchGeometry.cs create mode 100644 src/Objects/GIS/GisPolygonGeometry.cs create mode 100644 src/Objects/GIS/GisPolygonGeometry3d.cs create mode 100644 src/Objects/GIS/GisTopography.cs create mode 100644 src/Objects/GIS/NonGeometryElement.cs create mode 100644 src/Objects/GIS/PolygonElement.cs create mode 100644 src/Objects/GIS/RasterElement.cs create mode 100644 src/Objects/GIS/RasterLayer.cs create mode 100644 src/Objects/GIS/VectorLayer.cs create mode 100644 src/Objects/Geometry/Arc.cs create mode 100644 src/Objects/Geometry/Autocad/AutocadPolycurve.cs create mode 100644 src/Objects/Geometry/Box.cs create mode 100644 src/Objects/Geometry/Brep.cs create mode 100644 src/Objects/Geometry/BrepEdge.cs create mode 100644 src/Objects/Geometry/BrepFace.cs create mode 100644 src/Objects/Geometry/BrepLoop.cs create mode 100644 src/Objects/Geometry/BrepTrim.cs create mode 100644 src/Objects/Geometry/Circle.cs create mode 100644 src/Objects/Geometry/ControlPoint.cs create mode 100644 src/Objects/Geometry/Curve.cs create mode 100644 src/Objects/Geometry/Ellipse.cs create mode 100644 src/Objects/Geometry/Extrusion.cs create mode 100644 src/Objects/Geometry/Line.cs create mode 100644 src/Objects/Geometry/Mesh.cs create mode 100644 src/Objects/Geometry/Plane.cs create mode 100644 src/Objects/Geometry/Point.cs create mode 100644 src/Objects/Geometry/Pointcloud.cs create mode 100644 src/Objects/Geometry/Polycurve.cs create mode 100644 src/Objects/Geometry/Polyline.cs create mode 100644 src/Objects/Geometry/PolylineExtensions.cs create mode 100644 src/Objects/Geometry/Spiral.cs create mode 100644 src/Objects/Geometry/Surface.cs create mode 100644 src/Objects/Geometry/Vector.cs create mode 100644 src/Objects/Interfaces.cs create mode 100644 src/Objects/Objects.csproj create mode 100644 src/Objects/ObjectsKit.cs create mode 100644 src/Objects/Organization/DataTable.cs create mode 100644 src/Objects/Organization/Deprecated/Collection.cs create mode 100644 src/Objects/Organization/Model.cs create mode 100644 src/Objects/Other/Block.cs create mode 100644 src/Objects/Other/Civil/CivilDataField.cs create mode 100644 src/Objects/Other/DataField.cs create mode 100644 src/Objects/Other/Dimension.cs create mode 100644 src/Objects/Other/DisplayStyle.cs create mode 100644 src/Objects/Other/Hatch.cs create mode 100644 src/Objects/Other/Instance.cs create mode 100644 src/Objects/Other/MappedBlockWrapper.cs create mode 100644 src/Objects/Other/Material.cs create mode 100644 src/Objects/Other/MaterialQuantity.cs create mode 100644 src/Objects/Other/RenderMaterial.cs create mode 100644 src/Objects/Other/Revit/RevitInstance.cs create mode 100644 
src/Objects/Other/Revit/RevitMaterial.cs create mode 100644 src/Objects/Other/Text.cs create mode 100644 src/Objects/Other/Transform.cs create mode 100644 src/Objects/Primitive/Chunk.cs create mode 100644 src/Objects/Primitive/Interval.cs create mode 100644 src/Objects/Primitive/Interval2d.cs create mode 100644 src/Objects/Structural/Analysis/Model.cs create mode 100644 src/Objects/Structural/Analysis/ModelInfo.cs create mode 100644 src/Objects/Structural/Analysis/ModelSettings.cs create mode 100644 src/Objects/Structural/Analysis/ModelUnits.cs create mode 100644 src/Objects/Structural/Analysis/UnitTypes.cs create mode 100644 src/Objects/Structural/Axis.cs create mode 100644 src/Objects/Structural/CSI/Analysis/CSIStories.cs create mode 100644 src/Objects/Structural/CSI/Analysis/ETABSAnalysis.cs create mode 100644 src/Objects/Structural/CSI/Analysis/ETABSAreaType.cs create mode 100644 src/Objects/Structural/CSI/Analysis/ETABSLoadingType.cs create mode 100644 src/Objects/Structural/CSI/Geometry/CSIElement1D.cs create mode 100644 src/Objects/Structural/CSI/Geometry/CSIElement2D.cs create mode 100644 src/Objects/Structural/CSI/Geometry/CSIGridLines.cs create mode 100644 src/Objects/Structural/CSI/Geometry/CSINode.cs create mode 100644 src/Objects/Structural/CSI/Geometry/CSIPier.cs create mode 100644 src/Objects/Structural/CSI/Geometry/CSISpandrel.cs create mode 100644 src/Objects/Structural/CSI/Geometry/CSITendon.cs create mode 100644 src/Objects/Structural/CSI/Loading/CSIWindLoading.cs create mode 100644 src/Objects/Structural/CSI/Materials/CSIConcrete.cs create mode 100644 src/Objects/Structural/CSI/Materials/CSIRebar.cs create mode 100644 src/Objects/Structural/CSI/Materials/CSISteel.cs create mode 100644 src/Objects/Structural/CSI/Properties/CSIDiaphragm.cs create mode 100644 src/Objects/Structural/CSI/Properties/CSILinkProperty.cs create mode 100644 src/Objects/Structural/CSI/Properties/CSIProperty2D.cs create mode 100644 src/Objects/Structural/CSI/Properties/CSISpringProperty.cs create mode 100644 src/Objects/Structural/CSI/Properties/CSITendonProperty.cs create mode 100644 src/Objects/Structural/CSI/Properties/ETABSProperty.cs create mode 100644 src/Objects/Structural/GSA/Analysis/GSAAnalysisCase.cs create mode 100644 src/Objects/Structural/GSA/Analysis/GSAStage.cs create mode 100644 src/Objects/Structural/GSA/Analysis/GSATask.cs create mode 100644 src/Objects/Structural/GSA/Bridge/GSAAlignment.cs create mode 100644 src/Objects/Structural/GSA/Bridge/GSAInfluence.cs create mode 100644 src/Objects/Structural/GSA/Bridge/GSAInfluenceBeam.cs create mode 100644 src/Objects/Structural/GSA/Bridge/GSAInfluenceNode.cs create mode 100644 src/Objects/Structural/GSA/Bridge/GSAPath.cs create mode 100644 src/Objects/Structural/GSA/Bridge/GSAUserVehicle.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAAssembly.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAElement1D.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAElement2D.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAElement3D.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAGeneralisedRestraint.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAGridLine.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAGridPlane.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAGridSurface.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAMember1D.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAMember2D.cs create mode 100644 
src/Objects/Structural/GSA/Geometry/GSANode.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSARigidConstraint.cs create mode 100644 src/Objects/Structural/GSA/Geometry/GSAStorey.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadBeam.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadCase.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadCombination.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadFace.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadGravity.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadGrid.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadGridArea.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadGridLine.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadGridPoint.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadNode.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSALoadThermal2d.cs create mode 100644 src/Objects/Structural/GSA/Loading/GSAPolyline.cs create mode 100644 src/Objects/Structural/GSA/Materials/GSAConcrete.cs create mode 100644 src/Objects/Structural/GSA/Materials/GSAMaterial.cs create mode 100644 src/Objects/Structural/GSA/Materials/GSASteel.cs create mode 100644 src/Objects/Structural/GSA/Properties/GSAProperty1D.cs create mode 100644 src/Objects/Structural/GSA/Properties/GSAProperty2D.cs create mode 100644 src/Objects/Structural/Geometry/Axis.cs create mode 100644 src/Objects/Structural/Geometry/Element1D.cs create mode 100644 src/Objects/Structural/Geometry/Element2D.cs create mode 100644 src/Objects/Structural/Geometry/Element3D.cs create mode 100644 src/Objects/Structural/Geometry/ElementType.cs create mode 100644 src/Objects/Structural/Geometry/MemberType.cs create mode 100644 src/Objects/Structural/Geometry/MemberType1D.cs create mode 100644 src/Objects/Structural/Geometry/Node.cs create mode 100644 src/Objects/Structural/Geometry/Restraint.cs create mode 100644 src/Objects/Structural/Geometry/RestraintType.cs create mode 100644 src/Objects/Structural/Geometry/Storey.cs create mode 100644 src/Objects/Structural/Loading/Load.cs create mode 100644 src/Objects/Structural/Loading/LoadBeam.cs create mode 100644 src/Objects/Structural/Loading/LoadCase.cs create mode 100644 src/Objects/Structural/Loading/LoadCombination.cs create mode 100644 src/Objects/Structural/Loading/LoadFace.cs create mode 100644 src/Objects/Structural/Loading/LoadGravity.cs create mode 100644 src/Objects/Structural/Loading/LoadNode.cs create mode 100644 src/Objects/Structural/Loading/Loads.cs create mode 100644 src/Objects/Structural/MaterialType.cs create mode 100644 src/Objects/Structural/Materials/Concrete.cs create mode 100644 src/Objects/Structural/Materials/Steel.cs create mode 100644 src/Objects/Structural/Materials/StructuralMaterial.cs create mode 100644 src/Objects/Structural/Materials/Timber.cs create mode 100644 src/Objects/Structural/Properties/Profiles/SectionProfile.cs create mode 100644 src/Objects/Structural/Properties/Property.cs create mode 100644 src/Objects/Structural/Properties/Property1D.cs create mode 100644 src/Objects/Structural/Properties/Property2D.cs create mode 100644 src/Objects/Structural/Properties/Property3D.cs create mode 100644 src/Objects/Structural/Properties/PropertyDamper.cs create mode 100644 src/Objects/Structural/Properties/PropertyMass.cs create mode 100644 src/Objects/Structural/Properties/PropertySpring.cs create mode 100644 src/Objects/Structural/PropertyType.cs create mode 
100644 src/Objects/Structural/Results/AnalyticalResults.cs create mode 100644 src/Objects/Structural/Results/Result.cs create mode 100644 src/Objects/Structural/Results/Result1D.cs create mode 100644 src/Objects/Structural/Results/Result2D.cs create mode 100644 src/Objects/Structural/Results/Result3D.cs create mode 100644 src/Objects/Structural/Results/ResultAll.cs create mode 100644 src/Objects/Structural/Results/ResultGlobal.cs create mode 100644 src/Objects/Structural/Results/ResultNode.cs create mode 100644 src/Objects/Utils/MeshTriangulationHelper.cs create mode 100644 src/Objects/Utils/Parameters.cs create mode 100644 src/Speckle.Core.Transports/DiskTransport.cs create mode 100644 src/Speckle.Core.Transports/Speckle.Core.Transports.csproj create mode 100644 src/Speckle.Core.Transports/packages.lock.json create mode 100644 src/Speckle.Core/Api/Exceptions.cs create mode 100644 src/Speckle.Core/Api/GraphQL/.editorconfig create mode 100644 src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ActivityOperations.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.BranchOperations.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommentOperations.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommitOperations.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObjectOperations.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObsoleteOperations.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ServerOperations.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.StreamOperations.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.UserOperations.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Branch.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Commit.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Stream.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Client.cs create mode 100644 src/Speckle.Core/Api/GraphQL/GraphQLHttpClientExtensions.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Models.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Serializer/ConstantCaseEnumConverter.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Serializer/MapConverter.cs create mode 100644 src/Speckle.Core/Api/GraphQL/Serializer/NewtonsoftJsonSerializer.cs create mode 100644 src/Speckle.Core/Api/GraphQL/SubscriptionModels.cs create mode 100644 src/Speckle.Core/Api/Helpers.cs create mode 100644 src/Speckle.Core/Api/Operations/Operations.Receive.Obsolete.cs create mode 100644 src/Speckle.Core/Api/Operations/Operations.Receive.cs create mode 100644 src/Speckle.Core/Api/Operations/Operations.Send.Obsolete.cs create mode 100644 src/Speckle.Core/Api/Operations/Operations.Send.cs create mode 100644 src/Speckle.Core/Api/Operations/Operations.Serialize.cs create mode 100644 src/Speckle.Core/Api/Operations/Operations.cs create mode 100644 src/Speckle.Core/Api/ServerLimits.cs create mode 100644 src/Speckle.Core/Credentials/Account.cs create mode 100644 src/Speckle.Core/Credentials/AccountManager.cs create mode 100644 src/Speckle.Core/Credentials/Exceptions.cs create mode 100644 src/Speckle.Core/Credentials/Responses.cs create mode 100644 
src/Speckle.Core/Credentials/StreamWrapper.cs create mode 100644 src/Speckle.Core/Credentials/graphql.config.yml create mode 100644 src/Speckle.Core/Helpers/Constants.cs create mode 100644 src/Speckle.Core/Helpers/Crypt.cs create mode 100644 src/Speckle.Core/Helpers/Http.cs create mode 100644 src/Speckle.Core/Helpers/Path.cs create mode 100644 src/Speckle.Core/Helpers/State.cs create mode 100644 src/Speckle.Core/Kits/Applications.cs create mode 100644 src/Speckle.Core/Kits/Attributes.cs create mode 100644 src/Speckle.Core/Kits/ConverterInterfaces/IFinalizable.cs create mode 100644 src/Speckle.Core/Kits/Exceptions.cs create mode 100644 src/Speckle.Core/Kits/ISpeckleConverter.cs create mode 100644 src/Speckle.Core/Kits/ISpeckleKit.cs create mode 100644 src/Speckle.Core/Kits/KitDeclaration.cs create mode 100644 src/Speckle.Core/Kits/KitManager.cs create mode 100644 src/Speckle.Core/Kits/Units.cs create mode 100644 src/Speckle.Core/Logging/Analytics.cs create mode 100644 src/Speckle.Core/Logging/CumulativeTimer.cs create mode 100644 src/Speckle.Core/Logging/ExceptionHelpers.cs create mode 100644 src/Speckle.Core/Logging/LoggingHelpers.cs create mode 100644 src/Speckle.Core/Logging/Setup.cs create mode 100644 src/Speckle.Core/Logging/SpeckleException.cs create mode 100644 src/Speckle.Core/Logging/SpeckleLog.cs create mode 100644 src/Speckle.Core/Logging/SpeckleNonUserFacingException.cs create mode 100644 src/Speckle.Core/Models/ApplicationObject.cs create mode 100644 src/Speckle.Core/Models/Attributes.cs create mode 100644 src/Speckle.Core/Models/Base.cs create mode 100644 src/Speckle.Core/Models/Blob.cs create mode 100644 src/Speckle.Core/Models/Collection.cs create mode 100644 src/Speckle.Core/Models/CommitObjectBuilder.cs create mode 100644 src/Speckle.Core/Models/DynamicBase.cs create mode 100644 src/Speckle.Core/Models/DynamicBaseMemberType.cs create mode 100644 src/Speckle.Core/Models/Extensions/BaseExtensions.cs create mode 100644 src/Speckle.Core/Models/Extras.cs create mode 100644 src/Speckle.Core/Models/GraphTraversal/DefaultTraversal.cs create mode 100644 src/Speckle.Core/Models/GraphTraversal/GraphTraversal.cs create mode 100644 src/Speckle.Core/Models/GraphTraversal/ITraversalRule.cs create mode 100644 src/Speckle.Core/Models/GraphTraversal/RuleBuilder.cs create mode 100644 src/Speckle.Core/Models/GraphTraversal/TraversalContextExtensions.cs create mode 100644 src/Speckle.Core/Models/GraphTraversal/TraversalContexts.cs create mode 100644 src/Speckle.Core/Models/InvalidPropNameException.cs create mode 100644 src/Speckle.Core/Models/NestingInstructions.cs create mode 100644 src/Speckle.Core/Models/Utilities.cs create mode 100644 src/Speckle.Core/Serialisation/BaseObjectDeserializerV2.cs create mode 100644 src/Speckle.Core/Serialisation/BaseObjectSerializer.cs create mode 100644 src/Speckle.Core/Serialisation/BaseObjectSerializerV2.cs create mode 100644 src/Speckle.Core/Serialisation/SerializationUtilities/BaseObjectSerializationUtilities.cs create mode 100644 src/Speckle.Core/Serialisation/SerializationUtilities/CallsiteCache.cs create mode 100644 src/Speckle.Core/Serialisation/SerializationUtilities/DeserializationWorkerThreads.cs create mode 100644 src/Speckle.Core/Serialisation/SerializationUtilities/OperationTask.cs create mode 100644 src/Speckle.Core/Serialisation/SerializationUtilities/ValueConverter.cs create mode 100644 src/Speckle.Core/Serialisation/SpeckleSerializerException.cs create mode 100644 src/Speckle.Core/Speckle.Core.csproj create mode 100644 
src/Speckle.Core/Transports/Exceptions.cs create mode 100644 src/Speckle.Core/Transports/ITransport.cs create mode 100644 src/Speckle.Core/Transports/Memory.cs create mode 100644 src/Speckle.Core/Transports/SQLite.cs create mode 100644 src/Speckle.Core/Transports/Server.cs create mode 100644 src/Speckle.Core/Transports/ServerUtils/GzipContent.cs create mode 100644 src/Speckle.Core/Transports/ServerUtils/IServerApi.cs create mode 100644 src/Speckle.Core/Transports/ServerUtils/ParallelServerAPI.cs create mode 100644 src/Speckle.Core/Transports/ServerUtils/ServerAPI.cs create mode 100644 src/Speckle.Core/Transports/ServerV2.cs create mode 100644 src/Speckle.Core/Transports/TransportHelpers.cs create mode 100644 src/Speckle.Core/Transports/Utilities.cs create mode 100644 src/Speckle.Core/packages.lock.json create mode 100644 tests/Speckle.Core.Tests.Integration/Api.cs create mode 100644 tests/Speckle.Core.Tests.Integration/Credentials/UserServerInfoTests.cs create mode 100644 tests/Speckle.Core.Tests.Integration/Fixtures.cs create mode 100644 tests/Speckle.Core.Tests.Integration/GraphQLCLient.cs create mode 100644 tests/Speckle.Core.Tests.Integration/ServerTransportTests.cs create mode 100644 tests/Speckle.Core.Tests.Integration/Speckle.Core.Tests.Integration.csproj create mode 100644 tests/Speckle.Core.Tests.Integration/Subscriptions/Branches.cs create mode 100644 tests/Speckle.Core.Tests.Integration/Subscriptions/Commits.cs create mode 100644 tests/Speckle.Core.Tests.Integration/Subscriptions/Streams.cs create mode 100644 tests/Speckle.Core.Tests.Integration/Usings.cs create mode 100644 tests/Speckle.Core.Tests.Performance/Api/Operations/ReceiveFromSQLite.cs create mode 100644 tests/Speckle.Core.Tests.Performance/Api/Operations/TraverseCommit.cs create mode 100644 tests/Speckle.Core.Tests.Performance/Program.cs create mode 100644 tests/Speckle.Core.Tests.Performance/RegressionTestConfig.cs create mode 100644 tests/Speckle.Core.Tests.Performance/Serialisation/DeserializationWorkerThreads.cs create mode 100644 tests/Speckle.Core.Tests.Performance/Speckle.Core.Tests.Performance.csproj create mode 100644 tests/Speckle.Core.Tests.Performance/TestDataHelper.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Api/GraphQLClient.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Api/HelpersTests.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Api/Operations/ClosureTests.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Api/Operations/OperationsReceiveTests.Exceptional.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Api/Operations/OperationsReceiveTests.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Api/Operations/SendReceiveLocal.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Api/Operations/SerializationTests.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Credentials/AccountServerMigrationTests.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Credentials/Accounts.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Credentials/FE2WrapperTests.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Credentials/StreamWrapperTests.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Fixtures.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Helpers/Path.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Kits/KitManagerTests.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Kits/TestKit.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Logging/SpeckleLogTests.cs create mode 100644 tests/Speckle.Core.Tests.Unit/Models/BaseTests.cs create mode 100644 
tests/Speckle.Core.Tests.Unit/Models/Extensions/BaseExtensionsTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Models/Extensions/ExceptionTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/GraphTraversalTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/TraversalContextExtensionsTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/TraversalMockObjects.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Models/Hashing.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Models/SpeckleType.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Models/TraversalTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Models/UtilitiesTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Serialisation/ObjectModelDeprecationTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Serialisation/SerializerBreakingChanges.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Serialisation/SerializerNonBreakingChanges.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Speckle.Core.Tests.Unit.csproj
create mode 100644 tests/Speckle.Core.Tests.Unit/Transports/DiskTransportTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Transports/MemoryTransportTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Transports/SQLiteTransportTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/Transports/TransportTests.cs
create mode 100644 tests/Speckle.Core.Tests.Unit/packages.lock.json
create mode 100644 tests/Tests/Objects.Tests.Unit/GenericTests.cs
create mode 100644 tests/Tests/Objects.Tests.Unit/Geometry/ArcTests.cs
create mode 100644 tests/Tests/Objects.Tests.Unit/Geometry/MeshTests.cs
create mode 100644 tests/Tests/Objects.Tests.Unit/Geometry/PointTests.cs
create mode 100644 tests/Tests/Objects.Tests.Unit/Geometry/TransformTests.cs
create mode 100644 tests/Tests/Objects.Tests.Unit/NUnit_Fixtures.cs
create mode 100644 tests/Tests/Objects.Tests.Unit/Objects.Tests.Unit.csproj
create mode 100644 tests/Tests/Objects.Tests.Unit/Utils/MeshTriangulationHelperTests.cs
create mode 100644 tests/Tests/Objects.Tests.Unit/Utils/ShallowCopyTests.cs

diff --git a/.csharpierrc.yaml b/.csharpierrc.yaml
new file mode 100644
index 00000000..152a7153
--- /dev/null
+++ b/.csharpierrc.yaml
@@ -0,0 +1,7 @@
+printWidth: 120
+useTabs: false
+tabWidth: 2
+preprocessorSymbolSets:
+  - ""
+  - "DEBUG"
+  - "DEBUG,CODE_STYLE"
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 00000000..aad43bcb
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,305 @@
+root = true
+# Don't use tabs for indentation.
+[*]
+indent_style = space
+
+# Microsoft .NET properties
+csharp_using_directive_placement = outside_namespace:silent
+
+dotnet_style_parentheses_in_arithmetic_binary_operators = never_if_unnecessary:none
+dotnet_style_parentheses_in_other_binary_operators = never_if_unnecessary:none
+dotnet_style_parentheses_in_relational_binary_operators = never_if_unnecessary:none
+
+
+# Standard properties
+insert_final_newline = true
+
+# (Please don't specify an indent_size here; that has too many unintended consequences.)
+
+# Code files
+[*.{cs,csx,vb,vbx}]
+indent_size = 2
+charset = utf-8
+
+# Xml project files
+[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj}]
+indent_size = 2
+space_after_last_pi_attribute = false
+# Xml config files
+[*.{props,targets,ruleset,config,nuspec,resx,vsixmanifest,vsct}]
+indent_size = 2
+space_after_last_pi_attribute = false
+
+# JSON files
+[*.json]
+indent_size = 2
+
+# Dotnet code style settings:
+[*.{cs,vb}]
+# Sort using and Import directives with System.* appearing first
+dotnet_sort_system_directives_first = true
+dotnet_separate_import_directive_groups = false
+
+# Avoid "this." and "Me." if not necessary
+dotnet_style_qualification_for_field = false:suggestion
+dotnet_style_qualification_for_property = false:suggestion
+dotnet_style_qualification_for_method = false:suggestion
+dotnet_style_qualification_for_event = false:suggestion
+
+# Use language keywords instead of framework type names for type references
+dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion
+dotnet_style_predefined_type_for_member_access = true:suggestion
+# Parentheses preferences
+dotnet_style_parentheses_in_arithmetic_binary_operators = never_if_unnecessary:silent
+dotnet_style_parentheses_in_relational_binary_operators = never_if_unnecessary:silent
+dotnet_style_parentheses_in_other_binary_operators = never_if_unnecessary:silent
+dotnet_style_parentheses_in_other_operators = never_if_unnecessary:silent
+
+# Modifier preferences
+dotnet_style_require_accessibility_modifiers = for_non_interface_members:silent
+dotnet_style_readonly_field = true:suggestion
+
+# Expression-level preferences
+dotnet_style_object_initializer = true:suggestion
+dotnet_style_collection_initializer = true:suggestion
+dotnet_style_coalesce_expression = true:suggestion
+dotnet_style_null_propagation = true:suggestion
+dotnet_style_explicit_tuple_names = true:suggestion
+dotnet_style_prefer_is_null_check_over_reference_equality_method = true:silent
+dotnet_style_prefer_inferred_tuple_names = true:suggestion
+dotnet_style_prefer_inferred_anonymous_type_member_names = true:suggestion
+dotnet_style_prefer_auto_properties = true:warning
+dotnet_style_prefer_conditional_expression_over_assignment = true:silent
+dotnet_style_prefer_conditional_expression_over_return = true:silent
+
+
+# CSharp code style settings:
+[*.cs]
+# Prefer "var" everywhere
+csharp_style_var_elsewhere = false:none
+csharp_style_var_for_built_in_types = false:none
+csharp_style_var_when_type_is_apparent = false:none
+
+# Prefer method-like constructs to have a block body
+csharp_style_expression_bodied_methods = true:suggestion
+csharp_style_expression_bodied_constructors = false:suggestion
+csharp_style_expression_bodied_operators = true:suggestion
+
+# Prefer property-like constructs to have an expression-body
+csharp_style_expression_bodied_properties = true:suggestion
+csharp_style_expression_bodied_indexers = true:suggestion
+csharp_style_expression_bodied_accessors = true:suggestion
+
+# Suggest more modern language features when available
+csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion
+csharp_style_pattern_matching_over_as_with_null_check = true:suggestion
+csharp_style_inlined_variable_declaration = true:suggestion
+csharp_style_throw_expression = true:suggestion
+csharp_style_conditional_delegate_call = true:suggestion
+csharp_style_namespace_declarations = file_scoped
+
+# Newline settings
+csharp_new_line_before_open_brace = all
+csharp_new_line_before_else = true
+csharp_new_line_before_catch = true
+csharp_new_line_before_finally = true
+csharp_new_line_before_members_in_object_initializers = true
+csharp_new_line_before_members_in_anonymous_types = true
+
+# Space preferences
+csharp_space_after_cast = false
+csharp_space_after_keywords_in_control_flow_statements = true
+csharp_space_between_method_call_parameter_list_parentheses = false
+csharp_space_between_method_declaration_parameter_list_parentheses = false
+csharp_space_between_parentheses = false
+csharp_space_before_colon_in_inheritance_clause = true
+csharp_space_after_colon_in_inheritance_clause = true
+csharp_space_around_binary_operators = before_and_after
+csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
+csharp_space_between_method_call_name_and_opening_parenthesis = false
+csharp_space_between_method_call_empty_parameter_list_parentheses = false
+
+# Wrapping preferences
+csharp_preserve_single_line_statements = true
+csharp_preserve_single_line_blocks = true
+
+
+
+# SYMBOL NAMING RULES
+# Copied from https://github.com/dotnet/roslyn/blob/main/.editorconfig
+# Adapted rules:
+# - Constants are ALL_UPPER
+# - Non-private fields are PascalCase
+
+# Non-private fields are PascalCase
+dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.severity = warning
+dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.symbols = non_private_readonly_fields
+dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.style = non_private_readonly_field_style
+
+dotnet_naming_symbols.non_private_readonly_fields.applicable_kinds = field
+dotnet_naming_symbols.non_private_readonly_fields.applicable_accessibilities = public, protected, internal, protected_internal, private_protected
+
+dotnet_naming_style.non_private_readonly_field_style.capitalization = pascal_case
+
+# Constants are ALL_UPPER
+dotnet_naming_rule.constants_should_be_all_upper.severity = warning
+dotnet_naming_rule.constants_should_be_all_upper.symbols = constants
+dotnet_naming_rule.constants_should_be_all_upper.style = constant_style
+
+dotnet_naming_symbols.constants.applicable_kinds = field, local
+dotnet_naming_symbols.constants.required_modifiers = const
+
+dotnet_naming_style.constant_style.capitalization = all_upper
+
+# Private static fields are camelCase and start with s_
+dotnet_naming_rule.static_fields_should_be_camel_case.severity = warning
+dotnet_naming_rule.static_fields_should_be_camel_case.symbols = static_fields
+dotnet_naming_rule.static_fields_should_be_camel_case.style = static_field_style
+
+dotnet_naming_symbols.static_fields.applicable_accessibilities = private
+dotnet_naming_symbols.static_fields.applicable_kinds = field
+dotnet_naming_symbols.static_fields.required_modifiers = static
+
+dotnet_naming_style.static_field_style.capitalization = camel_case
+dotnet_naming_style.static_field_style.required_prefix = s_
+
+
+# Instance fields are camelCase and start with _
+dotnet_naming_rule.instance_fields_should_be_camel_case.severity = warning
+dotnet_naming_rule.instance_fields_should_be_camel_case.symbols = instance_fields
+dotnet_naming_rule.instance_fields_should_be_camel_case.style = instance_field_style
+
+dotnet_naming_symbols.instance_fields.applicable_kinds = field
+
+dotnet_naming_style.instance_field_style.capitalization = camel_case
+dotnet_naming_style.instance_field_style.required_prefix = _
+
+# Locals and parameters are camelCase
+dotnet_naming_rule.locals_should_be_camel_case.severity = warning
+dotnet_naming_rule.locals_should_be_camel_case.symbols = locals_and_parameters +dotnet_naming_rule.locals_should_be_camel_case.style = camel_case_style + +dotnet_naming_symbols.locals_and_parameters.applicable_kinds = parameter, local + +dotnet_naming_style.camel_case_style.capitalization = camel_case + +# Local functions are PascalCase +dotnet_naming_rule.local_functions_should_be_pascal_case.severity = warning +dotnet_naming_rule.local_functions_should_be_pascal_case.symbols = local_functions +dotnet_naming_rule.local_functions_should_be_pascal_case.style = local_function_style + +dotnet_naming_symbols.local_functions.applicable_kinds = local_function + +dotnet_naming_style.local_function_style.capitalization = pascal_case + +# By default, name items with PascalCase +dotnet_naming_rule.members_should_be_pascal_case.severity = warning +dotnet_naming_rule.members_should_be_pascal_case.symbols = all_members +dotnet_naming_rule.members_should_be_pascal_case.style = pascal_case_style + +dotnet_naming_symbols.all_members.applicable_kinds = * + +dotnet_naming_style.pascal_case_style.capitalization = pascal_case + + +# Analyzer settings +dotnet_analyzer_diagnostic.category-Style.severity = warning # All rules will use this severity unless overridden +dotnet_diagnostic.ide0055.severity = none # Formatting rule: Incompatible with CSharpier +dotnet_diagnostic.ide0007.severity = none # Use var instead of explicit type: Preference +dotnet_diagnostic.ide0009.severity = none # Add this or Me qualification: Preference +dotnet_diagnostic.ide0200.severity = none # Remove unnecessary lambda expression: may be performance reasons not to +dotnet_diagnostic.ide0058.severity = none # Remove unnecessary expression value: Subjective +dotnet_diagnostic.ide0010.severity = none # Add missing cases to switch statement: Too verbose +dotnet_diagnostic.ide0001.severity = suggestion # Name can be simplified: Non enforceable in build +dotnet_diagnostic.ide0046.severity = suggestion # Use conditional expression for return: Subjective +dotnet_diagnostic.ide0045.severity = suggestion # Use conditional expression for assignment: Subjective +dotnet_diagnostic.ide0078.severity = suggestion # Use pattern matching: Subjective +dotnet_diagnostic.ide0260.severity = suggestion # Use pattern matching: Subjective +dotnet_diagnostic.ide0022.severity = suggestion # Use expression body for method: Subjective +dotnet_diagnostic.ide0061.severity = suggestion # Use expression body for local functions: Subjective +dotnet_diagnostic.ide0063.severity = suggestion # Using directive can be simplified +dotnet_diagnostic.ide0066.severity = suggestion # Use switch expression: Subjective +dotnet_diagnostic.ide0029.severity = suggestion # Null check can be simplified: Subjective +dotnet_diagnostic.ide0030.severity = suggestion # Null check can be simplified: Subjective +dotnet_diagnostic.ide0270.severity = suggestion # Null check can be simplified: Subjective +dotnet_diagnostic.ide0042.severity = suggestion # Deconstruct variable declaration: Subjective +dotnet_diagnostic.ide0039.severity = suggestion # Use local function instead of lambda: Subjective
+dotnet_diagnostic.ide0028.severity = suggestion # Use collection initializers: Subjective +dotnet_diagnostic.ide0072.severity = suggestion # Populate switch statement: Subjective +dotnet_diagnostic.ide0074.severity = suggestion # Use compound assignment: Subjective + +# Maintainability rules +dotnet_diagnostic.ca1501.severity = warning # Avoid excessive inheritance +dotnet_diagnostic.ca1502.severity = warning # Avoid excessive complexity +dotnet_diagnostic.ca1505.severity = warning # Avoid unmaintainable code +dotnet_diagnostic.ca1506.severity = warning # Avoid excessive class coupling +dotnet_diagnostic.ca1507.severity = warning # Use nameof in place of string +dotnet_diagnostic.ca1508.severity = warning # Avoid dead conditional code +dotnet_diagnostic.ca1509.severity = warning # Invalid entry in code metrics configuration file +dotnet_diagnostic.ca1861.severity = none # Prefer 'static readonly' fields over constant array arguments if the called method is called repeatedly and is not mutating the passed array (https://learn.microsoft.com/dotnet/fundamentals/code-analysis/quality-rules/ca1861) + + +# Performance rules +dotnet_diagnostic.ca1849.severity = suggestion # Call async methods when in an async method: May decrease performance +dotnet_diagnostic.ca1822.severity = suggestion # Mark member as static +dotnet_diagnostic.ca1859.severity = suggestion # Use concrete types when possible for improved performance + +# Design rule +dotnet_diagnostic.ca1002.severity = suggestion # Do not expose generic lists +dotnet_diagnostic.ca1051.severity = warning # Do not declare visible instance fields +dotnet_diagnostic.ca1056.severity = suggestion # URI properties should not be strings +dotnet_diagnostic.ca1062.severity = none # Public method must check all parameters for null + +# Naming +dotnet_diagnostic.ca1707.severity = none # Remove underscores in names + +# Usage +dotnet_diagnostic.ca2227.severity = suggestion # Collection props should be read-only + +dotnet_code_quality.ca1051.exclude_structs = true # CA1051 is excluded in structs +dotnet_code_quality.dispose_ownership_transfer_at_constructor = true # CA2000 has a lot of false positives without this +dotnet_code_quality.dispose_ownership_transfer_at_method_call = true # CA2000 has a lot of false positives without this +dotnet_code_quality.dispose_analysis_kind = NonExceptionPathsOnlyNotDisposed # CA2000 has a lot of false positives without this + +# NUnit +dotnet_diagnostic.NUnit2001.severity = warning # Consider using Assert.That(expr, Is.False) instead of Assert.False(expr) +dotnet_diagnostic.NUnit2002.severity = warning # Consider using Assert.That(expr, Is.False) instead of Assert.IsFalse(expr) +dotnet_diagnostic.NUnit2003.severity = warning # Consider using Assert.That(expr, Is.True) instead of Assert.IsTrue(expr) +dotnet_diagnostic.NUnit2004.severity = warning # Consider using Assert.That(expr, Is.True) instead of Assert.True(expr) +dotnet_diagnostic.NUnit2005.severity = warning # Consider using Assert.That(actual, Is.EqualTo(expected)) instead of Assert.AreEqual(expected, actual) +dotnet_diagnostic.NUnit2006.severity = warning # Consider using Assert.That(actual, Is.Not.EqualTo(expected)) instead of Assert.AreNotEqual(expected, actual) + +dotnet_diagnostic.NUnit2010.severity = warning # Use EqualConstraint for better assertion messages in case of
failure +dotnet_diagnostic.NUnit2011.severity = warning # Use ContainsConstraint for better assertion messages in case of failure +dotnet_diagnostic.NUnit2012.severity = warning # Use StartsWithConstraint for better assertion messages in case of failure +dotnet_diagnostic.NUnit2013.severity = warning # Use EndsWithConstraint for better assertion messages in case of failure +dotnet_diagnostic.NUnit2014.severity = warning # Use SomeItemsConstraint for better assertion messages in case of failure + +dotnet_diagnostic.NUnit2015.severity = warning # Consider using Assert.That(actual, Is.SameAs(expected)) instead of Assert.AreSame(expected, actual) +dotnet_diagnostic.NUnit2016.severity = warning # Consider using Assert.That(expr, Is.Null) instead of Assert.Null(expr) +dotnet_diagnostic.NUnit2017.severity = warning # Consider using Assert.That(expr, Is.Null) instead of Assert.IsNull(expr) +dotnet_diagnostic.NUnit2018.severity = warning # Consider using Assert.That(expr, Is.Not.Null) instead of Assert.NotNull(expr) +dotnet_diagnostic.NUnit2028.severity = warning # Consider using Assert.That(actual, Is.GreaterThanOrEqualTo(expected)) instead of Assert.GreaterOrEqual(actual, expected) +dotnet_diagnostic.NUnit2027.severity = warning # Consider using Assert.That(actual, Is.GreaterThan(expected)) instead of Assert.Greater(actual, expected) +dotnet_diagnostic.NUnit2029.severity = warning # Consider using Assert.That(actual, Is.LessThan(expected)) instead of Assert.Less(actual, expected) +dotnet_diagnostic.NUnit2030.severity = warning # Consider using Assert.That(actual, Is.LessThanOrEqualTo(expected)) instead of Assert.LessOrEqual(actual, expected) +dotnet_diagnostic.NUnit2031.severity = warning # Consider using Assert.That(actual, Is.Not.SameAs(expected)) instead of Assert.AreNotSame(expected, actual) +dotnet_diagnostic.NUnit2032.severity = warning # Consider using Assert.That(expr, Is.Zero) instead of Assert.Zero(expr) +dotnet_diagnostic.NUnit2033.severity = warning # Consider using Assert.That(expr, Is.Not.Zero) instead of Assert.NotZero(expr) +dotnet_diagnostic.NUnit2034.severity = warning # Consider using Assert.That(expr, Is.NaN) instead of Assert.IsNaN(expr) +dotnet_diagnostic.NUnit2035.severity = warning # Consider using Assert.That(collection, Is.Empty) instead of Assert.IsEmpty(collection) +dotnet_diagnostic.NUnit2036.severity = warning # Consider using Assert.That(collection, Is.Not.Empty) instead of Assert.IsNotEmpty(collection) +dotnet_diagnostic.NUnit2037.severity = warning # Consider using Assert.That(collection, Does.Contain(instance)) instead of Assert.Contains(instance, collection) +dotnet_diagnostic.NUnit2038.severity = warning # Consider using Assert.That(actual, Is.InstanceOf(expected)) instead of Assert.IsInstanceOf(expected, actual) +dotnet_diagnostic.NUnit2039.severity = warning # Consider using Assert.That(actual, Is.Not.InstanceOf(expected)) instead of Assert.IsNotInstanceOf(expected, actual) + +[*.{appxmanifest,asax,ascx,aspx,axaml,build,c,c++,cc,cginc,compute,cp,cpp,cs,cshtml,cu,cuh,cxx,dtd,fs,fsi,fsscript,fsx,fx,fxh,h,hh,hlsl,hlsli,hlslinc,hpp,hxx,inc,inl,ino,ipp,ixx,master,ml,mli,mpp,mq4,mq5,mqh,nuspec,paml,razor,resw,resx,shader,skin,tpp,usf,ush,vb,xaml,xamlx,xoml,xsd}] +indent_style = space +indent_size = 2 +tab_width = 2 diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..0a96b77c --- /dev/null +++ b/.gitattributes @@ -0,0 +1,5 @@ +# Set the default behavior, in case people don't have core.autocrlf set.
+* text=auto + +# need original files to be windows +*.txt text eol=crlf \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..d6a5a1d1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,21 @@ +**/bin/* +**/obj/* +_ReSharper.SharpCompress/ +bin/ +*.suo +*.user +TestArchives/Scratch/ +TestArchives/Scratch2/ +TestResults/ +*.nupkg +packages/*/ +project.lock.json +tests/TestArchives/Scratch +.vs +tools +.vscode +.idea/ + +.DS_Store +*.snupkg +/tests/TestArchives/6d23a38c-f064-4ef1-ad89-b942396f53b9/Scratch diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..b7fa3e30 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,50 @@ +# Speckle Contribution Guidelines + +## Introduction + +Thank you for reading this! Speckle's a rather wide network of parts that depend on each other, either directly, indirectly or even just cosmetically. + +> **Speckle** is quite a large ecosystem of moving parts. Any changes may have unintended effects that can quickly cause problems for many people (and processes) that rely on Speckle. + +This means that what might look like a simple, quick change in one repo may have a big hidden cost that propagates around other parts of the project. We're all here to help each other, and this guide is meant to help you get started and promote a framework that can untangle all these dependencies through discussion! + +## Bugs & Issues 🐞 + +### Found a new bug? + +- The first step is to check whether this is a new bug! We encourage you to search through the issues of the project in question **and** associated repos! + +- If you come up with nothing, **open a new issue with a clear title and description**, and as much relevant information as possible: system configuration, code samples & steps to reproduce the problem. + +- Can't mention this often enough: tell us how to reproduce the problem! Issues without reproduction steps will be ignored or flagged as such. + +- Try to reference & note all potentially affected projects. + +### Sending a PR for Bug Fixes + +You fixed something! Great! We hope you logged it first :) Make sure, though, that your implementation also covers the lateral thinking needed for a bug report, as described above! If there are any tests, make sure they all pass. If there are none, it means they're missing - so add them! + +## New Features 🎉 + +The golden rule is to Discuss First! + +- Before embarking on adding a new feature, suggest it first as an issue with the `enhancement` label and/or title - this will allow relevant people to pitch in +- We'll then discuss your requirements and see how and if they fit within the Speckle ecosystem. +- The last step is to actually start writing code & submit a PR so we can follow along! +- All new features should, if and where possible, come with tests. We won't merge without! + +> Many clients may potentially have overlapping scopes, some features might already be in development somewhere else, or might have been postponed to the next major release due to API instability in that area. For example, adding a delete stream button to the accounts panel in Rhino: this feature was planned for Speckle Admin, and the whole functionality of the accounts panel in Rhino is to be greatly reduced! + +## Cosmetic Patches ✨ + +Changes that are cosmetic in nature and do not add anything substantial to the stability or functionality of Speckle **will generally not be accepted**. + +Why?
However trivial the changes might seem, there might be subtle reasons for the original code to be as it is. Furthermore, there are a lot of potential hidden costs (that even maintainers themselves are not fully aware of!) and they eat up review time unnecessarily. + +> **Examples**: modifying the colour of a UI element in one client may have a big hidden cost and need propagation in several other clients that implement a similar UI element. Changing the default port or specifying `localhost` instead of `0.0.0.0` breaks cross-VM debugging and developing. + +## Wrap up + +Don't worry if you get things wrong. We all do, including project owners: this document should've been here a long time ago. There's plenty of room for discussion on our community [forum](https://discourse.speckle.works). + +🙌❤️💙💚💜🙌 diff --git a/Directory.Build.props b/Directory.Build.props new file mode 100644 index 00000000..465817cf --- /dev/null +++ b/Directory.Build.props @@ -0,0 +1,30 @@ + + + Speckle + Speckle + Copyright (c) AEC Systems Ltd + + + + latest + enable + enable + Recommended + true + true + true + true + true + False + False + true + true + + + + preview.0 + + + + + diff --git a/Directory.Packages.props b/Directory.Packages.props new file mode 100644 index 00000000..1b4645b0 --- /dev/null +++ b/Directory.Packages.props @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md new file mode 100644 index 00000000..715765d3 --- /dev/null +++ b/ISSUE_TEMPLATE.md @@ -0,0 +1,17 @@ +If it's your first time here - or you forgot about them - make sure you read the [contribution guidelines](CONTRIBUTING.md), and then feel free to delete this line! + +### Expected vs. Actual Behavior + +Describe the problem here. + +### Reproduction Steps & System Config (win, osx, web, etc.) + +Let us know how we can reproduce this, and attach relevant files (if any). + +### Proposed Solution (if any) + +Let us know how you would solve this. + +#### Optional: Affected Projects + +Does this issue propagate to other dependencies or dependents? If so, list them here! diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..f83b45e3 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files.
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 AEC Systems + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index 234b90f0..20fe2c06 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,110 @@ -# speckle-sharp-sdk -The speckle core +# Core + +[![Twitter Follow](https://img.shields.io/twitter/follow/SpeckleSystems?style=social)](https://twitter.com/SpeckleSystems) [![Community forum users](https://img.shields.io/discourse/users?server=https%3A%2F%2Fdiscourse.speckle.works&style=flat-square&logo=discourse&logoColor=white)](https://discourse.speckle.works) [![website](https://img.shields.io/badge/https://-speckle.systems-royalblue?style=flat-square)](https://speckle.systems) [![docs](https://img.shields.io/badge/docs-speckle.guide-orange?style=flat-square&logo=read-the-docs&logoColor=white)](https://speckle.guide/dev/) + +### **Disclaimer** + +This is an early alpha release, not meant for use in production! We're working to stabilise the 2.0 API, and until then there will be breaking changes. You have been warned! + +## Introduction + +### Core + +Core is the .NET SDK for Speckle 2.0. 
It uses .NET Standard 2.0 and has been tested on Windows and MacOS. + +## Documentation + +Comprehensive developer and user documentation can be found in our: + +#### 📚 [Speckle Docs website](https://speckle.guide/dev/) + +## Developing & Debugging + +### Building + +Make sure you clone this repository together with its submodules: `git clone https://github.com/specklesystems/Core.git --recursive`. +Afterwards, just restore all the NuGet packages and hit Build! + +### Developing + +This project is evolving fast; to better understand how to use Core, we suggest checking out the Unit and Integration tests. Running the integration tests locally requires a local server running on your computer. + +We'll also be adding [preliminary documentation on our forum](https://discourse.speckle.works/c/speckle-insider/10). + +### Tests + +There are two test projects, one for unit tests and one for integration tests. The latter needs a server running locally in order to run. + +## Contributing + +Before embarking on submitting a patch, please make sure you read: + +- [Contribution Guidelines](CONTRIBUTING.md), +- [Code of Conduct](CODE_OF_CONDUCT.md) + +## Community + +The Speckle Community hangs out on [the forum](https://discourse.speckle.works), do join and introduce yourself & feel free to ask us questions! + +## License + +Unless otherwise described, the code in this repository is licensed under the Apache-2.0 license. + +# Objects + +[![Twitter Follow](https://img.shields.io/twitter/follow/SpeckleSystems?style=social)](https://twitter.com/SpeckleSystems) [![Community forum users](https://img.shields.io/discourse/users?server=https%3A%2F%2Fdiscourse.speckle.works&style=flat-square&logo=discourse&logoColor=white)](https://discourse.speckle.works) [![website](https://img.shields.io/badge/https://-speckle.systems-royalblue?style=flat-square)](https://speckle.systems) [![docs](https://img.shields.io/badge/docs-speckle.guide-orange?style=flat-square&logo=read-the-docs&logoColor=white)](https://speckle.guide/dev/) + +**Status** + +![.NET Core](https://github.com/specklesystems/Objects/workflows/.NET%20Core/badge.svg) + + +## Introduction + +Before venturing any further, please make sure to check the following: + +- [Code of Conduct](CODE_OF_CONDUCT.md), +- [Contribution Guidelines](CONTRIBUTING.md), +- [License](LICENSE) + +### Objects + +The Speckle 2.0 object model: geometry and element base classes. It uses .NET Standard 2.0 and has been tested on Windows and MacOS. + +**NOTE:** this is the default object model we ship with Speckle. You can develop your own or fork this and extend it too! + +More info on Objects and Kits 2.0 can be found in [this community forum thread](https://discourse.speckle.works/t/introducing-kits-2-0/710/34). + +## Documentation + +Comprehensive developer and user documentation can be found in our: + +#### 📚 [Speckle Docs website](https://speckle.guide/dev/) + +## Developing & Debugging + +### Building + +Just restore all the NuGet packages and hit Build! + +### Developing + +Objects is just a set of Data Transfer Objects, so it's quite straightforward to understand how they work! + +#### Host application support + +In order to better support interop between the various AEC host applications and Speckle, Objects also contains classes that help deal with native object types and their properties. + +For example, you'll see a `\Revit` folder. It contains a series of classes that extend the basic ones with a set of default Revit properties; a minimal sketch of this pattern is shown below.
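To make that pattern concrete, here is a minimal, hypothetical sketch of how a host-specific class layers native data on top of a shared base class. The class and property names below are illustrative only (the real definitions live under `src/Objects/BuiltElements`), so treat this as the shape of the pattern, not the actual API:

```csharp
using Speckle.Core.Models;

namespace Objects.BuiltElements.Example
{
  // Host-agnostic element: only the data every AEC application can understand.
  public class Wall : Base
  {
    public ICurve baseLine { get; set; }
    public double height { get; set; }
    public string units { get; set; }
  }

  // Host-flavoured subclass: adds the native properties one application needs,
  // while remaining readable as a plain Wall by every other connector.
  public class RevitWallExample : Wall
  {
    public string family { get; set; }
    public string type { get; set; }
    public Base parameters { get; set; } // raw host parameters, kept dynamic
  }
}
```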
This is the approach we'll follow with other host applications as well. + +## Contributing + +Please make sure you read the [contribution guidelines](CONTRIBUTING.md) for an overview of the best practices we try to follow. + +## Community + +The Speckle Community hangs out on [the forum](https://discourse.speckle.works), do join and introduce yourself & feel free to ask us questions! + +## License + +Unless otherwise described, the code in this repository is licensed under the Apache-2.0 license. diff --git a/Speckle.Sdk.sln b/Speckle.Sdk.sln new file mode 100644 index 00000000..557ff598 --- /dev/null +++ b/Speckle.Sdk.sln @@ -0,0 +1,32 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Speckle.Core", "src\Speckle.Core\Speckle.Core.csproj", "{A413E196-3696-4F48-B635-04B5F76BF9C9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Speckle.Core.Tests.Unit", "tests\Speckle.Core.Tests.Unit\Speckle.Core.Tests.Unit.csproj", "{99AE2273-12C5-4A9D-9FDD-19F8B394B5E2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Speckle.Core.Transports", "src\Speckle.Core.Transports\Speckle.Core.Transports.csproj", "{6845F190-036C-4AEF-B267-23EE84DBD2A6}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {8781B61F-0308-488A-BEB2-1939E7CEEBE9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8781B61F-0308-488A-BEB2-1939E7CEEBE9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8781B61F-0308-488A-BEB2-1939E7CEEBE9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8781B61F-0308-488A-BEB2-1939E7CEEBE9}.Release|Any CPU.Build.0 = Release|Any CPU + {A413E196-3696-4F48-B635-04B5F76BF9C9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A413E196-3696-4F48-B635-04B5F76BF9C9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A413E196-3696-4F48-B635-04B5F76BF9C9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A413E196-3696-4F48-B635-04B5F76BF9C9}.Release|Any CPU.Build.0 = Release|Any CPU + {99AE2273-12C5-4A9D-9FDD-19F8B394B5E2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {99AE2273-12C5-4A9D-9FDD-19F8B394B5E2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {99AE2273-12C5-4A9D-9FDD-19F8B394B5E2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {99AE2273-12C5-4A9D-9FDD-19F8B394B5E2}.Release|Any CPU.Build.0 = Release|Any CPU + {6845F190-036C-4AEF-B267-23EE84DBD2A6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6845F190-036C-4AEF-B267-23EE84DBD2A6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6845F190-036C-4AEF-B267-23EE84DBD2A6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6845F190-036C-4AEF-B267-23EE84DBD2A6}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection +EndGlobal diff --git a/global.json b/global.json new file mode 100644 index 00000000..c19a2e05 --- /dev/null +++ b/global.json @@ -0,0 +1,6 @@ +{ + "sdk": { + "version": "8.0.100", + "rollForward": "latestMinor" + } +} diff --git a/notes/sqlite-performance.md b/notes/sqlite-performance.md new file mode 100644 index 00000000..a9488682 --- /dev/null +++ b/notes/sqlite-performance.md @@ -0,0 +1,356 @@ + + +## Storage Size +1 million objects => 540mb ( based on ~= 4.2 million objects => 2.3GB not gzipped) + +1 million objects => 127mb gzipped + +4x reduction in space + +## Local storage takeaways: + +SQLite optimisations make a difference in insertion speed. Insertion speed does slow down on large tables (+1m rows). 
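For context on what the two code paths benchmarked below are doing, here is a minimal sketch of a PRAGMA-tuned, transaction-batched insert into a simple `hash`/`content` table. It assumes the `Microsoft.Data.Sqlite` package and an illustrative table name; it is not the actual transport implementation:

```csharp
using System.Collections.Generic;
using Microsoft.Data.Sqlite;

internal static class SqliteWriteSketch
{
  // Writes all objects inside one transaction with a reused, parameterised INSERT -
  // the "bulk" style measured in the tests below. The table is assumed to exist as:
  // CREATE TABLE objects (hash TEXT PRIMARY KEY, content TEXT).
  public static void BulkWrite(string dbPath, IEnumerable<(string Hash, string Content)> objects)
  {
    using var connection = new SqliteConnection($"Data Source={dbPath}");
    connection.Open();

    using (var pragma = connection.CreateCommand())
    {
      // Two of the PRAGMAs listed in these notes; they trade durability for speed.
      pragma.CommandText = "PRAGMA journal_mode = MEMORY; PRAGMA synchronous = OFF;";
      pragma.ExecuteNonQuery();
    }

    using var transaction = connection.BeginTransaction();
    using var insert = connection.CreateCommand();
    insert.Transaction = transaction;
    insert.CommandText = "INSERT OR IGNORE INTO objects (hash, content) VALUES ($hash, $content)";
    var hash = insert.Parameters.AddWithValue("$hash", string.Empty);
    var content = insert.Parameters.AddWithValue("$content", string.Empty);

    foreach (var (objectHash, objectContent) in objects)
    {
      hash.Value = objectHash;
      content.Value = objectContent;
      insert.ExecuteNonQuery(); // statement reused; the transaction commits once at the end
    }

    transaction.Commit();
  }
}
```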
+ +Partitioned tables (by, for example, the first two decimals of the hash) have slower but predictable insertion speed. Not sure if the compromise is worth it? + +## Even More Optimised single object table +Optimisations are: +- `PRAGMA journal_mode = MEMORY;` +- `PRAGMA synchronous = OFF;` +- `PRAGMA count_changes=OFF;` +- `PRAGMA temp_store=MEMORY;` + + +### Test 1 +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 2286 ms -> 50000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 1426 ms -> 100000 objects per second +------------------------------------------------- + +### Test 2 +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 3052 ms -> 33333.333333333336 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 2244 ms -> 50000 objects per second +------------------------------------------------- + +### Test 3 +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 4941 ms -> 25000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 2555 ms -> 50000 objects per second +------------------------------------------------- + +### Test 4 +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 8022 ms -> 12500 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 3350 ms -> 33333.333333333336 objects per second +------------------------------------------------- + + +### Test 5 +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 6602 ms -> 16666.666666666668 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 3445 ms -> 33333.333333333336 objects per second +------------------------------------------------- + +### Test 5+: A couple more rounds, pushing objs to 2.000k + +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 7332 ms -> 14285.714285714286 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 7625 ms -> 14285.714285714286 objects per second +------------------------------------------------- + +------------------------------------------------- + +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 7539 ms -> 14285.714285714286 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 4249 ms -> 25000 objects per second +------------------------------------------------- + +------------------------------------------------- + +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 8300 ms -> 12500 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 7289 ms -> 14285.714285714286 objects per second +------------------------------------------------- + +------------------------------------------------- +
+------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 8668 ms -> 12500 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 8060 ms -> 12500 objects per second +------------------------------------------------- + + + +Starting to save 100000 of objects +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 10228 ms -> 10000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 11475 ms -> 9090.90909090909 objects per second +------------------------------------------------- + +------------------------------------------------- + +Starting to save 100000 of objects +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 12540 ms -> 8333.333333333334 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 7113 ms -> 14285.714285714286 objects per second +------------------------------------------------- + +------------------------------------------------- + +Starting to save 100000 of objects +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 17153 ms -> 5882.35294117647 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 5997 ms -> 20000 objects per second +------------------------------------------------- + +------------------------------------------------- + +Starting to save 100000 of objects +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 20841 ms -> 5000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 9195 ms -> 11111.111111111111 objects per second +------------------------------------------------- + +Starting to save 100000 of objects +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 13404 ms -> 7692.307692307692 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 7529 ms -> 14285.714285714286 objects per second +------------------------------------------------- + +------------------------------------------------- + +Starting to save 100000 of objects +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 19806 ms -> 5263.1578947368425 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 7318 ms -> 14285.714285714286 objects per second +------------------------------------------------- + +------------------------------------------------- + +Starting to save 100000 of objects +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 24612 ms -> 4166.666666666667 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 7410 ms -> 14285.714285714286 objects per second +------------------------------------------------- + +------------------------------------------------- + +Starting to save 100000 of objects 
+------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 22257 ms -> 4545.454545454545 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 18699 ms -> 5555.555555555556 objects per second +------------------------------------------------- + +------------------------------------------------- + +Starting to save 100000 of objects +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 20947 ms -> 5000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 14089 ms -> 7142.857142857143 objects per second +------------------------------------------------- + + +## Optimised single object table +Optimisations are: `PRAGMA journal_mode = MEMORY;` and `PRAGMA synchronous = OFF;` + +### Test 1: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 2267 ms -> 50000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 1327 ms -> 100000 objects per second +------------------------------------------------- + +### Test 2: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 4532 ms -> 25000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 2243 ms -> 50000 objects per second +------------------------------------------------- + +### Test 3: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 3768 ms -> 33333.333333333336 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 5295 ms -> 20000 objects per second +------------------------------------------------- + +### Test 4: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 4033 ms -> 25000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 3126 ms -> 33333.333333333336 objects per second +------------------------------------------------- + +### Test 5: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 4432 ms -> 25000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 3527 ms -> 33333.333333333336 objects per second +------------------------------------------------- + + +## Single object table + +### Test 1: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 11964 ms -> 9090.90909090909 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 6875 ms -> 16666.666666666668 objects per second +------------------------------------------------- +200k total in db + +### Test 2: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 21956 ms -> 4761.9047619047615 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 8904 ms -> 12500 objects per second 
+------------------------------------------------- +400k total in db + +### Test 3: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 25532 ms -> 4000 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 10124 ms -> 10000 objects per second +------------------------------------------------- +600k total in db + +### Test 4: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 26629 ms -> 3846.153846153846 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 10610 ms -> 10000 objects per second +------------------------------------------------- +800k total in db + +### Test 5: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 26956 ms -> 3846.153846153846 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 11007 ms -> 9090.90909090909 objects per second +------------------------------------------------- + +1000k total in db + + +## Bucketed Object Table (256 individual tables for objects): +Pre-generate 256 tables, of form `objs${prefix}`, where prefix is the cartesian product of all the valid hex decimals (`0-9, a-f`). + +### Test 1: +Forgot to copy paste. +200k total in db + +### Test 2: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 19096 ms -> 5263.1578947368425 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 7401 ms -> 14285.714285714286 objects per second +------------------------------------------------- +400k total in db + +### Test 3: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 22477 ms -> 4545.454545454545 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 8668 ms -> 12500 objects per second +------------------------------------------------- +600k total in db + +### Test 4: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 23438 ms -> 4347.826086956522 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 9288 ms -> 11111.111111111111 objects per second +------------------------------------------------- +800k total in db + +### Test 5: +------------------------------------------------- +BufferedWriteTest: Wrote 100000 in 23735 ms -> 4347.826086956522 objects per second +------------------------------------------------- + +------------------------------------------------- +BulkWriteMany: Wrote 100000 in 9944 ms -> 11111.111111111111 objects per second +------------------------------------------------- +1mil total in db \ No newline at end of file diff --git a/src/Directory.Build.props b/src/Directory.Build.props new file mode 100644 index 00000000..a9c5b063 --- /dev/null +++ b/src/Directory.Build.props @@ -0,0 +1,12 @@ + + + + + + + + + + true + + diff --git a/src/MongoDBTransport/MongoDB.cs b/src/MongoDBTransport/MongoDB.cs new file mode 100644 index 00000000..fd3d77a3 --- /dev/null +++ b/src/MongoDBTransport/MongoDB.cs @@ -0,0 +1,272 @@ +using 
System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using System.Timers; +using MongoDB.Bson; +using MongoDB.Driver; +using Speckle.Core.Logging; +using Timer = System.Timers.Timer; + +namespace Speckle.Core.Transports; + +// If data storage accessed by transports will always use the hash and content field names, move this enum to ITransport instead. +public enum Field +{ + hash, + content +} + +// Question: the benefit of noSQL is the use of unstructured collections of variable documents. +// Explore storing partially serialized Speckle objects with dynamically generated fields instead of just a content string? +[Obsolete("This Transport is no longer maintained or routinely tested, use with caution")] +public class MongoDBTransport : IDisposable, ITransport +{ + private bool IS_WRITING; + private int MAX_TRANSACTION_SIZE = 1000; + private int PollInterval = 500; + + private ConcurrentQueue<(string, string, int)> Queue = new(); + + /// + /// Timer that ensures queue is consumed if less than MAX_TRANSACTION_SIZE objects are being sent. + /// + /// Is this to prevent requests to read an object before it is written, or to handle read/write locks? + /// If this is can differ per transport, better to use Database.currentOp() to determine if write operations are waiting for a lock. + private Timer WriteTimer; + + public MongoDBTransport( + string connectionString = "mongodb://localhost:27017", + string applicationName = "Speckle", + string scope = "Objects" + ) + { + SpeckleLog.Logger.Information("Creating new MongoDB Transport"); + + ConnectionString = connectionString; + Client = new MongoClient(ConnectionString); + Database = (MongoDatabaseBase)Client.GetDatabase(applicationName); + Collection = Database.GetCollection(scope); + + Initialize(); + + WriteTimer = new Timer + { + AutoReset = true, + Enabled = false, + Interval = PollInterval + }; + WriteTimer.Elapsed += WriteTimerElapsed; + } + + public string ConnectionString { get; set; } + + private MongoClient Client { get; set; } + private IMongoDatabase Database { get; set; } + private IMongoCollection Collection { get; set; } + + public void Dispose() + { + // MongoDB collection connection should dispose automatically + + // Time out locking could be added if an expected use case is multiple clients writing to the same server + } + + public string TransportName { get; set; } = "MongoTransport"; + + public Dictionary TransportContext => new() { { "name", TransportName }, { "type", GetType().Name } }; + + public CancellationToken CancellationToken { get; set; } + + public Action OnProgressAction { get; set; } + + public Action OnErrorAction { get; set; } + public int SavedObjectCount { get; private set; } + + // not implementing this properly + public TimeSpan Elapsed => TimeSpan.Zero; + + public void BeginWrite() + { + SavedObjectCount = 0; + } + + public void EndWrite() { } + + public Task> HasObjects(IReadOnlyList objectIds) + { + throw new NotImplementedException(); + } + + private void Initialize() + { + // Assumes mongoDB server is running + // Mongo database and collection should be created automatically if it doesn't already exist + + // Check if the connection is successful + bool isMongoLive = Database.RunCommandAsync((Command)"{ping:1}").Wait(1000); + if (!isMongoLive) + { + OnErrorAction(TransportName, new Exception("The Mongo database could not be reached.")); + } + } + + /// + /// Returns all the objects in the store. 
+ /// + /// + internal IEnumerable GetAllObjects() + { + var documents = Collection.Find(new BsonDocument()).ToList(); + List documentContents = new(); + foreach (BsonDocument document in documents) + { + documentContents.Add(document[Field.content.ToString()].AsString); + } + + return documentContents; + } + + /// + /// Deletes an object. Note: do not use for any speckle object transport, as it will corrupt the database. + /// + /// + internal void DeleteObject(string hash) + { + var filter = Builders.Filter.Eq(Field.hash.ToString(), hash); + Collection.DeleteOne(filter); + } + + #region Writes + + /// + /// Awaits until write completion (ie, the current queue is fully consumed). + /// + /// + public async Task WriteComplete() + { + await Utilities + .WaitUntil( + () => + { + return GetWriteCompletionStatus(); + }, + 500 + ) + .ConfigureAwait(false); + } + + /// + /// Returns true if the current write queue is empty and committed. + /// + /// + /// + /// Mongo has intent shared and intent exclusive client operations. + /// Each category shares a lock, with intent exclusive operations prioritized. + /// Would change to Database.currentOp() to determine if write operations are waiting for a lock, if the WriteTimer is deprecated + /// + public bool GetWriteCompletionStatus() + { + Console.WriteLine($"write completion {Queue.Count == 0 && !IS_WRITING}"); + return Queue.Count == 0 && !IS_WRITING; + } + + private void WriteTimerElapsed(object sender, ElapsedEventArgs e) + { + WriteTimer.Enabled = false; + if (!IS_WRITING && Queue.Count != 0) + { + ConsumeQueue(); + } + } + + private void ConsumeQueue() + { + IS_WRITING = true; + var i = 0; + ValueTuple result; + + while (i < MAX_TRANSACTION_SIZE && Queue.TryPeek(out result)) + { + Queue.TryDequeue(out result); + var document = new BsonDocument + { + { Field.hash.ToString(), result.Item1 }, + { Field.content.ToString(), result.Item2 } + }; + Collection.InsertOne(document); + } + + if (Queue.Count > 0) + { + ConsumeQueue(); + } + + IS_WRITING = false; + } + + /// + /// Adds an object to the saving queue. + /// + /// + /// + public void SaveObject(string hash, string serializedObject) + { + Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + + WriteTimer.Enabled = true; + WriteTimer.Start(); + } + + public void SaveObject(string hash, ITransport sourceTransport) + { + var serializedObject = sourceTransport.GetObject(hash); + Queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + } + + /// + /// Directly saves the object in the db. + /// + /// + /// + public void SaveObjectSync(string hash, string serializedObject) + { + var document = new BsonDocument { { Field.hash.ToString(), hash }, { Field.content.ToString(), serializedObject } }; + Collection.InsertOne(document); + } + + #endregion + + #region Reads + + /// + /// Gets an object. 
+ /// + /// + /// + public string GetObject(string hash) + { + var filter = Builders.Filter.Eq(Field.hash.ToString(), hash); + BsonDocument objectDocument = Collection.Find(filter).FirstOrDefault(); + if (objectDocument != null) + { + return objectDocument[Field.content.ToString()].AsString; + } + + // pass on the duty of null checks to consumers + return null; + } + + public async Task CopyObjectAndChildren( + string hash, + ITransport targetTransport, + Action onTotalChildrenCountKnown = null + ) + { + throw new NotImplementedException(); + } + + #endregion +} diff --git a/src/MongoDBTransport/MongoDBTransport.csproj b/src/MongoDBTransport/MongoDBTransport.csproj new file mode 100644 index 00000000..1d9bc43a --- /dev/null +++ b/src/MongoDBTransport/MongoDBTransport.csproj @@ -0,0 +1,26 @@ + + + netstandard2.0 + Speckle.Transports.MongoDB + MongoDBTransport + A MongoDB transport for Speckle + $(PackageTags) mongodb transport + true + disable + Speckle.Core.Transports + + + + 0 + false + false + + + + + + + + + + diff --git a/src/Objects/.editorconfig b/src/Objects/.editorconfig new file mode 100644 index 00000000..28981d89 --- /dev/null +++ b/src/Objects/.editorconfig @@ -0,0 +1,10 @@ + +[*.{cs,vb}] + +# Name properties with camelCase +dotnet_naming_rule.properties_should_be_camel_case.severity = none +dotnet_naming_rule.properties_should_be_camel_case.symbols = properties +dotnet_naming_rule.properties_should_be_camel_case.style = property_style + +dotnet_naming_symbols.properties.applicable_kinds = property +dotnet_naming_style.property_style.capitalization = camel_case \ No newline at end of file diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelBeam.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelBeam.cs new file mode 100644 index 00000000..83db3e1c --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelBeam.cs @@ -0,0 +1,36 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Structural.Materials; +using Objects.Structural.Properties.Profiles; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.AdvanceSteel; + +public class AsteelBeam : Beam, IDisplayValue>, IHasVolume, IHasArea, IAsteelObject +{ + [DetachProperty] + public SectionProfile profile { get; set; } + + [DetachProperty] + public StructuralMaterial material { get; set; } + + [DetachProperty] + public AsteelSectionProfile asteelProfile { get; set; } + + public double volume { get; set; } + public double area { get; set; } + public Base userAttributes { get; set; } + + public Base asteelProperties { get; set; } + + public AsteelBeam() { } + + [SchemaInfo("AsteelBeam", "Creates a Advance Steel beam by curve.", "Advance Steel", "Structure")] + public AsteelBeam([SchemaMainParam] ICurve baseLine, SectionProfile profile, StructuralMaterial material) + { + this.baseLine = baseLine; + this.profile = profile; + this.material = material; + } +} diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelBolt.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelBolt.cs new file mode 100644 index 00000000..e474b51a --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelBolt.cs @@ -0,0 +1,27 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.AdvanceSteel; + +public abstract class AsteelBolt : Base, IAsteelObject +{ + [DetachProperty] + public List displayValue { get; set; } + + public Base userAttributes { get; set; } + + public Base asteelProperties { get; set; } +} + +public 
class AsteelCircularBolt : AsteelBolt +{ + //[SchemaInfo("AsteelCircularBolt", "Creates a Advance Steel circular bolt.", "Advance Steel", "Structure")] + public AsteelCircularBolt() { } +} + +public class AsteelRectangularBolt : AsteelBolt +{ + //[SchemaInfo("AsteelRectangularBolt", "Creates a Advance Steel rectangular bolt.", "Advance Steel", "Structure")] + public AsteelRectangularBolt() { } +} diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelGrating.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelGrating.cs new file mode 100644 index 00000000..d0b94c46 --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelGrating.cs @@ -0,0 +1,18 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.AdvanceSteel; + +public class AsteelGrating : Base, IAsteelObject +{ + [DetachProperty] + public List displayValue { get; set; } + + public Base userAttributes { get; set; } + + public Base asteelProperties { get; set; } + + //[SchemaInfo("AsteelGrating", "Creates a Advance Steel grating.", "Advance Steel", "Structure")] + public AsteelGrating() { } +} diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelPlate.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelPlate.cs new file mode 100644 index 00000000..a8335933 --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelPlate.cs @@ -0,0 +1,27 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Structural.Materials; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.AdvanceSteel; + +public class AsteelPlate : Area, IDisplayValue>, IHasArea, IHasVolume, IAsteelObject +{ + [DetachProperty] + public StructuralMaterial? material { get; set; } + + public Base userAttributes { get; set; } + + public Base asteelProperties { get; set; } + + [SchemaInfo("AsteelPlate", "Creates a Advance Steel plate.", "Advance Steel", "Structure")] + public AsteelPlate(Polyline outline, string units, StructuralMaterial? 
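All of the Advance Steel classes above expose the same userAttributes / asteelProperties members (the shared IAsteelObject interface appears later in this patch), so custom data can be attached uniformly. A small sketch, assuming Base supports dynamic member assignment through a string indexer:

```csharp
using Objects.BuiltElements.AdvanceSteel;
using Speckle.Core.Models;

public static class AsteelTaggingSketch
{
  // Attach an arbitrary key/value pair to any Advance Steel object.
  public static void Tag(IAsteelObject obj, string key, object value)
  {
    obj.userAttributes ??= new Base(); // create the container on first use
    obj.userAttributes[key] = value;   // stored as a dynamic member on Base
  }
}

// e.g. AsteelTaggingSketch.Tag(new AsteelCircularBolt(), "erectionLot", "L-01");
```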
material = null) + { + this.outline = outline; + this.material = material; + this.units = units; + } + + public AsteelPlate() { } +} diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelPolyBeam.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelPolyBeam.cs new file mode 100644 index 00000000..8ba194b0 --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelPolyBeam.cs @@ -0,0 +1,7 @@ +namespace Objects.BuiltElements.AdvanceSteel; + +public class AsteelPolyBeam : AsteelBeam +{ + //[SchemaInfo("AsteelPolyBeam", "Creates a Advance Steel polybeam.", "Advance Steel", "Structure")] + public AsteelPolyBeam() { } +} diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelSectionProfile.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelSectionProfile.cs new file mode 100644 index 00000000..069a0d30 --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelSectionProfile.cs @@ -0,0 +1,12 @@ +using Speckle.Core.Models; + +namespace Objects.BuiltElements.AdvanceSteel; + +public class AsteelSectionProfile : Base +{ + public string ProfSectionType { get; set; } + + public string ProfSectionName { get; set; } + + public AsteelSectionProfileDB SectionProfileDB { get; set; } +} diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelSectionProfileDB.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelSectionProfileDB.cs new file mode 100644 index 00000000..cdbf61e7 --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelSectionProfileDB.cs @@ -0,0 +1,5 @@ +using Speckle.Core.Models; + +namespace Objects.BuiltElements.AdvanceSteel; + +public class AsteelSectionProfileDB : Base { } diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelSlab.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelSlab.cs new file mode 100644 index 00000000..bed8a7c8 --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelSlab.cs @@ -0,0 +1,27 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Structural.Materials; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.AdvanceSteel; + +public class AsteelSlab : Area, IDisplayValue>, IHasArea, IHasVolume, IAsteelObject +{ + [DetachProperty] + public StructuralMaterial? material { get; set; } + + public Base userAttributes { get; set; } + + public Base asteelProperties { get; set; } + + [SchemaInfo("AsteelSlab", "Creates a Advance Steel slab.", "Advance Steel", "Structure")] + public AsteelSlab(Polyline outline, string units, StructuralMaterial? 
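The plate and slab constructors above take a closed outline Polyline plus units, with the material optional. A sketch of building an AsteelPlate, assuming Polyline exposes a flat coordinate list (value), a closed flag and units, as elsewhere in the Objects kit; those members are not visible in this patch:

```csharp
using System.Collections.Generic;
using Objects.BuiltElements.AdvanceSteel;
using Objects.Geometry;

// A 1 m x 1 m plate outline in the XY plane (coordinates as x,y,z triplets).
var outline = new Polyline
{
  value = new List<double> { 0, 0, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0 },
  closed = true,
  units = "m",
};
var plate = new AsteelPlate(outline, "m"); // material left null, as the constructor allows
```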
material = null) + { + this.outline = outline; + this.material = material; + this.units = units; + } + + public AsteelSlab() { } +} diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelSpecialPart.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelSpecialPart.cs new file mode 100644 index 00000000..f05b8155 --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelSpecialPart.cs @@ -0,0 +1,18 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.AdvanceSteel; + +public class AsteelSpecialPart : Base, IAsteelObject +{ + [DetachProperty] + public List displayValue { get; set; } + + public Base userAttributes { get; set; } + + public Base asteelProperties { get; set; } + + //[SchemaInfo("AsteelSpecialPart", "Creates a Advance Steel special part.", "Advance Steel", "Structure")] + public AsteelSpecialPart() { } +} diff --git a/src/Objects/BuiltElements/AdvanceSteel/AsteelStraightBeam.cs b/src/Objects/BuiltElements/AdvanceSteel/AsteelStraightBeam.cs new file mode 100644 index 00000000..0e548aae --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/AsteelStraightBeam.cs @@ -0,0 +1,7 @@ +namespace Objects.BuiltElements.AdvanceSteel; + +public class AsteelStraightBeam : AsteelBeam +{ + //[SchemaInfo("AsteelStraightBeam", "Creates a Advance Steel straightBeam.", "Advance Steel", "Structure")] + public AsteelStraightBeam() { } +} diff --git a/src/Objects/BuiltElements/AdvanceSteel/Enums.cs b/src/Objects/BuiltElements/AdvanceSteel/Enums.cs new file mode 100644 index 00000000..42200752 --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/Enums.cs @@ -0,0 +1 @@ +namespace Objects.BuiltElements.AdvanceSteel; diff --git a/src/Objects/BuiltElements/AdvanceSteel/IAsteelObject.cs b/src/Objects/BuiltElements/AdvanceSteel/IAsteelObject.cs new file mode 100644 index 00000000..c648867c --- /dev/null +++ b/src/Objects/BuiltElements/AdvanceSteel/IAsteelObject.cs @@ -0,0 +1,10 @@ +using Speckle.Core.Models; + +namespace Objects.BuiltElements.AdvanceSteel; + +public interface IAsteelObject +{ + Base userAttributes { get; set; } + + Base asteelProperties { get; set; } +} diff --git a/src/Objects/BuiltElements/Alignment.cs b/src/Objects/BuiltElements/Alignment.cs new file mode 100644 index 00000000..476ee520 --- /dev/null +++ b/src/Objects/BuiltElements/Alignment.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements; + +public class Alignment : Base, IDisplayValue +{ + [JsonIgnore, Obsolete("Use curves property")] + public ICurve baseCurve { get; set; } + + public List curves { get; set; } + + public string name { get; set; } + + public double startStation { get; set; } + + public double endStation { get; set; } + + public List profiles { get; set; } + + /// + /// Station equation list contains doubles indicating raw station back, station back, and station ahead for each station equation + /// + public List stationEquations { get; set; } + + /// + /// Station equation direction for the corresponding station equation should be true for increasing or false for decreasing + /// + public List stationEquationDirections { get; set; } + + public string units { get; set; } + + [DetachProperty] + public Polyline displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Archicad/ArchicadBeam.cs b/src/Objects/BuiltElements/Archicad/ArchicadBeam.cs new file mode 100644 index 
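The stationEquations / stationEquationDirections convention documented on Alignment above is easiest to read as flat triplets per equation. A sketch; the list element types are assumed to be double and bool, since the generic arguments are not legible in this patch:

```csharp
using System.Collections.Generic;
using Objects.BuiltElements;

var alignment = new Alignment
{
  name = "Main Road",
  startStation = 0,
  endStation = 1500,
  // one triplet per equation: raw station back, station back, station ahead
  stationEquations = new List<double> { 500, 500, 1000, 1200, 1200, 2000 },
  // one entry per equation: true = increasing, false = decreasing
  stationEquationDirections = new List<bool> { true, false },
  units = "m",
};
```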
00000000..4964a78b --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/ArchicadBeam.cs @@ -0,0 +1,134 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements.Archicad; + +/* +For further informations about given the variables, visit: +https://archicadapi.graphisoft.com/documentation/api_beamtype +*/ +public class ArchicadBeam : Beam +{ + [SchemaInfo("ArchicadBeam", "Creates an Archicad beam by curve.", "Archicad", "Structure")] + public ArchicadBeam() { } + + // Element base + public string? elementType { get; set; } /*APINullabe*/ + + public List? classifications { get; set; } /*APINullabe*/ + public Base? elementProperties { get; set; } + public Base? componentProperties { get; set; } + + public override Level? level + { + get => archicadLevel; + internal set + { + if (value is ArchicadLevel or null) + { + archicadLevel = value as ArchicadLevel; + } + else + { + throw new ArgumentException($"Expected object of type {nameof(ArchicadLevel)}"); + } + } + } + + [JsonIgnore] + public ArchicadLevel? archicadLevel { get; set; } /*APINullabe*/ + + public string? layer { get; set; } /*APINullabe*/ + + // Positioning + public Point begC { get; set; } + public Point endC { get; set; } + public bool? isSlanted { get; set; } /*APINullabe*/ + public double? slantAngle { get; set; } /*APINullabe*/ + public string? beamShape { get; set; } /*APINullabe*/ + public int? sequence { get; set; } /*APINullabe*/ + public double? curveAngle { get; set; } /*APINullabe*/ + public double? verticalCurveHeight { get; set; } /*APINullabe*/ + public bool? isFlipped { get; set; } /*APINullabe*/ + + // End Cuts + public uint? nCuts { get; set; } /*APINullabe*/ + public Dictionary? Cuts { get; set; } + + // Reference Axis + public short? anchorPoint { get; set; } /*APINullabe*/ + public double? offset { get; set; } + public double? profileAngle { get; set; } + + // Segment + public uint? nSegments { get; set; } /*APINullabe*/ + public uint? nProfiles { get; set; } /*APINullabe*/ + public Dictionary? segments { get; set; } /*APINullabe*/ + + // Scheme + public uint? nSchemes { get; set; } + public Dictionary? Schemes { get; set; } + + // Hole + public Dictionary? Holes { get; set; } + + // Floor Plan and Section - Floor Plan Display + public string? showOnStories { get; set; } /*APINullabe*/ + public string? displayOptionName { get; set; } /*APINullabe*/ + public string? uncutProjectionMode { get; set; } /*APINullabe*/ + public string? overheadProjectionMode { get; set; } /*APINullabe*/ + public string? showProjectionName { get; set; } /*APINullabe*/ + + // Floor Plan and Section - Cut Surfaces + public short? cutContourLinePen { get; set; } + public string? cutContourLineType { get; set; } + public short? overrideCutFillPen { get; set; } + public short? overrideCutFillBackgroundPen { get; set; } + + // Floor Plan and Section - Outlines + public string? showOutline { get; set; } /*APINullabe*/ + public short? uncutLinePen { get; set; } /*APINullabe*/ + public string? uncutLinetype { get; set; } /*APINullabe*/ + public short? overheadLinePen { get; set; } /*APINullabe*/ + public string? overheadLinetype { get; set; } /*APINullabe*/ + public short? hiddenLinePen { get; set; } /*APINullabe*/ + public string? hiddenLinetype { get; set; } /*APINullabe*/ + + // Floor Plan and Section - Symbol + public string? showReferenceAxis { get; set; } /*APINullabe*/ + public short? 
referencePen { get; set; } /*APINullabe*/ + public string? referenceLinetype { get; set; } /*APINullabe*/ + + // Floor Plan and Section - Cover Fills + public bool? useCoverFill { get; set; } /*APINullabe*/ + public bool? useCoverFillFromSurface { get; set; } + public short? coverFillForegroundPen { get; set; } + public short? coverFillBackgroundPen { get; set; } + public string? coverFillType { get; set; } + public string? coverFillTransformationType { get; set; } + public double? coverFillTransformationOrigoX { get; set; } + public double? coverFillTransformationOrigoY { get; set; } + public double? coverFillTransformationXAxisX { get; set; } + public double? coverFillTransformationXAxisY { get; set; } + public double? coverFillTransformationYAxisX { get; set; } + public double? coverFillTransformationYAxisY { get; set; } + + public class BeamSegment : Base + { + // Segment override materials + public string? leftMaterial { get; set; } + public string? topMaterial { get; set; } + public string? rightMaterial { get; set; } + public string? bottomMaterial { get; set; } + + public string? endsMaterial { get; set; } + + // Segment - The overridden materials are chained + public bool? materialChained { get; set; } + public AssemblySegment assemblySegmentData { get; set; } + } +} diff --git a/src/Objects/BuiltElements/Archicad/ArchicadColumn.cs b/src/Objects/BuiltElements/Archicad/ArchicadColumn.cs new file mode 100644 index 00000000..59693a90 --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/ArchicadColumn.cs @@ -0,0 +1,144 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements.Archicad; + +/* +For further informations about given the variables, visit: +https://archicadapi.graphisoft.com/documentation/api_columntype +*/ +public class ArchicadColumn : Column +{ + [SchemaInfo("ArchicadColumn", "Creates an Archicad Column by curve.", "Archicad", "Structure")] + public ArchicadColumn() { } + + // Element base + public string? elementType { get; set; } /*APINullabe*/ + + public List? classifications { get; set; } /*APINullabe*/ + public Base? elementProperties { get; set; } + public Base? componentProperties { get; set; } + + public override Level? level + { + get => archicadLevel; + internal set + { + if (value is ArchicadLevel or null) + { + archicadLevel = value as ArchicadLevel; + } + else + { + throw new ArgumentException($"Expected object of type {nameof(ArchicadLevel)}"); + } + } + } + + [JsonIgnore] + public ArchicadLevel? archicadLevel { get; set; } /*APINullabe*/ + + public string? layer { get; set; } /*APINullabe*/ + + // Wall geometry + public Point origoPos { get; set; } + public double height { get; set; } + + // Positioning - story relation + public double? bottomOffset { get; set; } /*APINullabe*/ + public double? topOffset { get; set; } /*APINullabe*/ + public short? relativeTopStory { get; set; } /*APINullabe*/ + + // Positioning - slanted column + public bool? isSlanted { get; set; } /*APINullabe*/ + public double? slantAngle { get; set; } /*APINullabe*/ + public double? slantDirectionAngle { get; set; } /*APINullabe*/ + public bool? isFlipped { get; set; } /*APINullabe*/ + + // Positioning - wrapping + public bool? wrapping { get; set; } /*APINullabe*/ + + // Positioning - Defines the relation of column to zones (Zone Boundary, Reduce Zone Area Only, No Effect on Zones) + public string? 
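The overridden level property above only accepts an ArchicadLevel (or null) and its setter is internal, so code outside the assembly populates archicadLevel directly. A minimal sketch using the ArchicadLevel constructor defined later in this patch; the Point constructor used here is an assumption taken from the geometry classes elsewhere in the patch:

```csharp
using Objects.BuiltElements.Archicad;
using Objects.Geometry;

var beam = new ArchicadBeam
{
  archicadLevel = new ArchicadLevel("Ground Floor", 0.0, 0), // name, elevation, story index
  begC = new Point(0, 0, 0, "m"),
  endC = new Point(5, 0, 0, "m"),
};
```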
columnRelationToZoneName { get; set; } /*APINullabe*/ + + // End Cuts + public uint? nCuts { get; set; } /*APINullabe*/ + public Dictionary? Cuts { get; set; } /*APINullabe*/ + + // Reference Axis + public short? coreAnchor { get; set; } + public double? axisRotationAngle { get; set; } + + // Segment + public uint? nSegments { get; set; } /*APINullabe*/ + public uint? nProfiles { get; set; } /*APINullabe*/ + public Dictionary? segments { get; set; } /*APINullabe*/ + + // Scheme + public uint? nSchemes { get; set; } + public Dictionary? Schemes { get; set; } + + // Floor Plan and Section - Floor Plan Display + public string? showOnStories { get; set; } /*APINullabe*/ + public string? displayOptionName { get; set; } /*APINullabe*/ + public string? showProjectionName { get; set; } /*APINullabe*/ + + // Floor Plan and Section - Cut Surfaces + public short? corePen { get; set; } + public string? contLtype { get; set; } + public short? venLinePen { get; set; } + public string? venLineType { get; set; } + public short? overrideCutFillPen { get; set; } + public short? overrideCutFillBackgroundPen { get; set; } + + // Floor Plan and Section - Outlines + public short? uncutLinePen { get; set; } /*APINullabe*/ + public string? uncutLinetype { get; set; } /*APINullabe*/ + public short? overheadLinePen { get; set; } /*APINullabe*/ + public string? overheadLinetype { get; set; } /*APINullabe*/ + public short? hiddenLinePen { get; set; } /*APINullabe*/ + public string? hiddenLinetype { get; set; } /*APINullabe*/ + + // Floor Plan and Section - Floor Plan Symbol + public string? coreSymbolTypeName { get; set; } /*APINullabe*/ + public double? coreSymbolPar1 { get; set; } /*APINullabe*/ + public double? coreSymbolPar2 { get; set; } /*APINullabe*/ + public short? coreSymbolPen { get; set; } /*APINullabe*/ + + // Floor Plan and Section - Cover Fills + public bool? useCoverFill { get; set; } /*APINullabe*/ + public bool? useCoverFillFromSurface { get; set; } + public short? coverFillForegroundPen { get; set; } + public short? coverFillBackgroundPen { get; set; } + public string? coverFillType { get; set; } + public string? coverFillTransformationType { get; set; } + public double? coverFillTransformationOrigoX { get; set; } + public double? coverFillTransformationOrigoY { get; set; } + public double? coverFillTransformationXAxisX { get; set; } + public double? coverFillTransformationXAxisY { get; set; } + public double? coverFillTransformationYAxisX { get; set; } + public double? coverFillTransformationYAxisY { get; set; } + + public class ColumnSegment : Base + { + // Segment - Veneer attributes + public string? veneerType { get; set; } + public string? veneerBuildingMaterial { get; set; } + + public double? veneerThick { get; set; } + + // Segment - The extrusion overridden material name + public string? extrusionSurfaceMaterial { get; set; } + + // Segment - The ends overridden material name + public string? endsSurfaceMaterial { get; set; } + + // Segment - The overridden materials are chained + public bool? 
materialChained { get; set; } + public AssemblySegment assemblySegmentData { get; set; } + } +} diff --git a/src/Objects/BuiltElements/Archicad/ArchicadFloor.cs b/src/Objects/BuiltElements/Archicad/ArchicadFloor.cs new file mode 100644 index 00000000..734ad06d --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/ArchicadFloor.cs @@ -0,0 +1,99 @@ +using System; +using System.Collections.Generic; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements.Archicad; + +/* +For further informations about given the variables, visit: +https://archicadapi.graphisoft.com/documentation/api_slabtype +*/ +public sealed class ArchicadFloor : Floor +{ + // Element base + public string? elementType { get; set; } /*APINullable*/ + + public List? classifications { get; set; } /*APINullable*/ + public Base? elementProperties { get; set; } + public Base? componentProperties { get; set; } + + public override Level? level + { + get => archicadLevel; + internal set + { + if (value is ArchicadLevel or null) + { + archicadLevel = value as ArchicadLevel; + } + else + { + throw new ArgumentException($"Expected object of type {nameof(ArchicadLevel)}"); + } + } + } + + [JsonIgnore] + public ArchicadLevel? archicadLevel { get; set; } /*APINullabe*/ + + public string? layer { get; set; } /*APINullabe*/ + + // Geometry and positioning + public double? thickness { get; set; } + public ElementShape shape { get; set; } + public string? structure { get; set; } /*APINullabe*/ + public string? compositeName { get; set; } + public string? buildingMaterialName { get; set; } + public string? referencePlaneLocation { get; set; } /*APINullabe*/ + + // EdgeTrims + public string? edgeAngleType { get; set; } + public double? edgeAngle { get; set; } + + // Floor Plan and Section - Floor Plan Display + public string? showOnStories { get; set; } /*APINullabe*/ + public Visibility? visibilityCont { get; set; } + public Visibility? visibilityFill { get; set; } + + // Floor Plan and Section - Cut Surfaces + public short? sectContPen { get; set; } + public string? sectContLtype { get; set; } + public short? cutFillPen { get; set; } + public short? cutFillBackgroundPen { get; set; } + + // Floor Plan and Section - Outlines + public short? contourPen { get; set; } + public string? contourLineType { get; set; } + public short? hiddenContourLinePen { get; set; } + public string? hiddenContourLineType { get; set; } + + // Floor Plan and Section - Cover Fills + public bool? useFloorFill { get; set; } + public short? floorFillPen { get; set; } + public short? floorFillBGPen { get; set; } + public string? floorFillName { get; set; } + public bool? use3DHatching { get; set; } + public string? hatchOrientation { get; set; } + public double? hatchOrientationOrigoX { get; set; } + public double? hatchOrientationOrigoY { get; set; } + public double? hatchOrientationXAxisX { get; set; } + public double? hatchOrientationXAxisY { get; set; } + public double? hatchOrientationYAxisX { get; set; } + public double? hatchOrientationYAxisY { get; set; } + + // Model + public string? topMat { get; set; } + public string? sideMat { get; set; } + public string? botMat { get; set; } + public bool? materialsChained { get; set; } + + public class Visibility : Base + { + public bool? showOnHome { get; set; } + public bool? showAllAbove { get; set; } + public bool? showAllBelow { get; set; } + public short? showRelAbove { get; set; } + public short? 
showRelBelow { get; set; } + } +} diff --git a/src/Objects/BuiltElements/Archicad/ArchicadLevel.cs b/src/Objects/BuiltElements/Archicad/ArchicadLevel.cs new file mode 100644 index 00000000..92f33df8 --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/ArchicadLevel.cs @@ -0,0 +1,24 @@ +namespace Objects.BuiltElements.Archicad; + +/* +For further informations about given the variables, visit: +https://archicadapi.graphisoft.com/documentation/api_storytype +*/ +public class ArchicadLevel : Level +{ + public short index { get; set; } + + public ArchicadLevel() { } + + public ArchicadLevel(string name, double elevation, short index) + { + this.name = name; + this.elevation = elevation; + this.index = index; + } + + public ArchicadLevel(string name) + { + this.name = name; + } +} diff --git a/src/Objects/BuiltElements/Archicad/ArchicadRoof.cs b/src/Objects/BuiltElements/Archicad/ArchicadRoof.cs new file mode 100644 index 00000000..6cf965b9 --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/ArchicadRoof.cs @@ -0,0 +1,277 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Other; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements.Archicad; + +/* +For further informations about given the variables, visit: +https://archicadapi.graphisoft.com/documentation/api_shellbasetype +*/ +public class ArchicadShellBase : BuiltElements.Roof +{ + public class Visibility : Base + { + public bool? showOnHome { get; set; } + public bool? showAllAbove { get; set; } + public bool? showAllBelow { get; set; } + public short? showRelAbove { get; set; } + public short? showRelBelow { get; set; } + } + + // Element base + public string? elementType { get; set; } /*APINullabe*/ + + public List? classifications { get; set; } /*APINullabe*/ + public Base? elementProperties { get; set; } + public Base? componentProperties { get; set; } + + public override Level? level + { + get => archicadLevel; + internal set + { + if (value is ArchicadLevel or null) + { + archicadLevel = value as ArchicadLevel; + } + else + { + throw new ArgumentException($"Expected object of type {nameof(ArchicadLevel)}"); + } + } + } + + [JsonIgnore] + public ArchicadLevel? archicadLevel { get; set; } /*APINullabe*/ + + public string? layer { get; set; } /*APINullabe*/ + + // Geometry and positioning + public double? thickness { get; set; } + public string? structure { get; set; } /*APINullabe*/ + public string? compositeName { get; set; } + public string? buildingMaterialName { get; set; } + + // EdgeTrims + public string? edgeAngleType { get; set; } + public double? edgeAngle { get; set; } + + // Floor Plan and Section - Floor Plan Display + public string? showOnStories { get; set; } /*APINullabe*/ + public Visibility? visibilityCont { get; set; } + public Visibility? visibilityFill { get; set; } + public string? displayOptionName { get; set; } /*APINullabe*/ + public string? showProjectionName { get; set; } /*APINullabe*/ + + // Floor Plan and Section - Cut Surfaces + public short? sectContPen { get; set; } /*APINullabe*/ + public string? sectContLtype { get; set; } /*APINullabe*/ + public short? cutFillPen { get; set; } + public short? cutFillBackgroundPen { get; set; } + + // Floor Plan and Section - Outlines + public short? contourPen { get; set; } /*APINullabe*/ + public string? contourLineType { get; set; } /*APINullabe*/ + public short? overheadLinePen { get; set; } /*APINullabe*/ + public string? 
overheadLinetype { get; set; } /*APINullabe*/ + + // Floor Plan and Section - Cover Fills + public bool? useFloorFill { get; set; } /*APINullabe*/ + public short? floorFillPen { get; set; } + public short? floorFillBGPen { get; set; } + public string? floorFillName { get; set; } + public bool? use3DHatching { get; set; } + public bool? useFillLocBaseLine { get; set; } + public bool? useSlantedFill { get; set; } + public string? hatchOrientation { get; set; } + public double? hatchOrientationOrigoX { get; set; } + public double? hatchOrientationOrigoY { get; set; } + public double? hatchOrientationXAxisX { get; set; } + public double? hatchOrientationXAxisY { get; set; } + public double? hatchOrientationYAxisX { get; set; } + public double? hatchOrientationYAxisY { get; set; } + + // Model + public string? topMat { get; set; } + public string? sideMat { get; set; } + public string? botMat { get; set; } + public bool? materialsChained { get; set; } + public string? trimmingBodyName { get; set; } /*APINullabe*/ +} + +/* +For further informations about given the variables, visit: +https://archicadapi.graphisoft.com/documentation/api_rooftype +*/ +public sealed class ArchicadRoof : ArchicadShellBase +{ + public class BaseLine : Base + { + public Point begC { get; set; } + public Point endC { get; set; } + } + + public class RoofLevel : Base + { + public double? levelHeight { get; set; } + public double? levelAngle { get; set; } + } + + public class LevelEdge : Base + { + public double? edgeLevelAngle { get; set; } + public double? eavesOverhang { get; set; } + public string? topMaterial { get; set; } + public string? bottomMaterial { get; set; } + public string? coverFillType { get; set; } + public string? angleType { get; set; } + } + + public class PivotPolyEdge : Base + { + public int? nLevelEdgeData { get; set; } + public Dictionary? roofLevels { get; set; } + } + + // Geometry and positioning + public string roofClassName { get; set; } + public double? planeRoofAngle { get; set; } + public ElementShape shape { get; set; } + public BaseLine? baseLine { get; set; } + public bool? posSign { get; set; } + public ElementShape? pivotPolygon { get; set; } /*APINullabe*/ + public short? levelNum { get; set; } + public Dictionary? levels { get; set; } /*APINullabe*/ + public Dictionary? roofPivotPolyEdges { get; set; } +} + +/* +For further informations about given the variables, visit: +https://archicadapi.graphisoft.com/documentation/api_shelltype +*/ +public sealed class ArchicadShell : ArchicadShellBase +{ + public class ShellContourEdgeData : Base + { + public string? sideTypeName { get; set; } + public double? sideAngle { get; set; } + public string? edgeTypeName { get; set; } + public string? edgeSideMaterial { get; set; } + } + + public class ShellContourData : Base + { + public ElementShape? shellContourPoly { get; set; } + public Transform shellContourPlane { get; set; } + public double? shellContourHeight { get; set; } + public int? shellContourID { get; set; } + public Dictionary? shellContourEdges { get; set; } + } + + // Geometry and positioning + public string? shellClassName { get; set; } /*APINullabe*/ + public Transform? basePlane { get; set; } /*APINullabe*/ + public bool? flipped { get; set; } /*APINullabe*/ + public bool? hasContour { get; set; } /*APINullabe*/ + public int? numHoles { get; set; } /*APINullabe*/ + public Dictionary? shellContours { get; set; } + public string? defaultEdgeType { get; set; } /*APINullabe*/ + + public double? 
slantAngle { get; set; } + public double? revolutionAngle { get; set; } + public double? distortionAngle { get; set; } + public bool? segmentedSurfaces { get; set; } + public double? shapePlaneTilt { get; set; } + public double? begPlaneTilt { get; set; } + public double? endPlaneTilt { get; set; } + public ElementShape shape { get; set; } + public ElementShape? shape1 { get; set; } /*APINullabe*/ + public ElementShape? shape2 { get; set; } /*APINullabe*/ + public Transform? axisBase { get; set; } /*APINullabe*/ + public Transform? plane1 { get; set; } /*APINullabe*/ + public Transform? plane2 { get; set; } /*APINullabe*/ + public Point? begC { get; set; } + public double? begAngle { get; set; } + public Vector? extrusionVector { get; set; } + public Vector? shapeDirection { get; set; } + public Vector? distortionVector { get; set; } + public string? morphingRuleName { get; set; } + + // Model + public class BegShapeEdge : Base + { + public string? begShapeEdgeTrimSideType { get; set; } + public double? begShapeEdgeTrimSideAngle { get; set; } + public string? begShapeEdgeSideMaterial { get; set; } + public string? begShapeEdgeType { get; set; } + } + + public class EndShapeEdge : Base + { + public string? endShapeEdgeTrimSideType { get; set; } + public double? endShapeEdgeTrimSideAngle { get; set; } + public string? endShapeEdgeSideMaterial { get; set; } + public string? endShapeEdgeType { get; set; } + } + + public class ExtrudedEdge1 : Base + { + public string? extrudedEdgeTrimSideType1 { get; set; } + public double? extrudedEdgeTrimSideAngle1 { get; set; } + public string? extrudedEdgeSideMaterial1 { get; set; } + public string? extrudedEdgeType1 { get; set; } + } + + public class ExtrudedEdge2 : Base + { + public string? extrudedEdgeTrimSideType2 { get; set; } + public double? extrudedEdgeTrimSideAngle2 { get; set; } + public string? extrudedEdgeSideMaterial2 { get; set; } + public string? extrudedEdgeType2 { get; set; } + } + + public class RevolvedEdge1 : Base + { + public string? revolvedEdgeTrimSideType1 { get; set; } + public double? revolvedEdgeTrimSideAngle1 { get; set; } + public string? revolvedEdgeSideMaterial1 { get; set; } + public string? revolvedEdgeType1 { get; set; } + } + + public class RevolvedEdge2 : Base + { + public string? revolvedEdgeTrimSideType2 { get; set; } + public double? revolvedEdgeTrimSideAngle2 { get; set; } + public string? revolvedEdgeSideMaterial2 { get; set; } + public string? revolvedEdgeType2 { get; set; } + } + + public class RuledEdge1 : Base + { + public string? ruledEdgeTrimSideType1 { get; set; } + public double? ruledEdgeTrimSideAngle1 { get; set; } + public string? ruledEdgeSideMaterial1 { get; set; } + public string? ruledEdgeType1 { get; set; } + } + + public class RuledEdge2 : Base + { + public string? ruledEdgeTrimSideType2 { get; set; } + public double? ruledEdgeTrimSideAngle2 { get; set; } + public string? ruledEdgeSideMaterial2 { get; set; } + public string? ruledEdgeType2 { get; set; } + } + + public BegShapeEdge? begShapeEdge { get; set; } + public EndShapeEdge? endShapeEdge { get; set; } + public ExtrudedEdge1? extrudedEdge1 { get; set; } + public ExtrudedEdge2? extrudedEdge2 { get; set; } + public RevolvedEdge1? revolvedEdge1 { get; set; } + public RevolvedEdge2? revolvedEdge2 { get; set; } + public RuledEdge1? ruledEdge1 { get; set; } + public RuledEdge2? 
ruledEdge2 { get; set; } +} diff --git a/src/Objects/BuiltElements/Archicad/ArchicadRoom.cs b/src/Objects/BuiltElements/Archicad/ArchicadRoom.cs new file mode 100644 index 00000000..993ba553 --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/ArchicadRoom.cs @@ -0,0 +1,32 @@ +using System.Collections.Generic; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements.Archicad; + +/* +For further informations about given the variables, visit: +https://archicadapi.graphisoft.com/documentation/api_zonetype +*/ +public class ArchicadRoom : Room +{ + // Element base + public string elementType { get; set; } + + public List classifications { get; set; } + public Base? elementProperties { get; set; } + public Base? componentProperties { get; set; } + + public override Level? level + { + get => archicadLevel; + set => archicadLevel = value as ArchicadLevel ?? null; + } + + [JsonIgnore] + public ArchicadLevel? archicadLevel { get; set; } + + public string? layer { get; set; } /*APINullabe*/ + + public ElementShape shape { get; set; } +} diff --git a/src/Objects/BuiltElements/Archicad/ArchicadWall.cs b/src/Objects/BuiltElements/Archicad/ArchicadWall.cs new file mode 100644 index 00000000..30471504 --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/ArchicadWall.cs @@ -0,0 +1,123 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements.Archicad; + +/* +For further informations about given the variables, visit: +https://archicadapi.graphisoft.com/documentation/api_walltype +*/ +public class ArchicadWall : Wall +{ + [SchemaInfo("ArchicadWall", "Creates an Archicad wall.", "Archicad", "Structure")] + public ArchicadWall() { } + + // Element base + public string? elementType { get; set; } /*APINullabe*/ + + public List? classifications { get; set; } /*APINullabe*/ + public Base? elementProperties { get; set; } + public Base? componentProperties { get; set; } + + public override Level? level + { + get => archicadLevel; + internal set + { + if (value is ArchicadLevel or null) + { + archicadLevel = value as ArchicadLevel; + } + else + { + throw new ArgumentException($"Expected object of type {nameof(ArchicadLevel)}"); + } + } + } + + [JsonIgnore] + public ArchicadLevel? archicadLevel { get; set; } /*APINullabe*/ + + public string? layer { get; set; } /*APINullabe*/ + + // Wall geometry + public double? baseOffset { get; set; } /*APINullabe*/ + public Point startPoint { get; set; } + public Point endPoint { get; set; } + + public string? structure { get; set; } /*APINullabe*/ + public string? geometryMethod { get; set; } /*APINullabe*/ + public string? wallComplexity { get; set; } /*APINullabe*/ + + public string? buildingMaterialName { get; set; } + public string? compositeName { get; set; } + public string? profileName { get; set; } + public double? arcAngle { get; set; } + + public ElementShape? shape { get; set; } + + public double? thickness { get; set; } /*APINullabe*/ + + public double? outsideSlantAngle { get; set; } + public double? insideSlantAngle { get; set; } + + public bool? polyWalllCornersCanChange { get; set; } + + // Wall and stories relation + public double? topOffset { get; set; } /*APINullabe*/ + public short? relativeTopStory { get; set; } /*APINullabe*/ + public string? referenceLineLocation { get; set; } /*APINullabe*/ + public double? referenceLineOffset { get; set; } + public double? 
offsetFromOutside { get; set; } /*APINullabe*/ + public int? referenceLineStartIndex { get; set; } /*APINullabe*/ + public int? referenceLineEndIndex { get; set; } /*APINullabe*/ + public bool flipped { get; set; } + + // Floor Plan and Section - Floor Plan Display + public string? showOnStories { get; set; } /*APINullabe*/ + public string? displayOptionName { get; set; } /*APINullabe*/ + public string? showProjectionName { get; set; } /*APINullabe*/ + + // Floor Plan and Section - Cut Surfaces parameters + public short? cutLinePen { get; set; } + public string? cutLinetype { get; set; } + public short? overrideCutFillPen { get; set; } + public short? overrideCutFillBackgroundPen { get; set; } + + // Floor Plan and Section - Outlines parameters + public short? uncutLinePen { get; set; } /*APINullabe*/ + public string? uncutLinetype { get; set; } /*APINullabe*/ + public short? overheadLinePen { get; set; } /*APINullabe*/ + public string? overheadLinetype { get; set; } /*APINullabe*/ + + // Model - Override Surfaces + public string? referenceMaterialName { get; set; } + public int? referenceMaterialStartIndex { get; set; } + public int? referenceMaterialEndIndex { get; set; } + public string? oppositeMaterialName { get; set; } + public int? oppositeMaterialStartIndex { get; set; } + public int? oppositeMaterialEndIndex { get; set; } + public string? sideMaterialName { get; set; } + public bool? materialsChained { get; set; } /*APINullabe*/ + public bool? inheritEndSurface { get; set; } /*APINullabe*/ + public bool? alignTexture { get; set; } /*APINullabe*/ + public int? sequence { get; set; } /*APINullabe*/ + + // Model - Log Details (log height, start with half log, surface of horizontal edges, log shape) + public double? logHeight { get; set; } + public bool? startWithHalfLog { get; set; } + public string? surfaceOfHorizontalEdges { get; set; } + public string? logShape { get; set; } + + // Model - Defines the relation of wall to zones (Zone Boundary, Reduce Zone Area Only, No Effect on Zones) + public string? wallRelationToZoneName { get; set; } /*APINullabe*/ + + // Does it have any embedded object? + public bool? hasDoor { get; set; } /*APINullabe*/ + + public bool? 
hasWindow { get; set; } /*APINullabe*/ +} diff --git a/src/Objects/BuiltElements/Archicad/AssemblySegment.cs b/src/Objects/BuiltElements/Archicad/AssemblySegment.cs new file mode 100644 index 00000000..b3224562 --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/AssemblySegment.cs @@ -0,0 +1,61 @@ +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Archicad; + +public class AssemblySegment : Base +{ + public bool circleBased { get; set; } + + public string modelElemStructureType { get; set; } + + public double nominalHeight { get; set; } + + public double nominalWidth { get; set; } + + public bool isHomogeneous { get; set; } + + public double endWidth { get; set; } + + public double endHeight { get; set; } + + public bool isEndWidthAndHeightLinked { get; set; } + + public bool isWidthAndHeightLinked { get; set; } + + public string profileAttrName { get; set; } + + public string buildingMaterial { get; set; } +} + +public class AssemblySegmentScheme : Base +{ + public string lengthType { get; set; } + + public double fixedLength { get; set; } + + public double lengthProportion { get; set; } +} + +public class AssemblySegmentCut : Base +{ + public string cutType { get; set; } + + public double customAngle { get; set; } +} + +public class Hole : Base +{ + public string holeType { get; set; } + + public bool holeContourOn { get; set; } + + public int holeId { get; set; } + + public double centerx { get; set; } + + public double centerz { get; set; } + + public double width { get; set; } + + public double height { get; set; } +} diff --git a/src/Objects/BuiltElements/Archicad/Classification.cs b/src/Objects/BuiltElements/Archicad/Classification.cs new file mode 100644 index 00000000..ef78cd14 --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/Classification.cs @@ -0,0 +1,21 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Archicad; + +public class Classification : Base +{ + public Classification() { } + + [SchemaInfo("Classification", "A classification to set on an element", "BIM", "All")] + public Classification(string system, string? code = null, string? name = null) + { + this.system = system; + this.code = code; + this.name = name; + } + + public string system { get; set; } + public string? code { get; set; } + public string? name { get; set; } +} diff --git a/src/Objects/BuiltElements/Archicad/ComponentProperties.cs b/src/Objects/BuiltElements/Archicad/ComponentProperties.cs new file mode 100644 index 00000000..8183626f --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/ComponentProperties.cs @@ -0,0 +1,50 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Archicad; + +public class ComponentProperties : Base +{ + public ComponentProperties() { } + + [SchemaInfo("ComponentProperties", "An Archicad element component properties", "Archicad", "Elements")] + public ComponentProperties( + string name, + List propertyGroups, + [SchemaParamInfo("(Optional) Speckle units.")] string units = "" + ) + { + this.name = name; + this.propertyGroups = propertyGroups; + this.units = units; + } + + public string name { get; set; } + public List? propertyGroups { get; set; } + public string units { get; set; } + + /// + /// Turns a List of ComponentProperties into a Base so that it can be used with the Speckle properties prop + /// + /// + /// + public static Base? ToBase(List? 
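Classification above is one of the few Archicad helper types with a SchemaBuilder constructor; the system is required while code and name are optional. A one-line usage sketch:

```csharp
using Objects.BuiltElements.Archicad;

var classification = new Classification("Uniclass 2015", code: "EF_25_10", name: "Walls");
```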
componentPropertiesList) + { + if (componentPropertiesList == null || componentPropertiesList.Count == 0) + { + return null; + } + + var @base = new Base(); + + foreach (ComponentProperties componentProperties in componentPropertiesList) + { + @base[RemoveDisallowedPropNameChars(componentProperties.name)] = PropertyGroup.ToBase( + componentProperties.propertyGroups + ); + } + + return @base; + } +} diff --git a/src/Objects/BuiltElements/Archicad/DirectShape.cs b/src/Objects/BuiltElements/Archicad/DirectShape.cs new file mode 100644 index 00000000..8b2f11ca --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/DirectShape.cs @@ -0,0 +1,27 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Archicad; + +public class DirectShape : Base +{ + public DirectShape() { } + + public DirectShape(string applicationId, List displayValue) + { + this.applicationId = applicationId; + this.displayValue = displayValue; + } + + // Element base + public string elementType { get; set; } + public List classifications { get; set; } + + public ArchicadLevel level { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Archicad/ElementShape.cs b/src/Objects/BuiltElements/Archicad/ElementShape.cs new file mode 100644 index 00000000..4da45cb3 --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/ElementShape.cs @@ -0,0 +1,55 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Primitive; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Archicad; + +public sealed class ElementShape : Base +{ + public ElementShape() { } + + public ElementShape(Polyline contourPolyline, List? holePolylines = null) + { + this.contourPolyline = contourPolyline; + this.holePolylines = holePolylines; + } + + public Polyline contourPolyline { get; set; } + + public List? holePolylines { get; set; } + + public sealed class PolylineSegment : Base, ICurve + { + public PolylineSegment() { } + + public PolylineSegment(Point startPoint, Point endPoint, double? arcAngle = null, bool? bodyFlag = null) + { + this.startPoint = startPoint; + this.endPoint = endPoint; + this.arcAngle = arcAngle ?? 0; + this.bodyFlag = bodyFlag; + } + + public Point startPoint { get; set; } + public Point endPoint { get; set; } + public double arcAngle { get; set; } + public bool? bodyFlag { get; set; } + public double length { get; set; } + public Interval domain { get; set; } = new(0, 1); + } + + public sealed class Polyline : Base, ICurve + { + public Polyline() { } + + public Polyline(List segments) + { + polylineSegments = segments; + } + + public List polylineSegments { get; set; } = new(); + public double length { get; set; } + public Interval domain { get; set; } = new(0, 1); + } +} diff --git a/src/Objects/BuiltElements/Archicad/Fenestration.cs b/src/Objects/BuiltElements/Archicad/Fenestration.cs new file mode 100644 index 00000000..ce262f92 --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/Fenestration.cs @@ -0,0 +1,77 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Archicad; + +public class ArchicadFenestration : Base, IDisplayValue> +{ + public string parentApplicationId { get; set; } + + // Element base + public string? elementType { get; set; } /*APINullabe*/ + + public List? classifications { get; set; } /*APINullabe*/ + public Base? 
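ElementShape above wraps an Archicad-specific polyline built from segments. A sketch of a rectangular contour, assuming the Point constructor from the geometry classes elsewhere in the patch and List&lt;PolylineSegment&gt; for the segment list, since the generic arguments are not legible here:

```csharp
using System.Collections.Generic;
using Objects.BuiltElements.Archicad;
using Objects.Geometry;

Point P(double x, double y) => new(x, y, 0, "m");

var contour = new ElementShape.Polyline(
  new List<ElementShape.PolylineSegment>
  {
    new(P(0, 0), P(4, 0)), // straight segments: arcAngle defaults to 0
    new(P(4, 0), P(4, 2)),
    new(P(4, 2), P(0, 2)),
    new(P(0, 2), P(0, 0)),
  }
);

var shape = new ElementShape(contour); // hole polylines omitted
```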
elementProperties { get; set; } + public Base? componentProperties { get; set; } + + public double? width { get; set; } /*APINullabe*/ + public double? height { get; set; } /*APINullabe*/ + public double? subFloorThickness { get; set; } /*APINullabe*/ + public bool? reflected { get; set; } /*APINullabe*/ + public bool? oSide { get; set; } /*APINullabe*/ + public bool? refSide { get; set; } /*APINullabe*/ + public string? verticalLinkTypeName { get; set; } + public short? verticalLinkStoryIndex { get; set; } + public bool? wallCutUsing { get; set; } + public short? pen { get; set; } /*APINullabe*/ + public string? lineTypeName { get; set; } /*APINullabe*/ + public string? buildingMaterial { get; set; } /*APINullabe*/ + public string? sectFill { get; set; } /*APINullabe*/ + public short? sectFillPen { get; set; } /*APINullabe*/ + public short? sectBackgroundPen { get; set; } /*APINullabe*/ + public short? sectContPen { get; set; } /*APINullabe*/ + public string? cutLineType { get; set; } /*APINullabe*/ + public string? aboveViewLineType { get; set; } /*APINullabe*/ + public short? aboveViewLinePen { get; set; } /*APINullabe*/ + public short? belowViewLinePen { get; set; } /*APINullabe*/ + public string? belowViewLineType { get; set; } /*APINullabe*/ + public bool? useObjectPens { get; set; } /*APINullabe*/ + public bool? useObjLinetypes { get; set; } /*APINullabe*/ + public bool? useObjMaterials { get; set; } /*APINullabe*/ + public bool? useObjSectAttrs { get; set; } /*APINullabe*/ + public string? libraryPart { get; set; } /*APINullabe*/ + public string? displayOptionName { get; set; } /*APINullabe*/ + + [DetachProperty] + public List displayValue { get; set; } +} + +public class ArchicadDoorWindowBase : ArchicadFenestration +{ + public double? revealDepthFromSide { get; set; } /*APINullabe*/ + public double? jambDepthHead { get; set; } /*APINullabe*/ + public double? jambDepth { get; set; } /*APINullabe*/ + public double? jambDepth2 { get; set; } /*APINullabe*/ + public double? objLoc { get; set; } /*APINullabe*/ + public double? lower { get; set; } /*APINullabe*/ + public string? directionType { get; set; } /*APINullabe*/ + + public Point? startPoint { get; set; } /*APINullabe*/ + public Point? dirVector { get; set; } /*APINullabe*/ +} + +public sealed class ArchicadDoor : ArchicadDoorWindowBase { } + +public sealed class ArchicadWindow : ArchicadDoorWindowBase { } + +public sealed class ArchicadSkylight : ArchicadFenestration +{ + public uint? vertexID { get; set; } /*APINullabe*/ + public string? skylightFixMode { get; set; } /*APINullabe*/ + public string? skylightAnchor { get; set; } /*APINullabe*/ + public Point? anchorPosition { get; set; } /*APINullabe*/ + public double? anchorLevel { get; set; } /*APINullabe*/ + public double? azimuthAngle { get; set; } /*APINullabe*/ + public double? 
elevationAngle { get; set; } /*APINullabe*/ +} diff --git a/src/Objects/BuiltElements/Archicad/Property.cs b/src/Objects/BuiltElements/Archicad/Property.cs new file mode 100644 index 00000000..d7682bce --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/Property.cs @@ -0,0 +1,54 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Archicad; + +public class Property : Base +{ + public Property() { } + + [SchemaInfo("Property", "An Archicad element property", "Archicad", "Elements")] + public Property(string name, object value, [SchemaParamInfo("(Optional) Speckle units.")] string units = "") + { + this.name = name; + this.value = value; + this.units = units; + } + + public string name { get; set; } + public object? value { get; set; } + public List? values { get; set; } + public string units { get; set; } + + /// + /// Turns a List of Property into a Base so that it can be used with the Speckle properties prop + /// + /// + /// + public static Base? ToBase(List? properties) + { + if (properties == null || properties.Count == 0) + { + return null; + } + + var @base = new Base(); + + foreach (Property property in properties) + { + var key = RemoveDisallowedPropNameChars(property.name); + if (string.IsNullOrEmpty(key) || @base[key] != null) + { + continue; + } + + @base[key] = property.value; + + // todo + //property.values; + } + + return @base; + } +} diff --git a/src/Objects/BuiltElements/Archicad/PropertyGroup.cs b/src/Objects/BuiltElements/Archicad/PropertyGroup.cs new file mode 100644 index 00000000..4eb949cf --- /dev/null +++ b/src/Objects/BuiltElements/Archicad/PropertyGroup.cs @@ -0,0 +1,48 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Archicad; + +public class PropertyGroup : Base +{ + public PropertyGroup() { } + + [SchemaInfo("PropertyGroup", "An Archicad element property group", "Archicad", "Elements")] + public PropertyGroup( + string name, + List propertyList, + [SchemaParamInfo("(Optional) Speckle units.")] string units = "" + ) + { + this.name = name; + this.propertyList = propertyList; + this.units = units; + } + + public string name { get; set; } + public List? propertyList { get; set; } + public string units { get; set; } + + /// + /// Turns a List of PropertyGroup into a Base so that it can be used with the Speckle properties prop + /// + /// + /// + public static Base? ToBase(List? 
propertyGroups) + { + if (propertyGroups == null || propertyGroups.Count == 0) + { + return null; + } + + var @base = new Base(); + + foreach (PropertyGroup propertyGroup in propertyGroups) + { + @base[RemoveDisallowedPropNameChars(propertyGroup.name)] = Property.ToBase(propertyGroup.propertyList); + } + + return @base; + } +} diff --git a/src/Objects/BuiltElements/Area.cs b/src/Objects/BuiltElements/Area.cs new file mode 100644 index 00000000..9305f873 --- /dev/null +++ b/src/Objects/BuiltElements/Area.cs @@ -0,0 +1,38 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Area : Base, IHasArea, IHasVolume, IDisplayValue> +{ + public Area() { } + + /// + /// SchemaBuilder constructor for an Area + /// + [SchemaInfo("Area", "Creates a Speckle area", "BIM", "Other")] + public Area(string name, string number, Level level, [SchemaMainParam] Point center) + { + this.name = name; + this.number = number; + this.level = level; + this.center = center; + } + + public string name { get; set; } + public string number { get; set; } + public Level level { get; set; } + public Point center { get; set; } + public List voids { get; set; } = new(); + public ICurve outline { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } + + public double area { get; set; } + public double volume { get; set; } +} diff --git a/src/Objects/BuiltElements/Beam.cs b/src/Objects/BuiltElements/Beam.cs new file mode 100644 index 00000000..7d917c6c --- /dev/null +++ b/src/Objects/BuiltElements/Beam.cs @@ -0,0 +1,26 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Beam : Base, IDisplayValue> +{ + public Beam() { } + + [SchemaInfo("Beam", "Creates a Speckle beam", "BIM", "Structure")] + public Beam([SchemaMainParam] ICurve baseLine) + { + this.baseLine = baseLine; + } + + public ICurve baseLine { get; set; } + + public virtual Level? 
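Property, PropertyGroup and ComponentProperties above nest through their static ToBase helpers: names are sanitized with RemoveDisallowedPropNameChars and duplicate or empty keys are skipped. A sketch of the three-level conversion, assuming the list generic arguments that are not legible in this patch:

```csharp
using System.Collections.Generic;
using Objects.BuiltElements.Archicad;
using Speckle.Core.Models;

var props = new List<Property>
{
  new("FireRating", "REI 60"),
  new("LoadBearing", true),
};
var group = new PropertyGroup("Structural", props);
var component = new ComponentProperties("Core", new List<PropertyGroup> { group });

// Each level returns a Base keyed by sanitized names, or null for empty input.
Base? tree = ComponentProperties.ToBase(new List<ComponentProperties> { component });
```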
level { get; internal set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Brace.cs b/src/Objects/BuiltElements/Brace.cs new file mode 100644 index 00000000..c51cd568 --- /dev/null +++ b/src/Objects/BuiltElements/Brace.cs @@ -0,0 +1,24 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Brace : Base, IDisplayValue> +{ + public Brace() { } + + [SchemaInfo("Brace", "Creates a Speckle brace", "BIM", "Structure")] + public Brace([SchemaMainParam] ICurve baseLine) + { + this.baseLine = baseLine; + } + + public ICurve baseLine { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/CableTray.cs b/src/Objects/BuiltElements/CableTray.cs new file mode 100644 index 00000000..f567fc18 --- /dev/null +++ b/src/Objects/BuiltElements/CableTray.cs @@ -0,0 +1,18 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class CableTray : Base, IDisplayValue> +{ + public ICurve baseCurve { get; set; } + public double width { get; set; } + public double height { get; set; } + public double length { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Ceiling.cs b/src/Objects/BuiltElements/Ceiling.cs new file mode 100644 index 00000000..b88c9264 --- /dev/null +++ b/src/Objects/BuiltElements/Ceiling.cs @@ -0,0 +1,34 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Ceiling : Base, IDisplayValue> +{ + public Ceiling() { } + + [SchemaInfo("Ceiling", "Creates a Speckle ceiling", "BIM", "Architecture")] + public Ceiling( + [SchemaMainParam] ICurve outline, + List? voids = null, + [SchemaParamInfo("Any nested elements that this ceiling might have")] List? elements = null + ) + { + this.outline = outline; + this.voids = voids ?? new(); + this.elements = elements; + } + + public ICurve outline { get; set; } + public List voids { get; set; } = new(); + + [DetachProperty] + public List? 
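Like Beam above, Brace carries a detached displayValue list alongside its defining baseLine. A sketch, assuming displayValue is List&lt;Mesh&gt; and the Line and Point constructors from the geometry classes elsewhere in the patch:

```csharp
using System.Collections.Generic;
using Objects.BuiltElements;
using Objects.Geometry;

var brace = new Brace(new Line(new Point(0, 0, 0, "m"), new Point(3, 0, 3, "m"), "m"))
{
  units = "m",
  // display meshes are normally produced by a converter and detached on send
  displayValue = new List<Mesh>(),
};
```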
elements { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Civil/CivilAlignment.cs b/src/Objects/BuiltElements/Civil/CivilAlignment.cs new file mode 100644 index 00000000..c0be779e --- /dev/null +++ b/src/Objects/BuiltElements/Civil/CivilAlignment.cs @@ -0,0 +1,17 @@ +namespace Objects.BuiltElements.Civil; + +public class CivilAlignment : Alignment +{ + public string type { get; set; } + + public string site { get; set; } + + public string style { get; set; } + + public double offset { get; set; } + + /// + /// Name of parent alignment if this is an offset alignment + /// + public string parent { get; set; } +} diff --git a/src/Objects/BuiltElements/Civil/CivilProfile.cs b/src/Objects/BuiltElements/Civil/CivilProfile.cs new file mode 100644 index 00000000..53ff30c2 --- /dev/null +++ b/src/Objects/BuiltElements/Civil/CivilProfile.cs @@ -0,0 +1,23 @@ +using System.Collections.Generic; +using Objects.Geometry; + +namespace Objects.BuiltElements.Civil; + +public class CivilProfile : Profile +{ + public string type { get; set; } + + public string style { get; set; } + + public double offset { get; set; } + + /// + /// Points of vertical intersection + /// + public List pvis { get; set; } + + /// + /// Name of parent profile if this is an offset profile + /// + public string parent { get; set; } +} diff --git a/src/Objects/BuiltElements/Column.cs b/src/Objects/BuiltElements/Column.cs new file mode 100644 index 00000000..e13e054e --- /dev/null +++ b/src/Objects/BuiltElements/Column.cs @@ -0,0 +1,26 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Column : Base, IDisplayValue> +{ + public Column() { } + + [SchemaInfo("Column", "Creates a Speckle column", "BIM", "Structure")] + public Column([SchemaMainParam] ICurve baseLine) + { + this.baseLine = baseLine; + } + + public ICurve baseLine { get; set; } + + public virtual Level? 
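The Ceiling constructor above takes the outline as the main parameter plus optional voids and nested elements. A sketch, assuming a Polyline with a flat coordinate list can serve as the ICurve outline:

```csharp
using System.Collections.Generic;
using Objects.BuiltElements;
using Objects.Geometry;

var outline = new Polyline
{
  value = new List<double> { 0, 0, 3, 6, 0, 3, 6, 4, 3, 0, 4, 3 }, // x,y,z triplets at 3 m
  closed = true,
  units = "m",
};
var ceiling = new Ceiling(outline) { units = "m" }; // voids and elements left at their defaults
```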
level { get; internal set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Conduit.cs b/src/Objects/BuiltElements/Conduit.cs new file mode 100644 index 00000000..aaa908e2 --- /dev/null +++ b/src/Objects/BuiltElements/Conduit.cs @@ -0,0 +1,17 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Conduit : Base, IDisplayValue> +{ + public ICurve baseCurve { get; set; } + public double diameter { get; set; } + public double length { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Duct.cs b/src/Objects/BuiltElements/Duct.cs new file mode 100644 index 00000000..f530be6f --- /dev/null +++ b/src/Objects/BuiltElements/Duct.cs @@ -0,0 +1,66 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements; + +public class Duct : Base, IDisplayValue> +{ + public Duct() { } + + /// + /// SchemaBuilder constructor for a Speckle duct + /// + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to , , and params + [SchemaInfo("Duct", "Creates a Speckle duct", "BIM", "MEP"), SchemaDeprecated] + public Duct([SchemaMainParam] Line baseLine, double width, double height, double diameter, double velocity = 0) + { + baseCurve = baseLine; + this.width = width; + this.height = height; + this.diameter = diameter; + this.velocity = velocity; + } + + /// + /// SchemaBuilder constructor for a Speckle duct + /// + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to , , and params + [SchemaInfo("Duct", "Creates a Speckle duct", "BIM", "MEP")] + public Duct([SchemaMainParam] ICurve baseCurve, double width, double height, double diameter, double velocity = 0) + { + this.baseCurve = baseCurve; + this.width = width; + this.height = height; + this.diameter = diameter; + this.velocity = velocity; + } + + [JsonIgnore, Obsolete("Replaced with baseCurve property")] + public Line baseLine { get; set; } + + public ICurve baseCurve { get; set; } + public double width { get; set; } + public double height { get; set; } + public double diameter { get; set; } + public double length { get; set; } + public double velocity { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Featureline.cs b/src/Objects/BuiltElements/Featureline.cs new file mode 100644 index 00000000..3033d75e --- /dev/null +++ b/src/Objects/BuiltElements/Featureline.cs @@ -0,0 +1,31 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Featureline : Base, IDisplayValue> +{ + /// + /// The base curve of the featureline + /// + public ICurve curve { get; set; } + + /// + /// The points constructing the Featureline + /// + /// + /// Can include both intersection and elevation points + /// + public List points { get; set; } + + public string name { get; set; } + + public string units { get; set; } + + /// + /// The 3D curves generated from the curve and points of the featureline + /// + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Floor.cs 
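Duct above keeps two SchemaBuilder constructors: the Line-based one is SchemaDeprecated and its baseLine property is obsolete in favour of baseCurve, so new code should pass an ICurve. A sketch with units assigned explicitly, as the remarks request; it assumes ICurve lives in the Objects root namespace, which the using-free references in these files suggest:

```csharp
using Objects;
using Objects.BuiltElements;
using Objects.Geometry;

// Typing the variable as ICurve binds to the non-deprecated constructor overload.
ICurve baseCurve = new Line(new Point(0, 0, 2700, "mm"), new Point(10000, 0, 2700, "mm"), "mm");

var duct = new Duct(baseCurve, width: 400, height: 200, diameter: 0, velocity: 5)
{
  units = "mm", // width, height and diameter are raw numbers, so units must be set
};
```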
b/src/Objects/BuiltElements/Floor.cs new file mode 100644 index 00000000..80ae0dde --- /dev/null +++ b/src/Objects/BuiltElements/Floor.cs @@ -0,0 +1,36 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Floor : Base, IDisplayValue> +{ + public Floor() { } + + [SchemaInfo("Floor", "Creates a Speckle floor", "BIM", "Architecture")] + public Floor( + [SchemaMainParam] ICurve outline, + List? voids = null, + [SchemaParamInfo("Any nested elements that this floor might have")] List? elements = null + ) + { + this.outline = outline; + + this.voids = voids ?? new(); + + this.elements = elements; + } + + public ICurve outline { get; set; } + public List voids { get; set; } = new(); + + [DetachProperty] + public List? elements { get; set; } + public virtual Level? level { get; internal set; } + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/GridLine.cs b/src/Objects/BuiltElements/GridLine.cs new file mode 100644 index 00000000..c6fb7e01 --- /dev/null +++ b/src/Objects/BuiltElements/GridLine.cs @@ -0,0 +1,36 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class GridLine : Base, IDisplayValue> +{ + public GridLine() { } + + [SchemaInfo("GridLine", "Creates a Speckle grid line", "BIM", "Other"), SchemaDeprecated] + public GridLine( + [SchemaParamInfo("NOTE: only Line and Arc curves are supported in Revit"), SchemaMainParam] ICurve baseLine + ) + { + this.baseLine = baseLine; + } + + [SchemaInfo("GridLine", "Creates a Speckle grid line with a label", "BIM", "Other")] + public GridLine( + [SchemaParamInfo("NOTE: only Line and Arc curves are supported in Revit"), SchemaMainParam] ICurve baseLine, + string label = "" + ) + { + this.baseLine = baseLine; + this.label = label; + } + + public ICurve baseLine { get; set; } + public string label { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Level.cs b/src/Objects/BuiltElements/Level.cs new file mode 100644 index 00000000..1eb13621 --- /dev/null +++ b/src/Objects/BuiltElements/Level.cs @@ -0,0 +1,29 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Level : Base +{ + //public List elements { get; set; } + + public Level() { } + + /// + /// SchemaBuilder constructor for a Speckle level + /// + /// + /// + /// Assign units when using this constructor due to param + [SchemaInfo("Level", "Creates a Speckle level", "BIM", "Architecture")] + public Level(string name, double elevation) + { + this.name = name; + this.elevation = elevation; + } + + public string name { get; set; } + public double elevation { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/BuiltElements/Network.cs b/src/Objects/BuiltElements/Network.cs new file mode 100644 index 00000000..d20d6412 --- /dev/null +++ b/src/Objects/BuiltElements/Network.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements; + +/// +/// Represents graph connections between built elements objects +/// +/// +/// Network may need to be created first in native applications before they are linked. 
+/// </remarks>
+[Obsolete("Networks are no longer used in any connector to assemble MEP systems.")]
+public class Network : Base
+{
+  public Network() { }
+
+  public string name { get; set; }
+
+  /// <summary>
+  /// The elements contained in the network
+  /// </summary>
+  public List<NetworkElement> elements { get; set; }
+
+  /// <summary>
+  /// The connections between <see cref="elements"/>
+  /// </summary>
+  public List<NetworkLink> links { get; set; }
+}
+
+[Obsolete("Networks are no longer used in any connector to assemble MEP systems.")]
+public class NetworkElement : Base
+{
+  public NetworkElement() { }
+
+  public string name { get; set; }
+
+  /// <summary>
+  /// The Base object representing the element in the network (eg Pipe, Duct, etc)
+  /// </summary>
+  /// <remarks>
+  /// Currently named "elements" to assist with receiving in connector flatten method.
+  /// </remarks>
+  [DetachProperty]
+  public Base elements { get; set; }
+
+  /// <summary>
+  /// The index of the links in <see cref="network"/> that are connected to this element
+  /// </summary>
+  public List<int> linkIndices { get; set; }
+
+  [JsonIgnore]
+  public Network network { get; set; }
+
+  /// <summary>
+  /// Retrieves the links for this element
+  /// </summary>
+  [JsonIgnore]
+#pragma warning disable CS8619 // Nullability of reference types in value doesn't match target type. Reason: obsolete.
+  public List<NetworkLink> links => linkIndices.Select(i => network?.links[i]).ToList();
+#pragma warning restore CS8619 // Nullability of reference types in value doesn't match target type. Reason: obsolete.
+}
+
+[Obsolete("Networks are no longer used in any connector to assemble MEP systems.")]
+public class NetworkLink : Base
+{
+  public NetworkLink() { }
+
+  public string name { get; set; }
+
+  /// <summary>
+  /// The index of the elements in <see cref="network"/> that are connected by this link
+  /// </summary>
+  public List<int> elementIndices { get; set; }
+
+  [JsonIgnore]
+  public Network network { get; set; }
+
+  /// <summary>
+  /// Retrieves the elements for this link
+  /// </summary>
+  [JsonIgnore]
+#pragma warning disable CS8619 // Nullability of reference types in value doesn't match target type. Reason: obsolete.
+  public List<NetworkElement> elements => elementIndices.Select(i => network?.elements[i]).ToList();
+#pragma warning restore CS8619 // Nullability of reference types in value doesn't match target type. Reason: obsolete.
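+
+  // Illustrative sketch only (assumes a fully populated Network `n`; the variable names are
+  // examples, not part of this class): links and elements reference each other by index
+  // through the parent network rather than by direct object references, so resolution
+  // always goes through `network`:
+  //
+  //   NetworkLink firstLink = n.links[0];
+  //   List<NetworkElement> connected = firstLink.elements;
+  //   // equivalent to firstLink.elementIndices.Select(i => n.elements[i]).ToList()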
+} diff --git a/src/Objects/BuiltElements/Opening.cs b/src/Objects/BuiltElements/Opening.cs new file mode 100644 index 00000000..85985a33 --- /dev/null +++ b/src/Objects/BuiltElements/Opening.cs @@ -0,0 +1,19 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Opening : Base +{ + public Opening() { } + + [SchemaInfo("Arch Opening", "Creates a Speckle opening", "BIM", "Architecture")] + public Opening(ICurve outline) + { + this.outline = outline; + } + + public ICurve outline { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/BuiltElements/Pipe.cs b/src/Objects/BuiltElements/Pipe.cs new file mode 100644 index 00000000..7c6bf1d8 --- /dev/null +++ b/src/Objects/BuiltElements/Pipe.cs @@ -0,0 +1,36 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Pipe : Base, IDisplayValue> +{ + public Pipe() { } + + [SchemaInfo("Pipe", "Creates a Speckle pipe", "BIM", "MEP")] + public Pipe( + [SchemaMainParam] ICurve baseCurve, + double length, + double diameter, + double flowrate = 0, + double relativeRoughness = 0 + ) + { + this.baseCurve = baseCurve; + this.length = length; + this.diameter = diameter; + this["flowRate"] = flowrate; + this["relativeRoughness"] = relativeRoughness; + } + + public ICurve baseCurve { get; set; } + public double length { get; set; } + public double diameter { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Profile.cs b/src/Objects/BuiltElements/Profile.cs new file mode 100644 index 00000000..3f4be956 --- /dev/null +++ b/src/Objects/BuiltElements/Profile.cs @@ -0,0 +1,21 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Profile : Base, IDisplayValue +{ + public List curves { get; set; } + + public string name { get; set; } + + public double startStation { get; set; } + + public double endStation { get; set; } + + public string units { get; set; } + + [DetachProperty] + public Polyline displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Rebar.cs b/src/Objects/BuiltElements/Rebar.cs new file mode 100644 index 00000000..40348472 --- /dev/null +++ b/src/Objects/BuiltElements/Rebar.cs @@ -0,0 +1,158 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +/// +/// A reinforcement bar group comprised of reinforcing bars of the same type and shape. +/// +/// +/// This class is not suitable for freeform rebar, which can have multiple shapes. 
+/// +public class RebarGroup : Base, IHasVolume, IDisplayValue> + where T : RebarShape +{ + public RebarGroup() { } + + /// + /// The shape of the rebar group + /// + [DetachProperty] + public RebarShape shape { get; set; } + + /// + /// The number of rebars in the rebar group + /// + /// + /// Excluded end bars are not included in the count + /// + public int number { get; set; } + + /// + /// Indicates if rebar set includes the first bar + /// + /// + /// Only applicable to stirrup (transverse) rebar + /// + public bool hasFirstBar { get; set; } + + /// + /// Indicates if rebar set includes the last bar + /// + /// + /// Only applicable to stirrup (transverse) rebar + /// + public bool hasLastBar { get; set; } + + /// + /// The start hook of bars in the rebar group + /// + /// + /// Null indicates no start hook + /// + [DetachProperty] + public virtual RebarHook? startHook { get; set; } + + /// + /// The end hook of bars in the rebar group + /// + /// + /// Null indicates no end hook + /// + [DetachProperty] + public virtual RebarHook? endHook { get; set; } + + /// + /// The display representation of the rebar group as centerline curves + /// + [DetachProperty] + public List displayValue { get; set; } + + /// + /// The total volume of the rebar group. + /// + public double volume { get; set; } + + public string units { get; set; } +} + +/// +/// The shape describing the geometry and geometry parameters of a reinforcing bar +/// +public class RebarShape : Base +{ + public RebarShape() { } + + /// + /// The name of the rebar shape + /// + public string name { get; set; } + + /// + /// The type of the rebar shape + /// + public RebarType rebarType { get; set; } + + /// + /// The curves of the rebar shape + /// + /// + /// Typically suppresses hooks and bend radius + /// + public List curves { get; set; } = new(); + + /// + /// The diameter of the rebar bar + /// + public double barDiameter { get; set; } + + public string units { get; set; } +} + +public class RebarHook : Base +{ + public RebarHook() { } + + /// + /// The angle of the hook in radians. + /// + public double angle { get; set; } + + /// + /// The length of the hook. + /// + public double length { get; set; } + + /// + /// The radius of the bend of the hook. 
+ /// + public double radius { get; set; } + + public string units { get; set; } +} + +public enum RebarType +{ + Unknown = 0, + Standard = 10, + StirrupPolygonal = 20, + StirrupSpiral = 30, + StirrupTapered = 40 +} + +#region Obsolete +[Obsolete("Deprecated in 2.17: Use the RebarGroup class instead")] +public class Rebar : Base, IHasVolume, IDisplayValue> +{ + public List curves { get; set; } = new(); + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } + + public double volume { get; set; } +} +#endregion diff --git a/src/Objects/BuiltElements/Revit/AdaptiveComponent.cs b/src/Objects/BuiltElements/Revit/AdaptiveComponent.cs new file mode 100644 index 00000000..e06fb4ad --- /dev/null +++ b/src/Objects/BuiltElements/Revit/AdaptiveComponent.cs @@ -0,0 +1,40 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class AdaptiveComponent : Base, IDisplayValue> +{ + public AdaptiveComponent() { } + + [SchemaInfo("AdaptiveComponent", "Creates a Revit adaptive component by points", "Revit", "Families")] + public AdaptiveComponent( + string type, + string family, + List basePoints, + bool flipped = false, + List? parameters = null + ) + { + this.type = type; + this.family = family; + this.basePoints = basePoints; + this.flipped = flipped; + this.parameters = parameters?.ToBase(); + } + + public string type { get; set; } + public string family { get; set; } + public List basePoints { get; set; } + public bool flipped { get; set; } + public string elementId { get; set; } + public Base? parameters { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/BuildingPad.cs b/src/Objects/BuiltElements/Revit/BuildingPad.cs new file mode 100644 index 00000000..364a92b4 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/BuildingPad.cs @@ -0,0 +1,25 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class BuildingPad : Base, IDisplayValue> +{ + public ICurve outline { get; set; } + + public List voids { get; set; } = new(); + + public string type { get; set; } + + public Level level { get; set; } + + public Base parameters { get; set; } + + public string elementId { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/Curve/ModelCurves.cs b/src/Objects/BuiltElements/Revit/Curve/ModelCurves.cs new file mode 100644 index 00000000..b0f45848 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/Curve/ModelCurves.cs @@ -0,0 +1,81 @@ +using System.Collections.Generic; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit.Curve; + +public class ModelCurve : Base +{ + public ModelCurve() { } + + [SchemaInfo("ModelCurve", "Creates a Revit model curve", "Revit", "Curves")] + public ModelCurve([SchemaMainParam] ICurve baseCurve, string lineStyle, List? parameters = null) + { + this.baseCurve = baseCurve; + this.lineStyle = lineStyle; + this.parameters = parameters?.ToBase(); + } + + public ICurve baseCurve { get; set; } + public string lineStyle { get; set; } + public Base? 
parameters { get; set; } + public string elementId { get; set; } + + public string units { get; set; } +} + +public class DetailCurve : Base +{ + public DetailCurve() { } + + [SchemaInfo("DetailCurve", "Creates a Revit detail curve", "Revit", "Curves")] + public DetailCurve([SchemaMainParam] ICurve baseCurve, string lineStyle, List? parameters = null) + { + this.baseCurve = baseCurve; + this.lineStyle = lineStyle; + this.parameters = parameters?.ToBase(); + } + + public ICurve baseCurve { get; set; } + public string lineStyle { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } + + public string units { get; set; } +} + +public class RoomBoundaryLine : Base +{ + public RoomBoundaryLine() { } + + [SchemaInfo("RoomBoundaryLine", "Creates a Revit room boundary line", "Revit", "Curves")] + public RoomBoundaryLine([SchemaMainParam] ICurve baseCurve, List? parameters = null) + { + this.baseCurve = baseCurve; + this.parameters = parameters?.ToBase(); + } + + public ICurve baseCurve { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } + public Level level { get; set; } + public string units { get; set; } +} + +public class SpaceSeparationLine : Base +{ + public SpaceSeparationLine() { } + + [SchemaInfo("SpaceSeparationLine", "Creates a Revit space separation line", "Revit", "Curves")] + public SpaceSeparationLine([SchemaMainParam] ICurve baseCurve, List? parameters = null) + { + this.baseCurve = baseCurve; + this.parameters = parameters?.ToBase(); + } + + public ICurve baseCurve { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } + public string units { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/DirectShape.cs b/src/Objects/BuiltElements/Revit/DirectShape.cs new file mode 100644 index 00000000..aa103100 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/DirectShape.cs @@ -0,0 +1,61 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class DirectShape : Base, IDisplayValue> +{ + public DirectShape() { } + + /// + /// Constructs a new instance given a list of objects. + /// + /// The name of the + /// The of this instance. + /// A list of base classes to represent the direct shape (only mesh and brep are allowed, anything else will be ignored.) + /// Optional Parameters for this instance. + [SchemaInfo( + "DirectShape by base geometries", + "Creates a Revit DirectShape using a list of base geometry objects.", + "Revit", + "Families" + )] + public DirectShape(string name, RevitCategory category, List baseGeometries, List? parameters = null) + { + this.name = name; + this.category = category; + this.baseGeometries = baseGeometries.FindAll(IsValidObject); + this.parameters = parameters?.ToBase(); + } + + // moving away from using the RevitCategory Enum + public DirectShape(string name, string builtInCategory, List baseGeometries, List? parameters = null) + { + this.name = name; + this.baseGeometries = baseGeometries.FindAll(IsValidObject); + this.parameters = parameters?.ToBase(); + //TODO: move to typed property alongside all other revit elements + this["builtInCategory"] = builtInCategory; + } + + public string name { get; set; } + public RevitCategory category { get; set; } + public Base? 
parameters { get; set; } + public string elementId { get; set; } + + [DetachProperty] + public List baseGeometries { get; set; } = new(); + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } + + public bool IsValidObject(Base @base) + { + return @base is Point || @base is ICurve || @base is Mesh || @base is Brep; + } +} diff --git a/src/Objects/BuiltElements/Revit/Enums.cs b/src/Objects/BuiltElements/Revit/Enums.cs new file mode 100644 index 00000000..38fa8c06 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/Enums.cs @@ -0,0 +1,180 @@ +namespace Objects.BuiltElements.Revit; + +//This is an enum so that we can easily create a dropdown in GH for schema builder +//NOTE: if edited the list in Objects.Converter.Revit.Categories should be updated too +public enum RevitCategory +{ + AbutmentFoundations = 0, + AbutmentPiles = 1, + AbutmentWalls = 2, + DuctTerminal = 4, + BridgeAbutments = 3, + Alignments = 5, + StructConnectionAnchors = 6, + ApproachSlabs = 7, + BridgeArches = 8, + AudioVisualDevices = 9, + StairsRailingBaluster = 10, + BridgeBearings = 11, + StructConnectionBolts = 12, + BridgeCables = 13, + BridgeDecks = 14, + BridgeFraming = 15, + CableTrayFitting = 16, + CableTrayRun = 17, + CableTray = 18, + Casework = 19, + Ceilings = 20, + Columns = 21, + CommunicationDevices = 22, + ConduitFitting = 23, + Conduit = 24, + Coordination_Model = 25, + BridgeFramingCrossBracing = 26, + CurtainWallPanels = 27, + CurtaSystem = 28, + CurtainWallMullions = 29, + DataDevices = 30, + BridgeFramingDiaphragms = 31, + Doors = 32, + DuctAccessory = 33, + DuctFitting = 34, + PlaceHolderDucts = 35, + DuctSystem = 36, + DuctCurves = 37, + ElectricalEquipment = 38, + ElectricalFixtures = 39, + Entourage = 40, + ExpansionJoints = 41, + FireAlarmDevices = 42, + FireProtection = 43, + Floors = 44, + FoodServiceEquipment = 45, + Furniture = 46, + FurnitureSystems = 47, + GenericAnnotation = 48, + GenericModel = 49, + BridgeGirders = 50, + Hardscape = 51, + LightingDevices = 52, + LightingFixtures = 53, + Lines = 54, + Mass = 55, + MechanicalEquipment = 56, + MedicalEquipment = 57, + NurseCallDevices = 58, + Parking = 59, + Parts = 60, + PierCaps = 61, + PierColumns = 62, + BridgeFoundations = 63, + PierPiles = 64, + BridgeTowers = 65, + PierWalls = 66, + BridgePiers = 67, + PipeAccessory = 68, + PipeFitting = 69, + PlaceHolderPipes = 70, + PipeSegments = 71, + PipeCurves = 72, + PipingSystem = 73, + Planting = 74, + StructConnectionPlates = 75, + PlumbingFixtures = 76, + StructConnectionProfiles = 77, + StairsRailing = 78, + Ramps = 79, + Roads = 80, + Roofs = 81, + SecurityDevices = 82, + StructConnectionShearStuds = 83, + Signage = 84, + Site = 85, + SpecialityEquipment = 86, + Sprinklers = 87, + Stairs = 88, + StructuralFramingSystem = 89, + StructuralColumns = 90, + StructConnections = 91, + FabricAreas = 92, + StructuralFoundation = 93, + StructuralFraming = 94, + Rebar = 95, + Coupler = 96, + StructuralStiffener = 97, + StructuralTendons = 98, + StructuralTruss = 99, + TemporaryStructure = 100, + Topography = 101, + BridgeFramingTrusses = 102, + VerticalCirculation = 103, + VibrationDampers = 104, + VibrationIsolators = 105, + VibrationManagement = 106, + Walls = 107, + StructConnectionWelds = 108, + Windows = 109, + Railings = 110 +} + +/// +/// FamilyDocuments can only be assigned these categories +/// This is a subset of the list above which was manually retrieved from Revit's UI +/// +public enum RevitFamilyCategory +{ + AudioVisualDevices = 9, + 
CableTrayFitting = 16, + Casework = 19, + Columns = 21, + CommunicationDevices = 22, + ConduitFitting = 23, + DataDevices = 30, + Doors = 32, + DuctAccessory = 33, + DuctFitting = 34, + ElectricalEquipment = 38, + ElectricalFixtures = 39, + Entourage = 40, + FireAlarmDevices = 42, + FireProtection = 43, + FoodServiceEquipment = 45, + Furniture = 46, + FurnitureSystems = 47, + GenericModel = 49, + Hardscape = 51, + LightingDevices = 52, + LightingFixtures = 53, + Mass = 55, + MechanicalEquipment = 56, + MedicalEquipment = 57, + NurseCallDevices = 58, + Parking = 59, + PipeAccessory = 68, + PipeFitting = 69, + Planting = 74, + PlumbingFixtures = 76, + Roads = 80, + SecurityDevices = 82, + Signage = 84, + Site = 85, + SpecialityEquipment = 86, + Sprinklers = 87, + StructuralFramingSystem = 89, + StructuralColumns = 90, + StructConnections = 91, + StructuralFoundation = 93, + StructuralFraming = 94, + StructuralStiffener = 97, + TemporaryStructure = 100, + VerticalCirculation = 103, + Windows = 109, + Railings = 110 +} + +public enum LocationLine +{ + Centerline, + Exterior, + Interior +} diff --git a/src/Objects/BuiltElements/Revit/FamilyInstance.cs b/src/Objects/BuiltElements/Revit/FamilyInstance.cs new file mode 100644 index 00000000..62a2242b --- /dev/null +++ b/src/Objects/BuiltElements/Revit/FamilyInstance.cs @@ -0,0 +1,55 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class FamilyInstance : Base, IDisplayValue> +{ + public FamilyInstance() { } + + [SchemaInfo("FamilyInstance", "Creates a Revit family instance", "Revit", "Families")] + public FamilyInstance( + Point basePoint, + string family, + string type, + Level level, + double rotation = 0, + bool facingFlipped = false, + bool handFlipped = false, + List? parameters = null + ) + { + this.basePoint = basePoint; + this.family = family; + this.type = type; + this.level = level; + this.rotation = rotation; + this.facingFlipped = facingFlipped; + this.handFlipped = handFlipped; + mirrored = false; + this.parameters = parameters?.ToBase(); + } + + public Point basePoint { get; set; } + public string family { get; set; } + public string type { get; set; } + public string category { get; set; } + public Level level { get; set; } + public double rotation { get; set; } + public bool facingFlipped { get; set; } + public bool handFlipped { get; set; } + public bool mirrored { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } + + [DetachProperty] + public List elements { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/FreeformElement.cs b/src/Objects/BuiltElements/Revit/FreeformElement.cs new file mode 100644 index 00000000..a94eb7f9 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/FreeformElement.cs @@ -0,0 +1,142 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.BuiltElements.Revit; + +public class FreeformElement : Base, IDisplayValue> +{ + public FreeformElement() { } + + [SchemaInfo( + "Freeform element", + "Creates a Revit Freeform element using a list of Brep or Meshes. 
Category defaults to Generic Models", + "Revit", + "Families" + )] + public FreeformElement(List baseGeometries, string subcategory = "", List? parameters = null) + { + this.baseGeometries = baseGeometries; + //this.category = category; + this.subcategory = subcategory; + if (!IsValid()) + { + throw new Exception("Freeform elements can only be created from BREPs or Meshes"); + } + + this.parameters = parameters?.ToBase(); + } + + public Base? parameters { get; set; } + + public string subcategory { get; set; } + + public string elementId { get; set; } + + /// + /// DEPRECATED. Sets the geometry contained in the FreeformElement. This field has been deprecated in favor of `baseGeometries` + /// to align with Revit's API. It remains as a setter-only property for backwards compatibility. + /// It will set the first item on the baseGeometries list, and instantiate a list if necessary. + /// + [JsonIgnore, SchemaIgnore, Obsolete("Use 'baseGeometries' instead", true)] + [System.Diagnostics.CodeAnalysis.SuppressMessage( + "Design", + "CA1044:Properties should not be write only", + Justification = "Obsolete" + )] + public Base baseGeometry + { + set + { + if (baseGeometries == null) + { + baseGeometries = new List { value }; + } + else if (baseGeometries.Count == 0) + { + baseGeometries.Add(value); + } + else + { + baseGeometries[0] = value; + } + } + } + + [DetachProperty, Chunkable] + public List baseGeometries { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } + + public bool IsValid() + { + return baseGeometries.All(IsValidObject); + } + + public bool IsValidObject(Base @base) + { + return @base is Mesh || @base is Brep || @base is Geometry.Curve; + } + + #region Deprecated Constructors + + [ + SchemaDeprecated, + SchemaInfo( + "Freeform element", + "Creates a Revit Freeform element using a list of Brep or Meshes.", + "Revit", + "Families" + ) + ] + [System.Diagnostics.CodeAnalysis.SuppressMessage( + "Usage", + "CA2201:Do not raise reserved exception types", + Justification = "Obsolete" + )] + public FreeformElement(Base baseGeometry, List? parameters = null) + { + if (!IsValidObject(baseGeometry)) + { + throw new Exception("Freeform elements can only be created from BREPs or Meshes"); + } + + baseGeometries = new List { baseGeometry }; + this.parameters = parameters?.ToBase(); + } + + [ + SchemaDeprecated, + SchemaInfo( + "Freeform element", + "Creates a Revit Freeform element using a list of Brep or Meshes.", + "Revit", + "Families" + ) + ] + [System.Diagnostics.CodeAnalysis.SuppressMessage( + "Usage", + "CA2201:Do not raise reserved exception types", + Justification = "Obsolete" + )] + public FreeformElement(List baseGeometries, List? 
parameters = null)
+  {
+    this.baseGeometries = baseGeometries;
+    if (!IsValid())
+    {
+      throw new Exception("Freeform elements can only be created from BREPs or Meshes");
+    }
+
+    this.parameters = parameters?.ToBase();
+  }
+
+  #endregion
+}
diff --git a/src/Objects/BuiltElements/Revit/Interfaces/IHasMEPConnectors.cs b/src/Objects/BuiltElements/Revit/Interfaces/IHasMEPConnectors.cs
new file mode 100644
index 00000000..664aba86
--- /dev/null
+++ b/src/Objects/BuiltElements/Revit/Interfaces/IHasMEPConnectors.cs
@@ -0,0 +1,8 @@
+using System.Collections.Generic;
+
+namespace Objects.BuiltElements.Revit.Interfaces;
+
+public interface IHasMEPConnectors
+{
+  List<RevitMEPConnector> Connectors { get; set; }
+}
diff --git a/src/Objects/BuiltElements/Revit/MEPFamilyInstance.cs b/src/Objects/BuiltElements/Revit/MEPFamilyInstance.cs
new file mode 100644
index 00000000..f90538ec
--- /dev/null
+++ b/src/Objects/BuiltElements/Revit/MEPFamilyInstance.cs
@@ -0,0 +1,15 @@
+using System.Collections.Generic;
+using Objects.BuiltElements.Revit.Interfaces;
+using Objects.Other.Revit;
+using Speckle.Core.Models;
+
+namespace Objects.BuiltElements.Revit;
+
+public class RevitMEPFamilyInstance : RevitInstance, IHasMEPConnectors
+{
+  public string RevitPartType { get; set; }
+
+  [DetachProperty]
+  public List<RevitMEPConnector> Connectors { get; set; } = new();
+  public List<ICurve> Curves { get; set; } = new();
+}
diff --git a/src/Objects/BuiltElements/Revit/Parameter.cs b/src/Objects/BuiltElements/Revit/Parameter.cs
new file mode 100644
index 00000000..864ac646
--- /dev/null
+++ b/src/Objects/BuiltElements/Revit/Parameter.cs
@@ -0,0 +1,46 @@
+using Speckle.Core.Kits;
+using Speckle.Core.Models;
+
+namespace Objects.BuiltElements.Revit;
+
+public class Parameter : Base
+{
+  public Parameter() { }
+
+  [SchemaInfo("Parameter", "A Revit instance parameter to set on an element", "Revit", "Families")]
+  public Parameter(
+    [SchemaParamInfo("The Revit display name, BuiltInParameter name or GUID (for shared parameters)")] string name,
+    object value,
+    [SchemaParamInfo(
+      "(Optional) Speckle units. If not set, it's retrieved from the current document. For non-length-based parameters (e.g. Air Flow) it should be set to 'none' so that the Revit display unit will be used instead."
+    )]
+      string units = ""
+  )
+  {
+    this.name = name;
+    this.value = value;
+    this.units = units;
+    applicationInternalName = name;
+  }
+
+  public string name { get; set; }
+  public object?
value { get; set; } + public string applicationUnitType { get; set; } //eg UnitType UT_Length + public string applicationUnit { get; set; } //DisplayUnitType eg DUT_MILLIMITERS + public string applicationInternalName { get; set; } //BuiltInParameterName or GUID for shared parameter + + /// + /// If True it's a Shared Parameter, in which case the ApplicationId field will contain this parameter GUID, + /// otherwise it will store its BuiltInParameter name + /// + public bool isShared { get; set; } + + public bool isReadOnly { get; set; } + + /// + /// True = Type Parameter, False = Instance Parameter + /// + public bool isTypeParameter { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/ParameterUpdater.cs b/src/Objects/BuiltElements/Revit/ParameterUpdater.cs new file mode 100644 index 00000000..7c50e1b1 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/ParameterUpdater.cs @@ -0,0 +1,21 @@ +using System.Collections.Generic; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class ParameterUpdater : Base +{ + [SchemaInfo("ParameterUpdater", "Updates parameters on a Revit element by id", "Revit", "Families")] + public ParameterUpdater([SchemaParamInfo("A Revit ElementId or UniqueId")] string id, List parameters) + { + elementId = id; + this.parameters = parameters.ToBase(); + } + + public ParameterUpdater() { } + + public string elementId { get; set; } + public Base? parameters { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/ProjectInfo.cs b/src/Objects/BuiltElements/Revit/ProjectInfo.cs new file mode 100644 index 00000000..3beef64e --- /dev/null +++ b/src/Objects/BuiltElements/Revit/ProjectInfo.cs @@ -0,0 +1,11 @@ +using Objects.Organization; + +namespace Objects.BuiltElements.Revit; + +public class ProjectInfo : BIMModelInfo +{ + public string author { get; set; } + public string issueDate { get; set; } + public string organizationDescription { get; set; } + public string organizationName { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitBeam.cs b/src/Objects/BuiltElements/Revit/RevitBeam.cs new file mode 100644 index 00000000..b483b12a --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitBeam.cs @@ -0,0 +1,38 @@ +using System.Collections.Generic; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitBeam : Beam +{ + public RevitBeam() { } + + [SchemaInfo("RevitBeam", "Creates a Revit beam by curve and base level.", "Revit", "Structure")] + public RevitBeam( + string family, + string type, + [SchemaMainParam] ICurve baseLine, + Level level, + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.baseLine = baseLine; + this.parameters = parameters?.ToBase(); + this.level = level; + } + + public string family { get; set; } + public string type { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } + + public new Level? 
level + { + get => base.level; + set => base.level = value; + } +} diff --git a/src/Objects/BuiltElements/Revit/RevitBrace.cs b/src/Objects/BuiltElements/Revit/RevitBrace.cs new file mode 100644 index 00000000..6254f5e9 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitBrace.cs @@ -0,0 +1,33 @@ +using System.Collections.Generic; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitBrace : Brace +{ + public RevitBrace() { } + + [SchemaInfo("RevitBrace", "Creates a Revit brace by curve and base level.", "Revit", "Structure")] + public RevitBrace( + string family, + string type, + [SchemaMainParam] ICurve baseLine, + Level? level, + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.baseLine = baseLine; + this.parameters = parameters?.ToBase(); + this.level = level; + } + + public string family { get; set; } + public string type { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } + public Level? level { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitCableTray.cs b/src/Objects/BuiltElements/Revit/RevitCableTray.cs new file mode 100644 index 00000000..0b7b4852 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitCableTray.cs @@ -0,0 +1,15 @@ +using System.Collections.Generic; +using Objects.BuiltElements.Revit.Interfaces; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitCableTray : CableTray, IHasMEPConnectors +{ + public string family { get; set; } + public string type { get; set; } + public Level level { get; set; } + public Base parameters { get; set; } + public string elementId { get; set; } + public List Connectors { get; set; } = new(); +} diff --git a/src/Objects/BuiltElements/Revit/RevitCeiling.cs b/src/Objects/BuiltElements/Revit/RevitCeiling.cs new file mode 100644 index 00000000..874ca2ef --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitCeiling.cs @@ -0,0 +1,74 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitCeiling : Ceiling +{ + public RevitCeiling() { } + + [SchemaDeprecated, SchemaInfo("RevitCeiling", "Creates a Revit ceiling", "Revit", "Architecture")] + [System.Diagnostics.CodeAnalysis.SuppressMessage( + "Style", + "IDE0060:Remove unused parameter", + Justification = "Obsolete" + )] + public RevitCeiling( + [SchemaMainParam, SchemaParamInfo("Planar boundary curve")] ICurve outline, + string family, + string type, + Level level, + double slope = 0, + [SchemaParamInfo("Planar line indicating slope direction")] Line? slopeDirection = null, + double offset = 0, + List? voids = null, + [SchemaParamInfo("Any nested elements that this ceiling might have")] List? elements = null + ) + { + this.outline = outline; + this.family = family; + this.type = type; + this.level = level; + this.slope = slope; + this.slopeDirection = slopeDirection; + this.voids = voids ?? new(); + this.elements = elements; + } + + [SchemaInfo("RevitCeiling", "Creates a Revit ceiling", "Revit", "Architecture")] + public RevitCeiling( + [SchemaMainParam, SchemaParamInfo("Planar boundary curve")] ICurve outline, + string family, + string type, + Level level, + double slope = 0, + [SchemaParamInfo("Planar line indicating slope direction")] Line? slopeDirection = null, + List? 
voids = null, + [SchemaParamInfo("Any nested elements that this ceiling might have")] List? elements = null + ) + { + this.outline = outline; + this.family = family; + this.type = type; + this.level = level; + this.slope = slope; + this.slopeDirection = slopeDirection; + this.voids = voids ?? new(); + this.elements = elements; + } + + public string family { get; set; } + public string type { get; set; } + public Level level { get; set; } + public double slope { get; set; } + public Line? slopeDirection { get; set; } + + [Obsolete("Offset property is now captured in parameters to match the behavior of other Revit objects", true)] + public double offset { get; set; } + + public Base parameters { get; set; } + public string elementId { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitColumn.cs b/src/Objects/BuiltElements/Revit/RevitColumn.cs new file mode 100644 index 00000000..5a770db9 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitColumn.cs @@ -0,0 +1,114 @@ +using System.Collections.Generic; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitColumn : Column +{ + public RevitColumn() { } + + /// + /// SchemaBuilder constructor for a Revit column + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to and params + [SchemaInfo("RevitColumn Vertical", "Creates a vertical Revit Column by point and levels.", "Revit", "Architecture")] + public RevitColumn( + string family, + string type, + [SchemaParamInfo("Only the lower point of this line will be used as base point."), SchemaMainParam] ICurve baseLine, + Level level, + Level topLevel, + double baseOffset = 0, + double topOffset = 0, + bool structural = false, + [SchemaParamInfo("Rotation angle in radians")] double rotation = 0, + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.baseLine = baseLine; + this.topLevel = topLevel; + this.baseOffset = baseOffset; + this.topOffset = topOffset; + this.rotation = rotation; + this.parameters = parameters?.ToBase(); + this.level = level; + } + + [ + SchemaDeprecated, + SchemaInfo("RevitColumn Slanted (old)", "Creates a slanted Revit Column by curve.", "Revit", "Structure") + ] + [System.Diagnostics.CodeAnalysis.SuppressMessage( + "Style", + "IDE0060:Remove unused parameter", + Justification = "Obsolete" + )] + public RevitColumn( + string family, + string type, + [SchemaMainParam] ICurve baseLine, + Level level, + bool structural = false, + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.baseLine = baseLine; + this.level = level; + isSlanted = true; + this.parameters = parameters?.ToBase(); + } + + [SchemaInfo("RevitColumn Slanted", "Creates a slanted Revit Column by curve.", "Revit", "Structure")] + public RevitColumn( + string family, + string type, + [SchemaMainParam] ICurve baseLine, + Level level, + Level? topLevel = null, + bool structural = false, + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.baseLine = baseLine; + this.level = level; + this.topLevel = topLevel; + isSlanted = true; + this.parameters = parameters?.ToBase(); + } + + public new Level? level + { + get => base.level; + set => base.level = value; + } + + public Level? 
topLevel { get; set; } + public double baseOffset { get; set; } + public double topOffset { get; set; } + public bool facingFlipped { get; set; } + public bool handFlipped { get; set; } + public double rotation { get; set; } + public bool isSlanted { get; set; } + public string family { get; set; } + public string type { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitConduit.cs b/src/Objects/BuiltElements/Revit/RevitConduit.cs new file mode 100644 index 00000000..6b4acfab --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitConduit.cs @@ -0,0 +1,19 @@ +using System.Collections.Generic; +using Objects.BuiltElements.Revit.Interfaces; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitConduit : Conduit, IHasMEPConnectors +{ + public string family { get; set; } + + public string type { get; set; } + + public Level level { get; set; } + + public Base parameters { get; set; } + + public string elementId { get; set; } + public List Connectors { get; set; } = new(); +} diff --git a/src/Objects/BuiltElements/Revit/RevitCurtainWallPanel.cs b/src/Objects/BuiltElements/Revit/RevitCurtainWallPanel.cs new file mode 100644 index 00000000..71fbefcd --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitCurtainWallPanel.cs @@ -0,0 +1,3 @@ +namespace Objects.BuiltElements.Revit; + +public class RevitCurtainWallPanel : RevitElement { } diff --git a/src/Objects/BuiltElements/Revit/RevitDuct.cs b/src/Objects/BuiltElements/Revit/RevitDuct.cs new file mode 100644 index 00000000..8c75f4ba --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitDuct.cs @@ -0,0 +1,163 @@ +using System.Collections.Generic; +using Objects.BuiltElements.Revit.Interfaces; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitDuct : Duct, IHasMEPConnectors +{ + public RevitDuct() { } + + /// + /// SchemaBuilder constructor for a Revit duct (deprecated) + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to , , and params + [SchemaInfo("RevitDuct", "Creates a Revit duct", "Revit", "MEP"), SchemaDeprecated] + public RevitDuct( + string family, + string type, + [SchemaMainParam] Line baseLine, + string systemName, + string systemType, + Level level, + double width, + double height, + double diameter, + double velocity = 0, + List? parameters = null + ) + { + baseCurve = baseLine; + this.family = family; + this.type = type; + this.width = width; + this.height = height; + this.diameter = diameter; + this.velocity = velocity; + this.systemName = systemName; + this.systemType = systemType; + this.parameters = parameters?.ToBase(); + this.level = level; + } + + /// + /// SchemaBuilder constructor for a Revit duct + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to , , and params + [SchemaInfo("RevitDuct", "Creates a Revit duct", "Revit", "MEP")] + public RevitDuct( + string family, + string type, + [SchemaMainParam] ICurve baseCurve, + string systemName, + string systemType, + Level level, + double width, + double height, + double diameter, + double velocity = 0, + List? 
parameters = null + ) + { + this.baseCurve = baseCurve; + this.family = family; + this.type = type; + this.width = width; + this.height = height; + this.diameter = diameter; + this.velocity = velocity; + this.systemName = systemName; + this.systemType = systemType; + this.parameters = parameters?.ToBase(); + this.level = level; + } + + public string family { get; set; } + public string type { get; set; } + public string systemName { get; set; } + public string systemType { get; set; } + public Level level { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } + public List Connectors { get; set; } = new(); +} + +public class RevitFlexDuct : RevitDuct +{ + public RevitFlexDuct() { } + + /// + /// SchemaBuilder constructor for a Revit flex duct + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to , , and params + [SchemaInfo("RevitFlexDuct", "Creates a Revit flex duct", "Revit", "MEP")] + public RevitFlexDuct( + string family, + string type, + [SchemaMainParam] ICurve baseCurve, + string systemName, + string systemType, + Level level, + double width, + double height, + double diameter, + Vector startTangent, + Vector endTangent, + double velocity = 0, + List? parameters = null + ) + { + this.baseCurve = baseCurve; + this.family = family; + this.type = type; + this.width = width; + this.height = height; + this.diameter = diameter; + this.startTangent = startTangent; + this.endTangent = endTangent; + this.velocity = velocity; + this.systemName = systemName; + this.systemType = systemType; + this.parameters = parameters?.ToBase(); + this.level = level; + } + + public Vector startTangent { get; set; } + public Vector endTangent { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitElement.cs b/src/Objects/BuiltElements/Revit/RevitElement.cs new file mode 100644 index 00000000..502428fd --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitElement.cs @@ -0,0 +1,20 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +/// +/// A generic Revit element for which we don't have direct conversions +/// +public class RevitElement : Base, IDisplayValue> +{ + public string family { get; set; } + public string type { get; set; } + public string category { get; set; } + public Base parameters { get; set; } + public string elementId { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitElementType.cs b/src/Objects/BuiltElements/Revit/RevitElementType.cs new file mode 100644 index 00000000..2912b02e --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitElementType.cs @@ -0,0 +1,40 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitElementType : Base +{ + public string family { get; set; } + + public string type { get; set; } + + public string category { get; set; } +} + +public class RevitMepElementType : RevitElementType +{ + public string shape { get; set; } +} + +/// +/// Represents the FamilySymbol subclass of ElementType in Revit +/// +public class RevitSymbolElementType : RevitElementType, IDisplayValue> +{ + /// + /// The type of placement for this family symbol + /// + /// See https://www.revitapidocs.com/2023/2abb8627-1da3-4069-05c9-19e4be5e02ad.htm + public string placementType { get; set; } + + /// + /// 
Subcomponents found in this family symbol + /// + [DetachProperty] + public List elements { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitFloor.cs b/src/Objects/BuiltElements/Revit/RevitFloor.cs new file mode 100644 index 00000000..35baa3d8 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitFloor.cs @@ -0,0 +1,53 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitFloor : Floor +{ + public RevitFloor() { } + + [SchemaInfo("RevitFloor", "Creates a Revit floor by outline and level", "Revit", "Architecture")] + public RevitFloor( + string family, + string type, + [SchemaMainParam] ICurve outline, + Level level, + bool structural = false, + double slope = 0, + Line? slopeDirection = null, + List? voids = null, + [SchemaParamInfo("Any nested elements that this floor might have")] List? elements = null, + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.level = level; + this.structural = structural; + this.slope = slope; + this.slopeDirection = slopeDirection; + this.parameters = parameters?.ToBase(); + this.outline = outline; + this.voids = voids ?? new(); + this.elements = elements; + } + + public string family { get; set; } + public string type { get; set; } + + public new Level? level + { + get => base.level; + set => base.level = value; + } + + public bool structural { get; set; } + public double slope { get; set; } + public Line? slopeDirection { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitLevel.cs b/src/Objects/BuiltElements/Revit/RevitLevel.cs new file mode 100644 index 00000000..a3c8c7ce --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitLevel.cs @@ -0,0 +1,57 @@ +using System.Collections.Generic; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitLevel : Level +{ + public RevitLevel() { } + + /// + /// SchemaBuilder constructor for a Revit level + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to param + [SchemaInfo( + "RevitLevel", + "Creates a new Revit level unless one with the same elevation already exists", + "Revit", + "Architecture" + )] + public RevitLevel( + [SchemaParamInfo("Level name. NOTE: updating level name is not supported")] string name, + [SchemaParamInfo( + "Level elevation. NOTE: updating level elevation is not supported, a new one will be created unless another level at the new elevation already exists." + )] + double elevation, + [SchemaParamInfo( + "If true, it creates an associated view in Revit. NOTE: only used when creating a level for the first time" + )] + bool createView, + List? parameters = null + ) + { + this.name = name; + this.elevation = elevation; + this.createView = createView; + this.parameters = parameters?.ToBase(); + referenceOnly = false; + } + + [SchemaInfo("RevitLevel by name", "Gets an existing Revit level by name", "Revit", "Architecture")] + public RevitLevel(string name) + { + this.name = name; + referenceOnly = true; + } + + public bool createView { get; set; } + public Base? 
parameters { get; set; } + public string elementId { get; set; } + public bool referenceOnly { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitMEPConnector.cs b/src/Objects/BuiltElements/Revit/RevitMEPConnector.cs new file mode 100644 index 00000000..8032ef22 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitMEPConnector.cs @@ -0,0 +1,17 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitMEPConnector : Base +{ + public double angle { get; set; } + public List connectedConnectorIds { get; set; } = new(); + public double height { get; set; } + public Point origin { get; set; } + public double radius { get; set; } + public string shape { get; set; } + public string systemName { get; set; } + public double width { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitNetwork.cs b/src/Objects/BuiltElements/Revit/RevitNetwork.cs new file mode 100644 index 00000000..16cfdadb --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitNetwork.cs @@ -0,0 +1,76 @@ +using System; +using Objects.Geometry; + +namespace Objects.BuiltElements.Revit; + +[Obsolete( + "Networks are no longer used to assemble MEP systems in Revit. See the RevitCommitBuilder for MEP systems conversion." +)] +public class RevitNetworkElement : NetworkElement +{ + public RevitNetworkElement() { } + + /// + /// Indicates if this element was constructed from an MEP curve + /// + public bool isCurveBased { get; set; } + + /// + /// Indicates if this element needs temporary placeholder objects to be created first when receiving + /// + /// + /// For example, some fittings cannot be created based on connectors, and so will be created similarly to mechanical equipment + /// + public bool isConnectorBased { get; set; } +} + +[Obsolete( + "Networks are no longer used to assemble MEP systems in Revit. See the RevitCommitBuilder for MEP systems conversion." +)] +public class RevitNetworkLink : NetworkLink +{ + public double height { get; set; } + public double width { get; set; } + public double diameter { get; set; } + public Point origin { get; set; } + public Vector direction { get; set; } + + /// + /// The system category + /// + public string systemName { get; set; } + + public string systemType { get; set; } + + /// + /// The connector profile shape of the + /// + public string shape { get; set; } + + /// + /// The link domain + /// + public string domain { get; set; } + + /// + /// The index indicating the position of this link on the connected fitting element, if applicable + /// + /// + /// Revit fitting links are 1-indexed. For example, "T" fittings will have ordered links from index 1-3. + /// + public int fittingIndex { get; set; } + + /// + /// Indicates if this link needs temporary placeholder objects to be created first when receiving + /// + /// + /// Placeholder geometry are curves. + /// For example, U-bend links need temporary pipes to be created first, if one or more linked pipes have not yet been created in the network. 
+ /// + public bool needsPlaceholders { get; set; } + + /// + /// Indicates if this link has been connected to its elements + /// + public bool isConnected { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitOpening.cs b/src/Objects/BuiltElements/Revit/RevitOpening.cs new file mode 100644 index 00000000..d502ab4d --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitOpening.cs @@ -0,0 +1,106 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitOpening : Opening +{ + public Base? parameters { get; set; } + public string elementId { get; set; } +} + +public class RevitVerticalOpening : RevitOpening { } + +public class RevitWallOpening : RevitOpening +{ + public RevitWallOpening() { } + + [ + Obsolete("Use constructor with Polyline input instead"), + SchemaDeprecated, + SchemaInfo("Revit Wall Opening (Deprecated)", "Creates a Speckle Wall opening for revit", "BIM", "Architecture") + ] + public RevitWallOpening(ICurve outline, RevitWall? host = null) + { + if (outline is not Polyline) + { + throw new SpeckleException("Outline should be a rectangular-shaped polyline", false); + } + + this.outline = outline; + this.host = host; + } + + [SchemaInfo("Revit Wall Opening", "Creates a Speckle Wall opening for revit", "Revit", "Architecture")] + public RevitWallOpening(Polyline outline, RevitWall? host = null) + { + if (outline == null) + { + throw new SpeckleException("Outline cannot be null"); + } + + if (outline.GetPoints().Count != 4) + { + throw new SpeckleException("Outline should be a rectangular-shaped polyline"); + } + + this.outline = outline; + this.host = host; + } + + public RevitWall? host { get; set; } +} + +public class RevitShaft : RevitOpening +{ + public RevitShaft() { } + + /// + /// SchemaBuilder constructor for a Revit shaft + /// + /// + /// + /// + /// + [SchemaInfo("RevitShaft", "Creates a Revit shaft from a bottom and top level", "Revit", "Architecture")] + public RevitShaft( + [SchemaMainParam] ICurve outline, + Level bottomLevel, + Level topLevel, + List? 
parameters = null + ) + { + this.outline = outline; + this.bottomLevel = bottomLevel; + this.topLevel = topLevel; + this.parameters = parameters?.ToBase(); + } + + public Level bottomLevel { get; set; } + public Level topLevel { get; set; } + public double height { get; set; } + + /* + /// + /// SchemaBuilder constructor for a Revit shaft + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to param + [SchemaInfo("RevitShaft", "Creates a Revit shaft from a bottom level and height")] + public RevitShaft(ICurve outline, Level bottomLevel, double height, List parameters = null) + { + this.outline = outline; + this.bottomLevel = bottomLevel; + this.height = height; + this.parameters = parameters.ToBase(); + } + */ +} diff --git a/src/Objects/BuiltElements/Revit/RevitPipe.cs b/src/Objects/BuiltElements/Revit/RevitPipe.cs new file mode 100644 index 00000000..6084bd61 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitPipe.cs @@ -0,0 +1,78 @@ +using System.Collections.Generic; +using Objects.BuiltElements.Revit.Interfaces; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitPipe : Pipe, IHasMEPConnectors +{ + public RevitPipe() { } + + [SchemaInfo("RevitPipe", "Creates a Revit pipe", "Revit", "MEP")] + public RevitPipe( + string family, + string type, + [SchemaMainParam] ICurve baseCurve, + double diameter, + Level level, + string systemName = "", + string systemType = "", + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.baseCurve = baseCurve; + this.diameter = diameter; + this.systemName = systemName; + this.systemType = systemType; + this.level = level; + this.parameters = parameters?.ToBase(); + } + + public string family { get; set; } + public string type { get; set; } + public string systemName { get; set; } + public string systemType { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } + public Level level { get; set; } + public List Connectors { get; set; } = new(); +} + +public class RevitFlexPipe : RevitPipe +{ + public RevitFlexPipe() { } + + [SchemaInfo("RevitFlexPipe", "Creates a Revit flex pipe", "Revit", "MEP")] + public RevitFlexPipe( + string family, + string type, + [SchemaMainParam] ICurve baseCurve, + double diameter, + Level level, + Vector startTangent, + Vector endTangent, + string systemName = "", + string systemType = "", + List? 
parameters = null + ) + { + this.family = family; + this.type = type; + this.baseCurve = baseCurve; + this.diameter = diameter; + this.startTangent = startTangent; + this.endTangent = endTangent; + this.systemName = systemName; + this.systemType = systemType; + this.level = level; + this.parameters = parameters?.ToBase(); + } + + public Vector startTangent { get; set; } + public Vector endTangent { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitRailing.cs b/src/Objects/BuiltElements/Revit/RevitRailing.cs new file mode 100644 index 00000000..8a221fba --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitRailing.cs @@ -0,0 +1,49 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitRailing : Base, IDisplayValue> +{ + public RevitRailing() { } + + [SchemaInfo("Railing", "Creates a Revit railing by base curve.", "Revit", "Architecture")] + public RevitRailing(string type, [SchemaMainParam] Polycurve baseCurve, Level level, bool flipped = false) + { + this.type = type; + path = baseCurve; + this.level = level; + this.flipped = flipped; + } + + //public string family { get; set; } + public string type { get; set; } + public Level level { get; set; } + public Polycurve path { get; set; } + public bool flipped { get; set; } + public string elementId { get; set; } + public Base parameters { get; set; } + public RevitTopRail topRail { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} + +// Used only to transfer parameters of the top railing +// its display mesh will live in the main railing element +public class RevitTopRail : Base +{ + //public string family { get; set; } + public string type { get; set; } + public string elementId { get; set; } + public Base parameters { get; set; } + + [DetachProperty] + public List displayValue { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitRebar.cs b/src/Objects/BuiltElements/Revit/RevitRebar.cs new file mode 100644 index 00000000..e84cdce3 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitRebar.cs @@ -0,0 +1,85 @@ +using System; +using System.Collections.Generic; +using Speckle.Newtonsoft.Json; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitRebarGroup : RebarGroup +{ + public RevitRebarGroup() { } + + [JsonIgnore] + public RevitRebarShape revitShape { get; set; } + + public override RebarHook? startHook + { + get => revitStartHook; + set => + revitStartHook = value switch + { + RevitRebarHook o => o, + null => null, + _ => throw new ArgumentException($"Expected object of type {nameof(RevitRebarHook)} or null"), + }; + } + + [JsonIgnore] + public RevitRebarHook? revitStartHook { get; set; } + + public override RebarHook? endHook + { + get => revitEndHook; + set => + revitEndHook = value switch + { + RevitRebarHook o => o, + null => null, + _ => throw new ArgumentException($"Expected object of type {nameof(RevitRebarHook)} or null"), + }; + } + + [JsonIgnore] + public RevitRebarHook? 
revitEndHook { get; set; } + + public string family { get; set; } + public string type { get; set; } + public int barPositions { get; set; } + public Vector normal { get; set; } + public Base parameters { get; set; } + public string elementId { get; set; } +} + +public class RevitRebarShape : RebarShape +{ + public RevitRebarShape() { } + + public Base parameters { get; set; } + public string elementId { get; set; } +} + +public class RevitRebarHook : RebarHook +{ + public RevitRebarHook() { } + + public double multiplier { get; set; } + public string orientation { get; set; } + public Base parameters { get; set; } + public string elementId { get; set; } +} + +#region Obsolete +[Obsolete("Deprecated in 2.17: Use RevitRebarGroup class instead")] +public class RevitRebar : Rebar +{ + public string family { get; set; } + public string type { get; set; } + public string host { get; set; } + public string barType { get; set; } + public string barStyle { get; set; } + public List shapes { get; set; } + public Base parameters { get; set; } + public string elementId { get; set; } +} +#endregion diff --git a/src/Objects/BuiltElements/Revit/RevitRoof/RevitRoof.cs b/src/Objects/BuiltElements/Revit/RevitRoof/RevitRoof.cs new file mode 100644 index 00000000..3f13b8eb --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitRoof/RevitRoof.cs @@ -0,0 +1,96 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit.RevitRoof; + +public class RevitRoof : Roof +{ + public string family { get; set; } + public string type { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } + + public new Level? level + { + get => base.level; + set => base.level = value; + } +} + +public class RevitExtrusionRoof : RevitRoof +{ + public RevitExtrusionRoof() { } + + /// + /// SchemaBuilder constructor for a Revit extrusion roof + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to and params + [SchemaInfo("RevitExtrusionRoof", "Creates a Revit roof by extruding a curve", "Revit", "Architecture")] + public RevitExtrusionRoof( + string family, + string type, + [SchemaParamInfo("Extrusion start")] double start, + [SchemaParamInfo("Extrusion end")] double end, + [SchemaParamInfo("Profile along which to extrude the roof"), SchemaMainParam] Line referenceLine, + Level level, + List? elements = null, + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.parameters = parameters?.ToBase(); + this.level = level; + this.start = start; + this.end = end; + this.referenceLine = referenceLine; + this.elements = elements; + } + + public double start { get; set; } + public double end { get; set; } + public Line referenceLine { get; set; } +} + +public class RevitFootprintRoof : RevitRoof +{ + public RevitFootprintRoof() { } + + [SchemaInfo("RevitFootprintRoof", "Creates a Revit roof by outline", "Revit", "Architecture")] + public RevitFootprintRoof( + [SchemaMainParam] ICurve outline, + string family, + string type, + Level level, + RevitLevel? cutOffLevel = null, + double slope = 0, + List? voids = null, + List? elements = null, + List? parameters = null + ) + { + this.outline = outline; + this.voids = voids ?? 
new(); + this.family = family; + this.type = type; + this.slope = slope; + this.parameters = parameters?.ToBase(); + this.level = level; + this.cutOffLevel = cutOffLevel; + this.elements = elements; + } + + public RevitLevel? cutOffLevel { get; set; } + public double? slope { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitStair.cs b/src/Objects/BuiltElements/Revit/RevitStair.cs new file mode 100644 index 00000000..26ff77d5 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitStair.cs @@ -0,0 +1,79 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitStair : Base, IDisplayValue> +{ + public string family { get; set; } + public string type { get; set; } + public Level level { get; set; } + public Level topLevel { get; set; } + public double riserHeight { get; set; } + public int risersNumber { get; set; } + public double treadDepth { get; set; } + public int treadsNumber { get; set; } + public double baseElevation { get; set; } + public double topElevation { get; set; } + public bool beginsWithRiser { get; set; } + public double height { get; set; } + public int numberOfStories { get; set; } + public Base parameters { get; set; } + public List runs { get; set; } + public List landings { get; set; } + public List supports { get; set; } + public string elementId { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} + +public class RevitStairRun : Base +{ + public string family { get; set; } + public string type { get; set; } + public Polycurve path { get; set; } + public Polycurve outline { get; set; } + public double runWidth { get; set; } + public int risersNumber { get; set; } + public int treadsNumber { get; set; } + public double baseElevation { get; set; } + public double topElevation { get; set; } + public bool beginsWithRiser { get; set; } + public bool endsWithRiser { get; set; } + public double extensionBelowRiserBase { get; set; } + public double extensionBelowTreadBase { get; set; } + public double height { get; set; } + public string runStyle { get; set; } + public Base parameters { get; set; } + public string elementId { get; set; } + + public string units { get; set; } +} + +public class RevitStairLanding : Base +{ + public string family { get; set; } + public string type { get; set; } + public bool isAutomaticLanding { get; set; } + public double baseElevation { get; set; } + public double thickness { get; set; } + public Polycurve outline { get; set; } + public Base parameters { get; set; } + public string elementId { get; set; } + + public string units { get; set; } +} + +public class RevitStairSupport : Base +{ + public string family { get; set; } + public string type { get; set; } + public Base parameters { get; set; } + public string elementId { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitTopography.cs b/src/Objects/BuiltElements/Revit/RevitTopography.cs new file mode 100644 index 00000000..f7205a94 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitTopography.cs @@ -0,0 +1,22 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitTopography : Topography +{ + public RevitTopography() { } + + [SchemaInfo("RevitTopography", "Creates a Revit topography", "Revit", "Architecture")] + 
public RevitTopography([SchemaMainParam] Mesh displayMesh, List? parameters = null) + { + displayValue = new List { displayMesh }; + this.parameters = parameters?.ToBase(); + } + + public string elementId { get; set; } + public Base? parameters { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitToposolid.cs b/src/Objects/BuiltElements/Revit/RevitToposolid.cs new file mode 100644 index 00000000..096b201b --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitToposolid.cs @@ -0,0 +1,43 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitToposolid : Base, IDisplayValue> +{ + public RevitToposolid() { } + + [SchemaInfo("RevitToposolid", "Creates a Revit Toposolid", "Revit", "Architecture")] + public RevitToposolid( + Level level, + List profiles, + List? topPlanePoints = null, + [SchemaParamInfo("Any nested elements that this floor might have")] List? elements = null, + List? parameters = null + ) + { + this.profiles = profiles; + this.level = level; + this.points = topPlanePoints ?? new(); + this.elements = elements; + this.parameters = parameters?.ToBase(); + } + + public List profiles { get; set; } = new(); + + public List points { get; set; } = new(); + + [DetachProperty] + public List? elements { get; set; } + + [DetachProperty] + public List displayValue { get; set; } + + public string family { get; set; } + public string type { get; set; } + public Level level { get; set; } + public Base? parameters { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/RevitWall.cs b/src/Objects/BuiltElements/Revit/RevitWall.cs new file mode 100644 index 00000000..ac9d4bfe --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitWall.cs @@ -0,0 +1,243 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitWall : Wall +{ + public RevitWall() { } + + /// + /// SchemaBuilder constructor for a Revit wall + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to and params + [SchemaInfo( + "RevitWall by curve and levels", + "Creates a Revit wall with a top and base level.", + "Revit", + "Architecture" + )] + public RevitWall( + string family, + string type, + [SchemaMainParam] ICurve baseLine, + Level level, + Level topLevel, + double baseOffset = 0, + double topOffset = 0, + bool flipped = false, + bool structural = false, + [SchemaParamInfo("Set in here any nested elements that this level might have.")] List? elements = null, + List? 
parameters = null + ) + { + this.family = family; + this.type = type; + this.baseLine = baseLine; + this.baseOffset = baseOffset; + this.topOffset = topOffset; + this.flipped = flipped; + this.structural = structural; + this.level = level; + this.topLevel = topLevel; + this.elements = elements; + this.parameters = parameters?.ToBase(); + } + + /// + /// SchemaBuilder constructor for a Revit wall + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// Assign units when using this constructor due to , , and params + [SchemaInfo("RevitWall by curve and height", "Creates an unconnected Revit wall.", "Revit", "Architecture")] + public RevitWall( + string family, + string type, + [SchemaMainParam] ICurve baseLine, + Level level, + double height, + double baseOffset = 0, + double topOffset = 0, + bool flipped = false, + bool structural = false, + [SchemaParamInfo("Set in here any nested elements that this wall might have.")] List? elements = null, + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.baseLine = baseLine; + this.height = height; + this.baseOffset = baseOffset; + this.topOffset = topOffset; + this.flipped = flipped; + this.structural = structural; + this.level = level; + this.elements = elements; + this.parameters = parameters?.ToBase(); + } + + public string family { get; set; } + public string type { get; set; } + public double baseOffset { get; set; } + public double topOffset { get; set; } + public bool flipped { get; set; } + public bool structural { get; set; } + + public new Level? level + { + get => base.level; + set => base.level = value; + } + + public Level topLevel { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } +} + +public class RevitFaceWall : Wall +{ + public RevitFaceWall() { } + + [SchemaInfo("RevitWall by face", "Creates a Revit wall from a surface.", "Revit", "Architecture")] + public RevitFaceWall( + string family, + string type, + [SchemaParamInfo("Surface or single face Brep to use"), SchemaMainParam] Brep surface, + Level level, + LocationLine locationLine = LocationLine.Interior, + [SchemaParamInfo("Set in here any nested elements that this wall might have.")] List? elements = null, + List? parameters = null + ) + { + if (surface.Surfaces.Count == 0) + { + throw new Exception("Cannot create a RevitWall with an empty BREP"); + } + + if (surface.Surfaces.Count > 1) + { + throw new Exception( + "The provided brep has more than 1 surface. Please deconstruct/explode it to create multiple instances" + ); + } + + this.family = family; + this.type = type; + brep = surface; + this.locationLine = locationLine; + this.level = level; + this.elements = elements; + this.parameters = parameters?.ToBase(); + } + + public string family { get; set; } + public string type { get; set; } + + public Brep brep { get; set; } + + public new Level? level + { + get => base.level; + set => base.level = value; + } + + public LocationLine locationLine { get; set; } + public Base? 
parameters { get; set; } + public string elementId { get; set; } +} + +public class RevitProfileWall : Wall +{ + public RevitProfileWall() { } + + [SchemaInfo("RevitWall by profile", "Creates a Revit wall from a profile.", "Revit", "Architecture")] + public RevitProfileWall( + string family, + string type, + [SchemaParamInfo("Profile to use"), SchemaMainParam] Polycurve profile, + Level level, + LocationLine locationLine = LocationLine.Interior, + bool structural = false, + [SchemaParamInfo("Set in here any nested elements that this wall might have.")] List? elements = null, + List? parameters = null + ) + { + this.family = family; + this.type = type; + this.profile = profile; + this.locationLine = locationLine; + this.structural = structural; + this.level = level; + this.elements = elements; + this.parameters = parameters?.ToBase(); + } + + public string family { get; set; } + public string type { get; set; } + public Polycurve profile { get; set; } + + public new Level? level + { + get => base.level; + set => base.level = value; + } + + public LocationLine locationLine { get; set; } + public bool structural { get; set; } + public Base? parameters { get; set; } + public string elementId { get; set; } +} + +// [SchemaDescription("Not supported yet.")] +// [SchemaIgnore] +// public class RevitCurtainWall : Wall +// { +// // TODO +// // What props do/can curtain walls have? - grid, mullions, etc. +// +// [SchemaOptional] +// public bool flipped { get; set; } +// +// [SchemaOptional] +// public Base parameters { get; set; } +// +// [SchemaIgnore] +// public string elementId { get; set; } +// } +// +// [SchemaDescription("Not supported yet.")] +// [SchemaIgnore] +// public class RevitWallByPoint : Base +// { +// [SchemaOptional] +// public Base parameters { get; set; } +// +// [SchemaIgnore] +// public string elementId { get; set; } +// } diff --git a/src/Objects/BuiltElements/Revit/RevitWire.cs b/src/Objects/BuiltElements/Revit/RevitWire.cs new file mode 100644 index 00000000..02e532f4 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitWire.cs @@ -0,0 +1,40 @@ +using System.Collections.Generic; +using Objects.BuiltElements.Revit.Interfaces; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitWire : Wire, IHasMEPConnectors +{ + public RevitWire() { } + + [SchemaInfo("RevitWire", "Creates a Revit wire from points and level", "Revit", "MEP")] + public RevitWire( + List constructionPoints, + string family, + string type, + Level level, + string wiringType = "Arc", + List? parameters = null + ) + { + this.constructionPoints = constructionPoints; + this.family = family; + this.type = type; + this.level = level; + this.wiringType = wiringType; + this.parameters = parameters?.ToBase(); + } + + public string family { get; set; } + public string type { get; set; } + public string wiringType { get; set; } + public List constructionPoints { get; set; } // used in constructor for revit native wires + public string system { get; set; } + public Level level { get; set; } + public Base? 
parameters { get; set; } + public string elementId { get; set; } + public List Connectors { get; set; } = new(); +} diff --git a/src/Objects/BuiltElements/Revit/RevitZone.cs b/src/Objects/BuiltElements/Revit/RevitZone.cs new file mode 100644 index 00000000..3dac0466 --- /dev/null +++ b/src/Objects/BuiltElements/Revit/RevitZone.cs @@ -0,0 +1,15 @@ +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class RevitZone : Zone +{ + public RevitZone() { } + + public Level level { get; set; } + public string phaseName { get; set; } + public Base parameters { get; set; } + public string elementId { get; set; } + public bool isDefault { get; set; } + public string serviceType { get; set; } +} diff --git a/src/Objects/BuiltElements/Revit/StructuralConnectionHandler.cs b/src/Objects/BuiltElements/Revit/StructuralConnectionHandler.cs new file mode 100644 index 00000000..c0cba6dd --- /dev/null +++ b/src/Objects/BuiltElements/Revit/StructuralConnectionHandler.cs @@ -0,0 +1,18 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.Revit; + +public class StructuralConnectionHandler : Base, IDisplayValue> +{ + public string family { get; set; } + public string type { get; set; } + public Point basePoint { get; set; } + + [DetachProperty] + public List connectedElements { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Roof.cs b/src/Objects/BuiltElements/Roof.cs new file mode 100644 index 00000000..fd076aeb --- /dev/null +++ b/src/Objects/BuiltElements/Roof.cs @@ -0,0 +1,31 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Roof : Base, IDisplayValue> +{ + public Roof() { } + + [SchemaDeprecated, SchemaInfo("Roof", "Creates a Speckle roof", "BIM", "Architecture")] + public Roof([SchemaMainParam] ICurve outline, List? voids = null, List? elements = null) + { + this.outline = outline; + this.voids = voids ?? new(); + this.elements = elements; + } + + public ICurve outline { get; set; } + public virtual Level? level { get; internal set; } + public List voids { get; set; } = new(); + + [DetachProperty] + public List? 
elements { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Room.cs b/src/Objects/BuiltElements/Room.cs new file mode 100644 index 00000000..e0975d33 --- /dev/null +++ b/src/Objects/BuiltElements/Room.cs @@ -0,0 +1,62 @@ +using System.Collections.Generic; +using Objects.BuiltElements.Revit; +using Objects.Geometry; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Room : Base, IHasArea, IHasVolume, IDisplayValue> +{ + public Room() { } + + /// + /// SchemaBuilder constructor for a Room + /// + /// Assign units when using this constructor due to prop + [SchemaInfo("Room", "Creates a Speckle room", "BIM", "Architecture")] + public Room(string name, string number, Level level, [SchemaMainParam] Point basePoint) + { + this.name = name; + this.number = number; + this.level = level; + this.basePoint = basePoint; + } + + /// + /// SchemaBuilder constructor for a Room + /// + /// Assign units when using this constructor due to prop + [SchemaInfo("RevitRoom", "Creates a Revit room with parameters", "Revit", "Architecture")] + public Room( + string name, + string number, + Level level, + [SchemaMainParam] Point basePoint, + List? parameters = null + ) + { + this.name = name; + this.number = number; + this.level = level; + this.basePoint = basePoint; + this["parameters"] = parameters?.ToBase(); + } + + public string name { get; set; } + public string number { get; set; } + public virtual Level? level { get; set; } + public Point basePoint { get; set; } + public double height { get; set; } + public List voids { get; set; } = new(); + public ICurve outline { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } + + public double area { get; set; } + public double volume { get; set; } +} diff --git a/src/Objects/BuiltElements/Space.cs b/src/Objects/BuiltElements/Space.cs new file mode 100644 index 00000000..076d146b --- /dev/null +++ b/src/Objects/BuiltElements/Space.cs @@ -0,0 +1,77 @@ +using System; +using System.Collections.Generic; +using Objects.BuiltElements.Revit; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Space : Base, IHasArea, IHasVolume, IDisplayValue> +{ + public Space() { } + + [SchemaInfo("Space", "Creates a Speckle space", "BIM", "MEP")] + public Space(string name, string number, [SchemaMainParam] Point basePoint, Level level) + { + this.name = name; + this.number = number; + this.basePoint = basePoint; + this.level = level; + } + + [SchemaInfo( + "Space with top level and offset parameters", + "Creates a Speckle space with the specified top level and offsets", + "BIM", + "MEP" + )] + public Space( + string name, + string number, + [SchemaMainParam] Point basePoint, + Level level, + Level topLevel, + double topOffset, + double baseOffset + ) + { + this.name = name; + this.number = number; + this.basePoint = basePoint; + this.level = level; + this.topLevel = topLevel; + this.topOffset = topOffset; + this.baseOffset = baseOffset; + } + + public string name { get; set; } + public string number { get; set; } + public Point basePoint { get; set; } + public Level level { get; set; } + public double baseOffset { get; set; } + public Level topLevel { get; set; } // corresponds to UpperLimit property in Revit api + public double topOffset { get; set; } // corresponds to 
LimitOffset property in Revit api + public List voids { get; set; } = new(); + public ICurve outline { get; set; } + public string spaceType { get; set; } + + // add the zone object for better forward compatibility + public RevitZone? zone { get; set; } + + [Obsolete("Use zone property instead")] + public string zoneName { get; internal set; } + public string units { get; set; } + + public string roomId { get; set; } + + public string phaseName { get; set; } + + // additional properties to add: also include space separation lines here? + + [DetachProperty] + public List displayValue { get; set; } + + public double area { get; set; } + public double volume { get; set; } +} diff --git a/src/Objects/BuiltElements/Station.cs b/src/Objects/BuiltElements/Station.cs new file mode 100644 index 00000000..f872b69d --- /dev/null +++ b/src/Objects/BuiltElements/Station.cs @@ -0,0 +1,13 @@ +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Station : Base +{ + public double number { get; set; } + public string type { get; set; } + public Point location { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/BuiltElements/Structure.cs b/src/Objects/BuiltElements/Structure.cs new file mode 100644 index 00000000..b6339463 --- /dev/null +++ b/src/Objects/BuiltElements/Structure.cs @@ -0,0 +1,16 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Structure : Base, IDisplayValue> +{ + public Point location { get; set; } + public List pipeIds { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/TeklaStructures/BeamPosition.cs b/src/Objects/BuiltElements/TeklaStructures/BeamPosition.cs new file mode 100644 index 00000000..a21da43f --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/BeamPosition.cs @@ -0,0 +1,13 @@ +using Speckle.Core.Models; + +namespace Objects.BuiltElements.TeklaStructures; + +public class TeklaPosition : Base +{ + public TeklaDepthEnum Depth { get; set; } + public TeklaPlaneEnum Plane { get; set; } + public TeklaRotationEnum Rotation { get; set; } + public double depthOffset { get; set; } + public double planeOffset { get; set; } + public double rotationOffset { get; set; } +} diff --git a/src/Objects/BuiltElements/TeklaStructures/Bolts.cs b/src/Objects/BuiltElements/TeklaStructures/Bolts.cs new file mode 100644 index 00000000..380fb452 --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/Bolts.cs @@ -0,0 +1,43 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.TeklaStructures; + +public class Bolts : Base +{ + [DetachProperty] + public List displayValue { get; set; } + + public Point firstPosition { get; set; } + public Point secondPosition { get; set; } + + public double length { get; set; } + public double boltSize { get; set; } + public double tolerance { get; set; } + public TeklaPosition position { get; set; } + public string boltStandard { get; set; } + public double cutLength { get; set; } + public List coordinates { get; set; } + public List boltedPartsIds { get; set; } = new(); // First guid is PartToBeBolted, second guid is PartToBoltTo, any others are OtherPartsToBolt +} + +public class BoltsXY : Bolts +{ + // Lists of XY positions of bolts for Tekla + public List xPosition { get; set; } + public List yPosition { get; 
set; } +} + +public class BoltsArray : Bolts +{ + // Lists of XY distances between bolts for Tekla + public List xDistance { get; set; } + public List yDistance { get; set; } +} + +public class BoltsCircle : Bolts +{ + public int boltCount { get; set; } + public double diameter { get; set; } +} diff --git a/src/Objects/BuiltElements/TeklaStructures/Enums.cs b/src/Objects/BuiltElements/TeklaStructures/Enums.cs new file mode 100644 index 00000000..69c61e88 --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/Enums.cs @@ -0,0 +1,86 @@ +namespace Objects.BuiltElements.TeklaStructures; + +public enum TeklaBeamType +{ + Beam, + PolyBeam, + SpiralBeam +} + +public enum TeklaChamferType +{ + none, + line, + rounding, + arc, + arc_point, + square, + square_parallel, + line_and_arc +} + +public enum TeklaWeldType +{ + none, + edge_flange, + square_groove_butt, + bevel_groove_single_v_butt, + bevel_groove_single_bevel_butt, + single_v_butt_with_broad_root_face, + single_bevel_butt_with_broad_root_face, + u_groove_single_u_butt, + j_groove_j_butt, + bevel_backing, + fillet, + plug, + spot, + seam, + slot, + flare_bevel_groove, + flare_v_groove, + corner_flange, + partial_penetration_single_bevel_butt_plus_fillet, + partial_penetration_square_groove_plus_fillet, + melt_through, + steep_flanked_bevel_groove_single_v_butt, + steep_flanked_bevel_groove_single_bevel_butt, + edge, + iso_surfacing, + fold, + inclined +} + +public enum TeklaWeldIntermittentType +{ + continuous, + chain_intermittent, + staggered_intermittent +} + +public enum TeklaDepthEnum +{ + middle, + front, + behind +} + +public enum TeklaPlaneEnum +{ + middle, + left, + right +} + +public enum TeklaRotationEnum +{ + front, + top, + back, + below +} + +public enum TeklaOpeningTypeEnum +{ + beam, + contour +} diff --git a/src/Objects/BuiltElements/TeklaStructures/Fitting.cs b/src/Objects/BuiltElements/TeklaStructures/Fitting.cs new file mode 100644 index 00000000..daeb7d12 --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/Fitting.cs @@ -0,0 +1,8 @@ +using Objects.Geometry; + +namespace Objects.BuiltElements.TeklaStructures; + +public class Fitting : Plane +{ + public string hostID { get; set; } +} diff --git a/src/Objects/BuiltElements/TeklaStructures/TeklaBeam.cs b/src/Objects/BuiltElements/TeklaStructures/TeklaBeam.cs new file mode 100644 index 00000000..7d2805f2 --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/TeklaBeam.cs @@ -0,0 +1,59 @@ +using Objects.Geometry; +using Objects.Structural.Materials; +using Objects.Structural.Properties.Profiles; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.TeklaStructures; + +public class TeklaBeam : Beam, IHasVolume, IHasArea +{ + public TeklaBeam() { } + + [SchemaInfo("TeklaBeam", "Creates a Tekla Structures beam by curve.", "Tekla", "Structure")] + public TeklaBeam([SchemaMainParam] ICurve baseLine, SectionProfile profile, StructuralMaterial material) + { + this.baseLine = baseLine; + this.profile = profile; + this.material = material; + } + + public string name { get; set; } + + [DetachProperty] + public SectionProfile profile { get; set; } + + [DetachProperty] + public StructuralMaterial material { get; set; } + + [DetachProperty] + public string finish { get; set; } + + [DetachProperty] + public string classNumber { get; set; } + + public Vector alignmentVector { get; set; } // This can be set to get proper rotation if coming from an application that doesn't have positioning + + [DetachProperty] + public TeklaPosition 
position { get; set; } + + public Base userProperties { get; set; } + + [DetachProperty] + public Base rebars { get; set; } + + public TeklaBeamType TeklaBeamType { get; set; } + public double area { get; set; } + public double volume { get; set; } +} + +public class SpiralBeam : TeklaBeam +{ + public Point startPoint { get; set; } + public Point rotationAxisPt1 { get; set; } + public Point rotationAxisPt2 { get; set; } + public double totalRise { get; set; } + public double rotationAngle { get; set; } + public double twistAngleStart { get; set; } + public double twistAngleEnd { get; set; } +} diff --git a/src/Objects/BuiltElements/TeklaStructures/TeklaContourPlate.cs b/src/Objects/BuiltElements/TeklaStructures/TeklaContourPlate.cs new file mode 100644 index 00000000..7fcdcd22 --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/TeklaContourPlate.cs @@ -0,0 +1,68 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Structural.Materials; +using Objects.Structural.Properties.Profiles; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.TeklaStructures; + +public class TeklaContourPlate : Area +{ + [SchemaInfo("ContourPlate", "Creates a TeklaStructures contour plate.", "Tekla", "Structure")] + public TeklaContourPlate( + SectionProfile profile, + Polyline outline, + string finish, + string classNumber, + string units, + StructuralMaterial? material = null, + TeklaPosition? position = null, + Base? rebars = null + ) + { + this.profile = profile; + this.outline = outline; + this.material = material; + this.finish = finish; + this.classNumber = classNumber; + this.position = position ?? new(); + this.rebars = rebars; + this.units = units; + } + + public TeklaContourPlate() { } + + [DetachProperty] + public SectionProfile profile { get; set; } + + [DetachProperty] + public StructuralMaterial? material { get; set; } + + [DetachProperty] + public string finish { get; set; } + + [DetachProperty] + public string classNumber { get; set; } + + [DetachProperty] + public TeklaPosition position { get; set; } = new(); + + [DetachProperty] + public Base? rebars { get; set; } + + public List contour { get; set; } // Use for ToNative to Tekla. Other programs can use Area.outline. 
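+
+  // Example (a minimal sketch, not part of this patch): driving the SchemaBuilder constructor above,
+  // assuming a SectionProfile `profile`, a closed Polyline `outline` and a StructuralMaterial `material`
+  // are already built; the string arguments are finish, class number and units respectively.
+  //   var plate = new TeklaContourPlate(profile, outline, "PAINT", "2", "mm", material);
+  //   plate.contour = outline.GetPoints().ConvertAll(p => new TeklaContourPoint(p));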
+} + +public class TeklaContourPoint : Point +{ + public TeklaContourPoint() { } + + public TeklaContourPoint(Point point) { } + + public TeklaChamferType chamferType { get; set; } + public double xDim { get; set; } + public double yDim { get; set; } + public double dz1 { get; set; } + public double dz2 { get; set; } +} diff --git a/src/Objects/BuiltElements/TeklaStructures/TeklaModel.cs b/src/Objects/BuiltElements/TeklaStructures/TeklaModel.cs new file mode 100644 index 00000000..47d0cdc1 --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/TeklaModel.cs @@ -0,0 +1,13 @@ +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.TeklaStructures; + +public class TeklaModel : Base +{ + [DetachProperty] + public List Beams { get; set; } + + [DetachProperty] + public List Rebars { get; set; } +} diff --git a/src/Objects/BuiltElements/TeklaStructures/TeklaOpening.cs b/src/Objects/BuiltElements/TeklaStructures/TeklaOpening.cs new file mode 100644 index 00000000..8ab6fae3 --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/TeklaOpening.cs @@ -0,0 +1,18 @@ +namespace Objects.BuiltElements.TeklaStructures; + +public class TeklaOpening : Opening +{ + public string openingHostId { get; set; } + public TeklaOpeningTypeEnum openingType { get; set; } +} + +public class TeklaContourOpening : TeklaOpening +{ + public TeklaContourPlate cuttingPlate { get; set; } + public double thickness { get; set; } +} + +public class TeklaBeamOpening : TeklaOpening +{ + public TeklaBeam cuttingBeam { get; set; } +} diff --git a/src/Objects/BuiltElements/TeklaStructures/TeklaRebar.cs b/src/Objects/BuiltElements/TeklaStructures/TeklaRebar.cs new file mode 100644 index 00000000..9908f5e2 --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/TeklaRebar.cs @@ -0,0 +1,45 @@ +using System; +using Objects.Structural.Materials; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.TeklaStructures; + +#region Obsolete +[Obsolete("Deprecated in 2.17: Create a TeklaRebarGroup class instead")] +public class TeklaRebar : Rebar +{ + public string name { get; set; } + + [DetachProperty] + public Hook startHook { get; set; } + + [DetachProperty] + public Hook endHook { get; set; } + + public double classNumber { get; set; } + public string size { get; set; } + + [DetachProperty] + public StructuralMaterial material { get; set; } +} + +[Obsolete("Deprecated in 2.17: Use a RebarHook class instead")] +public class Hook : Base +{ + public double angle { get; set; } + public double length { get; set; } + public double radius { get; set; } + public shape shape { get; set; } +} + +[Obsolete("Deprecated in 2.17: set starthook and endhook to null or refer to hook angle instead")] +[System.Diagnostics.CodeAnalysis.SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] +public enum shape +{ + NO_HOOK = 0, + HOOK_90_DEGREES = 1, + HOOK_135_DEGREES = 2, + HOOK_180_DEGREES = 3, + CUSTOM_HOOK = 4 +} +#endregion diff --git a/src/Objects/BuiltElements/TeklaStructures/Welds.cs b/src/Objects/BuiltElements/TeklaStructures/Welds.cs new file mode 100644 index 00000000..da2c7698 --- /dev/null +++ b/src/Objects/BuiltElements/TeklaStructures/Welds.cs @@ -0,0 +1,30 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements.TeklaStructures; + +public class Welds : Base +{ + [DetachProperty] + public List displayValue { get; set; } + + public string mainObjectId { get; set; } + public string 
secondaryObjectId { get; set; } + public double sizeAbove { get; set; } + public double sizeBelow { get; set; } + public double lengthAbove { get; set; } + public double lengthBelow { get; set; } + public double pitchAbove { get; set; } + public double pitchBelow { get; set; } + public double angleAbove { get; set; } // In degrees + public double angleBelow { get; set; } // In degrees + public TeklaWeldType typeAbove { get; set; } + public TeklaWeldType typeBelow { get; set; } + public TeklaWeldIntermittentType intermittentType { get; set; } +} + +public class PolygonWelds : Welds +{ + public Polyline polyline { get; set; } +} diff --git a/src/Objects/BuiltElements/Topography.cs b/src/Objects/BuiltElements/Topography.cs new file mode 100644 index 00000000..bf629c50 --- /dev/null +++ b/src/Objects/BuiltElements/Topography.cs @@ -0,0 +1,28 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Topography : Base, IDisplayValue> +{ + public Topography() + { + displayValue = new List(); + } + + [SchemaInfo("Topography", "Creates a Speckle topography", "BIM", "Architecture")] + public Topography([SchemaMainParam] Mesh displayMesh) + { + displayValue = new List { displayMesh }; + } + + public Mesh baseGeometry { get; set; } = new(); + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } + //TODO Figure out if we should add a new constructor that takes a List or if Topography should just have a single mesh display value +} diff --git a/src/Objects/BuiltElements/View.cs b/src/Objects/BuiltElements/View.cs new file mode 100644 index 00000000..d518f101 --- /dev/null +++ b/src/Objects/BuiltElements/View.cs @@ -0,0 +1,27 @@ +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class View : Base +{ + public string name { get; set; } +} + +public class View3D : View +{ + public Point origin { get; set; } + public Point target { get; set; } + public Vector upDirection { get; set; } + public Vector forwardDirection { get; set; } + public Box boundingBox { get; set; } // x is right, y is top of screen, z is towards viewer + public bool isOrthogonal { get; set; } + + public string units { get; set; } +} + +public class View2D : View +{ + //public Point topLeft { get; set; } + //public Point bottomRight { get; set; } +} diff --git a/src/Objects/BuiltElements/Wall.cs b/src/Objects/BuiltElements/Wall.cs new file mode 100644 index 00000000..9f6cb754 --- /dev/null +++ b/src/Objects/BuiltElements/Wall.cs @@ -0,0 +1,43 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Wall : Base, IDisplayValue> +{ + public Wall() { } + + /// + /// SchemaBuilder constructor for a Speckle wall + /// + /// + /// + /// + /// Assign units when using this constructor due to param + [SchemaInfo("Wall", "Creates a Speckle wall", "BIM", "Architecture")] + public Wall( + double height, + [SchemaMainParam] ICurve baseLine, + [SchemaParamInfo("Any nested elements that this wall might have")] List? elements = null + ) + { + this.height = height; + this.baseLine = baseLine; + this.elements = elements; + } + + public double height { get; set; } + + [DetachProperty] + public List? elements { get; set; } + + public ICurve baseLine { get; set; } + public virtual Level? 
level { get; internal set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/BuiltElements/Wire.cs b/src/Objects/BuiltElements/Wire.cs new file mode 100644 index 00000000..cc4aa324 --- /dev/null +++ b/src/Objects/BuiltElements/Wire.cs @@ -0,0 +1,20 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Wire : Base +{ + public Wire() { } + + [SchemaInfo("Wire", "Creates a Speckle wire from curve segments and points", "BIM", "MEP")] + public Wire(List segments) + { + this.segments = segments; + } + + public List segments { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/BuiltElements/Zone.cs b/src/Objects/BuiltElements/Zone.cs new file mode 100644 index 00000000..30d53643 --- /dev/null +++ b/src/Objects/BuiltElements/Zone.cs @@ -0,0 +1,24 @@ +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.BuiltElements; + +public class Zone : Base, IHasArea, IHasVolume +{ + public Zone() { } + + public Zone(string name) + { + this.name = name; + } + + public string name { get; set; } + public string units { get; set; } + + public List spaces { get; set; } + + // implicit measurements + public double area { get; set; } + public double volume { get; set; } + public double perimeter { get; set; } +} diff --git a/src/Objects/EncodingOptimisations.cs b/src/Objects/EncodingOptimisations.cs new file mode 100644 index 00000000..1db99a56 --- /dev/null +++ b/src/Objects/EncodingOptimisations.cs @@ -0,0 +1,102 @@ +using System; +using System.Collections.Generic; +using Objects.Geometry; + +namespace Objects; + +public static class CurveTypeEncoding +{ + public const double Arc = 0; + public const double Circle = 1; + public const double Curve = 2; + public const double Ellipse = 3; + public const double Line = 4; + public const double Polyline = 5; + public const double PolyCurve = 6; +} + +public static class CurveArrayEncodingExtensions +{ + public static List ToArray(List curves) + { + var list = new List(); + foreach (var curve in curves) + { + switch (curve) + { + case Arc a: + list.AddRange(a.ToList()); + break; + case Circle c: + list.AddRange(c.ToList()); + break; + case Curve c: + list.AddRange(c.ToList()); + break; + case Ellipse e: + list.AddRange(e.ToList()); + break; + case Line l: + list.AddRange(l.ToList()); + break; + case Polycurve p: + list.AddRange(p.ToList()); + break; + case Polyline p: + list.AddRange(p.ToList()); + break; + default: + throw new Exception($"Unkown curve type: {curve.GetType()}."); + } + } + + return list; + } + + public static List FromArray(List list) + { + var curves = new List(); + if (list.Count == 0) + { + return curves; + } + + var done = false; + var currentIndex = 0; + + while (!done) + { + var itemLength = (int)list[currentIndex]; + var item = list.GetRange(currentIndex, itemLength + 1); + + switch (item[1]) + { + case CurveTypeEncoding.Arc: + curves.Add(Arc.FromList(item)); + break; + case CurveTypeEncoding.Circle: + curves.Add(Circle.FromList(item)); + break; + case CurveTypeEncoding.Curve: + curves.Add(Curve.FromList(item)); + break; + case CurveTypeEncoding.Ellipse: + curves.Add(Ellipse.FromList(item)); + break; + case CurveTypeEncoding.Line: + curves.Add(Line.FromList(item)); + break; + case CurveTypeEncoding.Polyline: + curves.Add(Polyline.FromList(item)); + break; + case CurveTypeEncoding.PolyCurve: + 
curves.Add(Polycurve.FromList(item)); + break; + } + + currentIndex += itemLength + 1; + done = currentIndex >= list.Count; + } + return curves; + } +} diff --git a/src/Objects/GIS/CRS.cs b/src/Objects/GIS/CRS.cs new file mode 100644 index 00000000..39f9e0b3 --- /dev/null +++ b/src/Objects/GIS/CRS.cs @@ -0,0 +1,14 @@ +using Speckle.Core.Models; + +namespace Objects.GIS; + +public class CRS : Base +{ + public string? name { get; set; } + public string? authority_id { get; set; } + public string? wkt { get; set; } + public string? units_native { get; set; } + public float? offset_x { get; set; } + public float? offset_y { get; set; } + public float? rotation { get; set; } +} diff --git a/src/Objects/GIS/GisFeature.cs b/src/Objects/GIS/GisFeature.cs new file mode 100644 index 00000000..1a5f4dc5 --- /dev/null +++ b/src/Objects/GIS/GisFeature.cs @@ -0,0 +1,43 @@ +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.GIS; + +public class GisFeature : Base +{ + public GisFeature() + { + attributes = new Base(); + } + + public GisFeature(Base attributes) + { + this.attributes = attributes; + } + + public GisFeature(List geometry, Base attributes) + { + this.geometry = geometry; + this.attributes = attributes; + } + + public GisFeature(Base attributes, List displayValue) + { + this.attributes = attributes; + this.displayValue = displayValue; + } + + public GisFeature(List geometry, Base attributes, List displayValue) + { + this.geometry = geometry; + this.attributes = attributes; + this.displayValue = displayValue; + } + + [DetachProperty] + public List? geometry { get; set; } + + [DetachProperty] + public List? displayValue { get; set; } + public Base attributes { get; set; } +} diff --git a/src/Objects/GIS/GisMultipatchGeometry.cs b/src/Objects/GIS/GisMultipatchGeometry.cs new file mode 100644 index 00000000..3833785e --- /dev/null +++ b/src/Objects/GIS/GisMultipatchGeometry.cs @@ -0,0 +1,18 @@ +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.GIS; + +public class GisMultipatchGeometry : Base +{ + public string units { get; set; } + public List faces { get; set; } + public List vertices { get; set; } + public List? 
colors { get; set; } + + public GisMultipatchGeometry() + { + faces = new List(); + vertices = new List(); + } +} diff --git a/src/Objects/GIS/GisPolygonGeometry.cs b/src/Objects/GIS/GisPolygonGeometry.cs new file mode 100644 index 00000000..3937f1a6 --- /dev/null +++ b/src/Objects/GIS/GisPolygonGeometry.cs @@ -0,0 +1,17 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.GIS; + +public class PolygonGeometry : Base +{ + public string units { get; set; } + public Polyline boundary { get; set; } + public List voids { get; set; } + + public PolygonGeometry() + { + voids = new List(); + } +} diff --git a/src/Objects/GIS/GisPolygonGeometry3d.cs b/src/Objects/GIS/GisPolygonGeometry3d.cs new file mode 100644 index 00000000..93613c89 --- /dev/null +++ b/src/Objects/GIS/GisPolygonGeometry3d.cs @@ -0,0 +1,3 @@ +namespace Objects.GIS; + +public class PolygonGeometry3d : PolygonGeometry { } diff --git a/src/Objects/GIS/GisTopography.cs b/src/Objects/GIS/GisTopography.cs new file mode 100644 index 00000000..8a34ac70 --- /dev/null +++ b/src/Objects/GIS/GisTopography.cs @@ -0,0 +1,3 @@ +namespace Objects.GIS; + +public class GisTopography : RasterElement { } diff --git a/src/Objects/GIS/NonGeometryElement.cs b/src/Objects/GIS/NonGeometryElement.cs new file mode 100644 index 00000000..c5e0dc57 --- /dev/null +++ b/src/Objects/GIS/NonGeometryElement.cs @@ -0,0 +1,10 @@ +using System; +using Speckle.Core.Models; + +namespace Objects.GIS; + +[Obsolete("NonGeometryElement was replaced by a more generic class, \"GisFeature\", which contains more information")] +public class NonGeometryElement : Base +{ + public Base? attributes { get; set; } +} diff --git a/src/Objects/GIS/PolygonElement.cs b/src/Objects/GIS/PolygonElement.cs new file mode 100644 index 00000000..8905a7cf --- /dev/null +++ b/src/Objects/GIS/PolygonElement.cs @@ -0,0 +1,13 @@ +using System; +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.GIS; + +[Obsolete("PolygonElement was replaced by a more generic class, \"GisFeature\", which contains more information")] +public class PolygonElement : Base +{ + [DetachProperty] + public List geometry { get; set; } + public Base attributes { get; set; } +} diff --git a/src/Objects/GIS/RasterElement.cs b/src/Objects/GIS/RasterElement.cs new file mode 100644 index 00000000..63befed1 --- /dev/null +++ b/src/Objects/GIS/RasterElement.cs @@ -0,0 +1,52 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.GIS; + +public class RasterElement : Base +{ + public int band_count { get; set; } + public List band_names { get; set; } + public float x_origin { get; set; } + public float y_origin { get; set; } + public int x_size { get; set; } + public int y_size { get; set; } + public float x_resolution { get; set; } + public float y_resolution { get; set; } + public List noDataValue { get; set; } + + [DetachProperty] + public List displayValue { get; set; } + + public RasterElement() + { + displayValue = new List(); + band_names = new List(); + noDataValue = new List(); + } + + public RasterElement( + int bandCount, + List bandNames, + float xOrigin, + float yOrigin, + int xSize, + int ySize, + float xResolution, + float yResolution, + List noDataValue + ) + { + displayValue = new List(); + band_count = bandCount; + band_names = bandNames; + x_origin = xOrigin; + y_origin = yOrigin; + x_size = xSize; + y_size = ySize; + x_resolution = xResolution; + y_resolution = 
yResolution; + this.noDataValue = noDataValue; + } +} diff --git a/src/Objects/GIS/RasterLayer.cs b/src/Objects/GIS/RasterLayer.cs new file mode 100644 index 00000000..b7330d49 --- /dev/null +++ b/src/Objects/GIS/RasterLayer.cs @@ -0,0 +1,18 @@ +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.GIS; + +public class RasterLayer : Collection +{ + public CRS? crs { get; set; } + public string? units { get; set; } + public CRS? rasterCrs { get; set; } + public string? geomType { get; set; } + public Dictionary? renderer { get; set; } + + public RasterLayer() + { + collectionType = "RasterLayer"; + } +} diff --git a/src/Objects/GIS/VectorLayer.cs b/src/Objects/GIS/VectorLayer.cs new file mode 100644 index 00000000..618f95d8 --- /dev/null +++ b/src/Objects/GIS/VectorLayer.cs @@ -0,0 +1,20 @@ +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.GIS; + +public class VectorLayer : Collection +{ + public CRS? crs { get; set; } + public string? units { get; set; } + public Base attributes { get; set; } + public string? geomType { get; set; } + public string? nativeGeomType { get; set; } + public Dictionary? renderer { get; set; } + + public VectorLayer() + { + collectionType = "VectorLayer"; + attributes = new Base(); + } +} diff --git a/src/Objects/Geometry/Arc.cs b/src/Objects/Geometry/Arc.cs new file mode 100644 index 00000000..9ca35b90 --- /dev/null +++ b/src/Objects/Geometry/Arc.cs @@ -0,0 +1,293 @@ +using System; +using System.Collections.Generic; +using Objects.Other; +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +/// +/// Represents a sub-curve of a three-dimensional circle. +/// +public class Arc : Base, IHasBoundingBox, ICurve, IHasArea, ITransformable +{ + /// + public Arc() { } + + /// + /// Constructs a new using angle values. + /// + /// The Plane where the arc will be drawn + /// The radius of the Arc + /// The angle formed between the start point and the X Axis of the plane + /// The angle formed between the end point and the X Axis of the plane + /// The total angle of the Arc in Radians + /// The object's units + /// The object's unique application ID + public Arc( + Plane plane, + double radius, + double startAngle, + double endAngle, + double angleRadians, + string units = Units.Meters, + string? applicationId = null + ) + { + this.plane = plane; + this.radius = radius; + this.startAngle = startAngle; + this.endAngle = endAngle; + this.angleRadians = angleRadians; + domain = angleRadians > 0 ? new Interval(0, angleRadians) : new Interval(angleRadians, 0); + this.applicationId = applicationId; + this.units = units; + } + + /// + /// Initialise an `Arc` using the arc angle and the start and end points. + /// The radius, midpoint, start angle, and end angle will be calculated. + /// For now, this assumes 2D arcs on the XY plane + /// + /// The start point of the arc + /// The end point of the arc + /// The arc angle + /// Units (defaults to "m") + /// ID given to the arc in the authoring programme (defaults to null) + public Arc( + Point startPoint, + Point endPoint, + double angleRadians, + string units = Units.Meters, + string? applicationId = null + ) + : this( + new Plane(startPoint, new Vector(0, 0, 1), new Vector(1, 0, 0), new Vector(0, 1, 0), units), + startPoint, + endPoint, + angleRadians, + units, + applicationId + ) { } + + /// + /// Initialise an `Arc` using a plane, the arc angle and the start and end points. 
+ /// The radius, midpoint, start angle, and end angle will be calculated. + /// + /// The Plane where the arc will be drawn + /// The start point of the arc + /// The end point of the arc + /// The arc angle + /// Units (defaults to "m") + /// ID given to the arc in the authoring programme (defaults to null) + public Arc( + Plane plane, + Point startPoint, + Point endPoint, + double angleRadians, + string units = Units.Meters, + string? applicationId = null + ) + { + // don't be annoying + if (angleRadians > Math.PI * 2) + { + throw new SpeckleException("Can't create an arc with an angle greater than 2pi"); + } + + if (startPoint == endPoint) + { + throw new SpeckleException("Can't create an arc where the start and end points are the same"); + } + + this.units = units; + this.startPoint = startPoint; + this.endPoint = endPoint; + this.angleRadians = angleRadians; + domain = angleRadians > 0 ? new Interval(0, angleRadians) : new Interval(angleRadians, 0); + this.applicationId = applicationId; + + // find chord and chord angle which may differ from the arc angle + var chordMidpoint = Point.Midpoint(startPoint, endPoint); + var chordLength = Point.Distance(startPoint, endPoint); + var chordAngle = angleRadians; + if (chordAngle > Math.PI) + { + chordAngle -= Math.PI * 2; + } + else if (chordAngle < -Math.PI) + { + chordAngle += Math.PI * 2; + } + // use the law of cosines for an isosceles triangle to get the radius + radius = chordLength / Math.Sqrt(2 - 2 * Math.Cos(chordAngle)); + + // find the chord vector then calculate the perpendicular vector which points to the centre + // which can be used to find the circle centre point + var dir = chordAngle < 0 ? -1 : 1; + var centreToChord = Math.Sqrt(Math.Pow((double)radius, 2) - Math.Pow(chordLength * 0.5, 2)); + var perp = Vector.CrossProduct(new Vector(endPoint - startPoint), plane.normal); + var circleCentre = chordMidpoint + new Point(perp.Unit() * centreToChord * -dir); + plane.origin = circleCentre; + + // use the perpendicular vector in the other direction (from the centre to the arc) to find the arc midpoint + midPoint = + angleRadians > Math.PI + ? chordMidpoint + new Point(perp.Unit() * ((double)radius + centreToChord) * -dir) + : chordMidpoint + new Point(perp.Unit() * ((double)radius - centreToChord) * dir); + + // find the start angle using trig (correcting for quadrant position) and add the arc angle to get the end angle + startAngle = Math.Tan((startPoint.y - circleCentre.y) / (startPoint.x - circleCentre.x)) % (2 * Math.PI); + if (startPoint.x > circleCentre.x && startPoint.y < circleCentre.y) // Q4 + { + startAngle *= -1; + } + else if (startPoint.x < circleCentre.x && startPoint.y < circleCentre.y) // Q3 + { + startAngle += Math.PI; + } + else if (startPoint.x < circleCentre.x && startPoint.y > circleCentre.y) // Q2 + { + startAngle = Math.PI - startAngle; + } + + endAngle = startAngle + angleRadians; + // Set the plane of this arc + this.plane = plane; + } + + /// + /// The radius of the + /// + public double? radius { get; set; } + + /// + /// The start angle of the based on it's + /// + public double? startAngle { get; set; } + + /// + /// The end angle of the based on it's + /// + public double? endAngle { get; set; } + + /// + /// The inner angle of the + /// + public double angleRadians { get; set; } + + /// + /// Gets or sets the plane of the . The plane origin is the center. 
+ /// + public Plane plane { get; set; } + + /// + /// The start of the + /// + public Point startPoint { get; set; } + + /// + /// Gets or sets the point at 0.5 length. + /// + public Point midPoint { get; set; } + + /// + /// The end of the + /// + public Point endPoint { get; set; } + + /// + /// The units this object was specified in. + /// + public string units { get; set; } + + /// + public Interval domain { get; set; } = new(0, 0); + + /// + public double length { get; set; } + + /// + public double area { get; set; } + + /// + public Box bbox { get; set; } + + /// + public bool TransformTo(Transform transform, out Arc transformed) + { + startPoint.TransformTo(transform, out Point transformedStartPoint); + midPoint.TransformTo(transform, out Point transformedMidpoint); + endPoint.TransformTo(transform, out Point transformedEndPoint); + plane.TransformTo(transform, out Plane pln); + var arc = new Arc(pln, transformedStartPoint, transformedEndPoint, angleRadians, units) + { + midPoint = transformedMidpoint, + domain = domain + }; + transformed = arc; + return true; + } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + var res = TransformTo(transform, out Arc arc); + transformed = arc; + return res; + } + + /// + /// Creates a flat list with the values of the + /// This is only used for serialisation purposes. + /// + /// A list of numbers representing the 's value + public List ToList() + { + var list = new List(); + list.Add(radius ?? 0); + list.Add(startAngle ?? 0); + list.Add(endAngle ?? 0); + list.Add(angleRadians); + list.Add(domain?.start ?? 0); + list.Add(domain?.end ?? 0); + + list.AddRange(plane.ToList()); + list.AddRange(startPoint.ToList()); + list.AddRange(midPoint.ToList()); + list.AddRange(endPoint.ToList()); + list.Add(Units.GetEncodingFromUnit(units)); + list.Insert(0, CurveTypeEncoding.Arc); + list.Insert(0, list.Count); + return list; + } + + /// + /// Creates a new instance based on a flat list of numerical values. + /// This is only used for deserialisation purposes. + /// + /// The input list should be the result of having called + /// A list of numbers + /// A new with the values assigned from the list. + public static Arc FromList(List list) + { + var arc = new Arc + { + radius = list[2], + startAngle = list[3], + endAngle = list[4], + angleRadians = list[5], + domain = new Interval(list[6], list[7]), + units = Units.GetUnitFromEncoding(list[list.Count - 1]), + plane = Plane.FromList(list.GetRange(8, 13)) + }; + arc.startPoint = Point.FromList(list.GetRange(21, 3), arc.units); + arc.midPoint = Point.FromList(list.GetRange(24, 3), arc.units); + arc.endPoint = Point.FromList(list.GetRange(27, 3), arc.units); + arc.plane.units = arc.units; + + return arc; + } +} diff --git a/src/Objects/Geometry/Autocad/AutocadPolycurve.cs b/src/Objects/Geometry/Autocad/AutocadPolycurve.cs new file mode 100644 index 00000000..a88c5147 --- /dev/null +++ b/src/Objects/Geometry/Autocad/AutocadPolycurve.cs @@ -0,0 +1,86 @@ +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.Geometry.Autocad; + +/// +/// A curve that is comprised of line, arc and/or curve segments, representing the Autocad Polyline, Polyline2d, and Polyline3d classes. +/// +/// +/// and types will have only s and s in . +/// type will have only s in . +/// type will only have s in . +/// , , , and types will have only a single s in . +/// +public class AutocadPolycurve : Polycurve +{ + /// + /// Constructs a new empty instance. 
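An illustration of the Arc flat-list round trip defined above (not part of the diff): the generic arguments stripped from this hunk are assumed to be List<double>, matching the fixed offsets that FromList reads back.

using System;
using System.Collections.Generic;
using Objects.Geometry;

// A quarter arc of a unit circle on the XY plane: start (1,0,0), end (0,1,0), angle pi/2.
var plane = new Plane(new Point(0, 0, 0), new Vector(0, 0, 1), new Vector(1, 0, 0), new Vector(0, 1, 0));
var arc = new Arc(plane, new Point(1, 0, 0), new Point(0, 1, 0), angleRadians: Math.PI / 2);

List<double> encoded = arc.ToList();   // [total count, CurveTypeEncoding.Arc, radius, angles, domain, plane, points, unit code]
Arc decoded = Arc.FromList(encoded);   // reads the same fixed offsets back, including the 13-value plane slice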
+ /// + public AutocadPolycurve() { } + + /// + /// Gets or sets the raw coordinates of the vertices. + /// + /// + /// For Polylines, these are xy coordinates in the Object Coordinate System (OCS)/>. + /// For Polyline2d and Polyline3d types, these are xyz coordinates in the Global Coordinate System. fml. + /// + [DetachProperty, Chunkable(31250)] + public List value { get; set; } = new(); + + /// + /// The bulge factor at each vertex. Should be null for Polyline3d. + /// + /// + /// The bulge factor is used to indicate how much of an arc segment is present at this vertex. + /// The bulge factor is the tangent of one fourth the included angle for an arc segment, + /// made negative if the arc goes clockwise from the start point to the endpoint. + /// A bulge of 0 indicates a straight segment, and a bulge of 1 is a semicircle. + /// + public List? bulges { get; set; } + + /// + /// The tangent in radians at each vertex. Should be null for Polyline and Polyline3d. + /// + public List? tangents { get; set; } + + /// + /// The normal of the plane of the Autocad Polyline or Polyline2d. Should be null for Polyline3d. + /// + public Vector? normal { get; set; } + + /// + /// The distance from the plane to the origin of the Autocad Polyline or Polyline2d. Should be null for Polyline3d. + /// + public double? elevation { get; set; } + + public AutocadPolyType polyType { get; set; } +} + +/// +/// Represents the type of a Autocad Polyline. +/// +public enum AutocadPolyType +{ + /// Polyline type is not known + Unknown, + + /// Polyline type is the Autocad Polyline class + Light, + + Simple2d, + + Simple3d, + + /// The Autocad Polyline2d fit curve poly type. Constructed with pairs of arcs with continuous tangents. + FitCurve2d, + + CubicSpline2d, + + CubicSpline3d, + + QuadSpline2d, + + QuadSpline3d, +} diff --git a/src/Objects/Geometry/Box.cs b/src/Objects/Geometry/Box.cs new file mode 100644 index 00000000..f4bdab9f --- /dev/null +++ b/src/Objects/Geometry/Box.cs @@ -0,0 +1,77 @@ +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +/// +/// Represents a 3-dimensional box oriented on a plane. +/// +public class Box : Base, IHasVolume, IHasArea, IHasBoundingBox +{ + /// + public Box() { } + + /// + /// Constructs a new instance with a and coordinate intervals for all 3 axis {x , y , z} + /// + /// The plane the box will be oriented by. + /// The range of coordinates (min, max) for the X axis + /// The range of coordinates (min, max) for the Y axis + /// The range of coordinates (min, max) for the Z axis + /// The units the coordinates are in. + /// The unique application ID of the object. + public Box( + Plane basePlane, + Interval xSize, + Interval ySize, + Interval zSize, + string units = Units.Meters, + string? 
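A small sketch of the bulge convention documented above (illustration only, not part of the patch): since the bulge is the tangent of one fourth of the included angle, the segment's arc angle can be recovered with an arctangent; IncludedAngleFromBulge is a hypothetical helper.

using System;

static double IncludedAngleFromBulge(double bulge) => 4 * Math.Atan(bulge); // negative bulge -> clockwise arc

Console.WriteLine(IncludedAngleFromBulge(1.0)); // ~3.14159: a bulge of 1 is a semicircle
Console.WriteLine(IncludedAngleFromBulge(0.0)); // 0: a straight segment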
applicationId = null + ) + { + this.basePlane = basePlane; + this.xSize = xSize; + this.ySize = ySize; + this.zSize = zSize; + this.applicationId = applicationId; + this.units = units; + } + + /// + /// Gets or sets the plane that defines the orientation of the + /// + public Plane basePlane { get; set; } + + /// + /// Gets or sets the that defines the min and max coordinate in the X direction + /// + public Interval xSize { get; set; } + + /// + /// Gets or sets the that defines the min and max coordinate in the Y direction + /// + public Interval ySize { get; set; } + + /// + /// Gets or sets the that defines the min and max coordinate in the Y direction + /// + public Interval zSize { get; set; } + + /// + /// The units this object's coordinates are in. + /// + /// + /// This should be one of + /// + public string units { get; set; } + + /// + public double area { get; set; } + + /// + public Box bbox { get; } + + /// + public double volume { get; set; } +} diff --git a/src/Objects/Geometry/Brep.cs b/src/Objects/Geometry/Brep.cs new file mode 100644 index 00000000..324cc591 --- /dev/null +++ b/src/Objects/Geometry/Brep.cs @@ -0,0 +1,710 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using Objects.Other; +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +/// +/// Represents a "Boundary Representation" Solid +/// +public class Brep : Base, IHasArea, IHasVolume, IHasBoundingBox, ITransformable, IDisplayValue> +{ + /// + /// Initializes a new instance of class. + /// + public Brep() + { + Surfaces = new List(); + Curve2D = new List(); + Curve3D = new List(); + + Vertices = new List(); + Edges = new List(); + Loops = new List(); + Trims = new List(); + Faces = new List(); + + IsClosed = false; + Orientation = BrepOrientation.None; + } + + public Brep(string provenance, Mesh displayValue, string units = Units.Meters, string? applicationId = null) + : this(provenance, new List { displayValue }, units, applicationId) { } + + public Brep(string provenance, List displayValues, string units = Units.Meters, string? applicationId = null) + : this() + { + this.provenance = provenance; + displayValue = displayValues; + this.applicationId = applicationId; + this.units = units; + } + + public string provenance { get; set; } + + /// + /// The unit's this object's coordinates are in. + /// + /// + /// This should be one of + /// + public string units { get; set; } + + /// + /// Gets or sets the list of surfaces in this instance. + /// + [JsonIgnore] + public List Surfaces { get; set; } + + /// + /// Gets or sets the flat list of numbers representing the 's surfaces. + /// + [DetachProperty, SchemaIgnore, Chunkable(31250)] + public List SurfacesValue + { + get + { + var list = new List(); + if (Surfaces != null) + { + foreach (var srf in Surfaces) + { + list.AddRange(srf.ToList()); + } + } + + return list; + } + set + { + if (value == null) + { + return; + } + + var list = new List(); + var done = false; + var currentIndex = 0; + while (!done) + { + var len = (int)value[currentIndex]; + list.Add(Surface.FromList(value.GetRange(currentIndex + 1, len))); + currentIndex += len + 1; + done = currentIndex >= value.Count; + } + + Surfaces = list; + } + } + + /// + /// Gets or sets the list of 3-dimensional curves in this instance. 
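A standalone sketch of the count-prefixed chunking used by the SurfacesValue setter above (illustration only; ReadLengthPrefixed is a hypothetical helper, and the stripped generic arguments are assumed to be List<double>). Each record is written as [count, v1, ..., vcount], and the reader advances by count + 1.

using System.Collections.Generic;

static IEnumerable<List<double>> ReadLengthPrefixed(List<double> flat)
{
  var i = 0;
  while (i < flat.Count)
  {
    var len = (int)flat[i];
    yield return flat.GetRange(i + 1, len); // one record's raw values, e.g. one Surface
    i += len + 1;
  }
}

The edge, loop and face encodings later in this file reuse the same count-prefixed pattern over integer lists, while the trim encoding uses fixed nine-value records instead.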
+ /// + [JsonIgnore] + public List Curve3D { get; set; } + + /// + /// Gets or sets the flat list of numbers representing the 's 3D curves. + /// + /// + /// This is only used for the class serialisation/deserialisation. You should use instead. + /// + [DetachProperty, SchemaIgnore, Chunkable(31250)] + public List Curve3DValues + { + get => CurveArrayEncodingExtensions.ToArray(Curve3D); + set + { + if (value != null) + { + Curve3D = CurveArrayEncodingExtensions.FromArray(value); + } + } + } + + /// + /// Gets or sets the list of 2-dimensional UV curves in this instance. + /// + [JsonIgnore] + public List Curve2D { get; set; } + + /// + /// Gets or sets the flat list of numbers representing the 's 2D curves. + /// + /// + /// This is only used for the class serialisation/deserialisation. You should use instead. + /// + [DetachProperty, SchemaIgnore, Chunkable(31250)] + public List Curve2DValues + { + get => CurveArrayEncodingExtensions.ToArray(Curve2D); + set + { + if (value != null) + { + Curve2D = CurveArrayEncodingExtensions.FromArray(value); + } + } + } + + /// + /// Gets or sets the list of vertices in this instance. + /// + [JsonIgnore] + public List Vertices { get; set; } + + /// + /// Gets or sets the flat list of numbers representing the 's vertices. + /// + /// + /// This is only used for the class serialisation/deserialisation. You should use instead. + /// + [DetachProperty, SchemaIgnore, Chunkable(31250)] + public List VerticesValue + { + get + { + var list = new List(); + list.Add(Units.GetEncodingFromUnit(units)); + foreach (var vertex in Vertices) + { + list.AddRange(vertex.ToList()); + } + + return list; + } + set + { + if (value != null) + { + var units = value.Count % 3 == 0 ? Units.None : Units.GetUnitFromEncoding(value[0]); + for (int i = value.Count % 3 == 0 ? 0 : 1; i < value.Count; i += 3) + { + Vertices.Add(new Point(value[i], value[i + 1], value[i + 2], units)); + } + } + } + } + + /// + /// Gets or sets the list of edges in this instance. + /// + [JsonIgnore] + public List Edges { get; set; } + + /// + /// Gets or sets the flat list of numbers representing the 's edges. + /// + /// + /// This is only used for the class serialisation/deserialisation. You should use instead. + /// + [DetachProperty, SchemaIgnore, Chunkable(62500)] + public List EdgesValue + { + get => + Edges + .SelectMany(e => + { + var ints = new List(); + ints.Add(e.Curve3dIndex); + ints.Add(e.StartIndex); + ints.Add(e.EndIndex); + ints.Add(Convert.ToInt32(e.ProxyCurveIsReversed)); + ints.Add(e.Domain.start ?? 0); + ints.Add(e.Domain.end ?? 1); + ints.AddRange(e.TrimIndices.Select(Convert.ToDouble).Cast()); + return ints.Prepend(ints.Count); + }) + .ToList(); + set + { + Edges = new List(); + if (value == null || value.Count == 0) + { + return; + } + + var i = 0; + while (i < value.Count) + { + int n = Convert.ToInt32(value[i]); + + var loopValues = value.GetRange(i + 1, n); + var curve3dIndex = Convert.ToInt32(loopValues[0]); + var startIndex = Convert.ToInt32(loopValues[1]); + var endIndex = Convert.ToInt32(loopValues[2]); + var proxyReversed = Convert.ToBoolean(loopValues[3]); + var domainStart = loopValues[4]; + var domainEnd = loopValues[5]; + Interval domain = + domainStart.HasValue && domainEnd.HasValue ? 
new(domainStart.Value, domainEnd.Value) : new(0, 1); + + var trimIndices = loopValues.GetRange(6, loopValues.Count - 6).Select(d => Convert.ToInt32(d)).ToArray(); + + var edge = new BrepEdge(this, curve3dIndex, trimIndices, startIndex, endIndex, proxyReversed, domain); + Edges.Add(edge); + i += n + 1; + } + } + } + + /// + /// Gets or sets the list of closed UV loops in this instance. + /// + [JsonIgnore] + public List Loops { get; set; } + + /// + /// Gets or sets the flat list of numbers representing the 's loops. + /// + /// + /// This is only used for the class serialisation/deserialisation. You should use instead. + /// + [DetachProperty, SchemaIgnore, Chunkable(62500)] + public List LoopsValue + { + get => + Loops + .SelectMany(l => + { + var ints = new List(); + ints.Add(l.FaceIndex); + ints.Add((int)l.Type); + ints.AddRange(l.TrimIndices); + return ints.Prepend(ints.Count); + }) + .ToList(); + set + { + Loops = new List(); + if (value == null || value.Count == 0) + { + return; + } + + var i = 0; + while (i < value.Count) + { + int n = value[i]; + + var loopValues = value.GetRange(i + 1, n); + var faceIndex = loopValues[0]; + var type = (BrepLoopType)loopValues[1]; + var trimIndices = loopValues.GetRange(2, loopValues.Count - 2); + var loop = new BrepLoop(this, faceIndex, trimIndices, type); + Loops.Add(loop); + i += n + 1; + } + } + } + + /// + /// Gets or sets the list of UV trim segments for each surface in this instance. + /// + [JsonIgnore] + public List Trims { get; set; } + + /// + /// Gets or sets the flat list of numbers representing the 's trims. + /// + /// + /// This is only used for the class serialisation/deserialisation. You should use instead. + /// + [DetachProperty, SchemaIgnore, Chunkable(62500)] + public List TrimsValue + { + get + { + List list = new(); + foreach (var trim in Trims) + { + list.Add(trim.EdgeIndex); + list.Add(trim.StartIndex); + list.Add(trim.EndIndex); + list.Add(trim.FaceIndex); + list.Add(trim.LoopIndex); + list.Add(trim.CurveIndex); + list.Add(trim.IsoStatus); + list.Add((int)trim.TrimType); + list.Add(trim.IsReversed ? 1 : 0); + } + + return list; + } + set + { + if (value == null) + { + return; + } + + var list = new List(); + for (int i = 0; i < value.Count; i += 9) + { + var trim = new BrepTrim + { + EdgeIndex = value[i], + StartIndex = value[i + 1], + EndIndex = value[i + 2], + FaceIndex = value[i + 3], + LoopIndex = value[i + 4], + CurveIndex = value[i + 5], + IsoStatus = value[i + 6], + TrimType = (BrepTrimType)value[i + 7], + IsReversed = value[i + 8] == 1 + }; + list.Add(trim); + } + + Trims = list; + } + } + + /// + /// Gets or sets the list of faces in this instance. + /// + [JsonIgnore] + public List Faces { get; set; } + + /// + /// Gets or sets the flat list of numbers representing the 's faces. + /// + /// + /// This is only used for the class serialisation/deserialisation. You should use instead. + /// + [DetachProperty, SchemaIgnore, Chunkable(62500)] + public List FacesValue + { + get => + Faces + .SelectMany(f => + { + var ints = new List(); + ints.Add(f.SurfaceIndex); + ints.Add(f.OuterLoopIndex); + ints.Add(f.OrientationReversed ? 
1 : 0); + ints.AddRange(f.LoopIndices); + return ints.Prepend(ints.Count); + }) + .ToList(); + set + { + Faces = new List(); + if (value == null || value.Count == 0) + { + return; + } + + var i = 0; + while (i < value.Count) + { + int n = value[i]; + + var faceValues = value.GetRange(i + 1, n); + var surfIndex = faceValues[0]; + var outerLoopIndex = faceValues[1]; + var orientationIsReversed = faceValues[2] == 1; + var loopIndices = faceValues.GetRange(3, faceValues.Count - 3); + var face = new BrepFace(this, surfIndex, loopIndices, outerLoopIndex, orientationIsReversed); + Faces.Add(face); + i += n + 1; + } + } + } + + /// + /// Gets or sets if this instance is closed or not. + /// + public bool IsClosed { get; set; } + + /// + /// Gets or sets the list of surfaces in this instance. + /// + public BrepOrientation Orientation { get; set; } + + /// + [DetachProperty] + public List displayValue { get; set; } + + /// + public double area { get; set; } + + /// + public Box bbox { get; set; } + + /// + public double volume { get; set; } + + /// + public bool TransformTo(Transform transform, out Brep transformed) + { + // transform display values + var displayValues = new List(displayValue.Count); + foreach (Mesh v in displayValue) + { + v.TransformTo(transform, out Mesh mesh); + displayValues.Add(mesh); + } + + // transform surfaces + var surfaces = new List(Surfaces.Count); + foreach (var srf in Surfaces) + { + srf.TransformTo(transform, out Surface surface); + surfaces.Add(surface); + } + + // transform curve3d + var success3D = true; + var transformedCurve3D = new List(); + foreach (var curve in Curve3D) + { + if (curve is ITransformable c) + { + c.TransformTo(transform, out ITransformable tc); + transformedCurve3D.Add((ICurve)tc); + } + else + { + success3D = false; + } + } + + // transform vertices + var transformedVertices = new List(); + foreach (var vertex in Vertices) + { + vertex.TransformTo(transform, out Point transformedVertex); + transformedVertices.Add(transformedVertex); + } + + transformed = new Brep + { + provenance = provenance, + units = units, + displayValue = displayValues, + Surfaces = surfaces, + Curve3D = transformedCurve3D, + Curve2D = new List(Curve2D), + Vertices = transformedVertices, + Edges = new List(Edges.Count), + Loops = new List(Loops.Count), + Trims = new List(Trims.Count), + Faces = new List(Faces.Count), + IsClosed = IsClosed, + Orientation = Orientation, + applicationId = applicationId ?? 
id + }; + + foreach (var e in Edges) + { + transformed.Edges.Add( + new BrepEdge( + transformed, + e.Curve3dIndex, + e.TrimIndices, + e.StartIndex, + e.EndIndex, + e.ProxyCurveIsReversed, + e.Domain + ) + ); + } + + foreach (var l in Loops) + { + transformed.Loops.Add(new BrepLoop(transformed, l.FaceIndex, l.TrimIndices, l.Type)); + } + + foreach (var t in Trims) + { + transformed.Trims.Add( + new BrepTrim( + transformed, + t.EdgeIndex, + t.FaceIndex, + t.LoopIndex, + t.CurveIndex, + t.IsoStatus, + t.TrimType, + t.IsReversed, + t.StartIndex, + t.EndIndex + ) + ); + } + + foreach (var f in Faces) + { + transformed.Faces.Add( + new BrepFace(transformed, f.SurfaceIndex, f.LoopIndices, f.OuterLoopIndex, f.OrientationReversed) + ); + } + + return success3D; + } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + var res = TransformTo(transform, out Brep brep); + transformed = brep; + return res; + } + + [OnDeserialized] + internal void OnDeserialized(StreamingContext context) + { + Surfaces.ForEach(s => s.units = units); + + for (var i = 0; i < Edges.Count; i++) + { + var e = Edges[i]; + lock (e) + { + if (e.Brep != null) + { + e = new BrepEdge( + this, + e.Curve3dIndex, + e.TrimIndices, + e.StartIndex, + e.EndIndex, + e.ProxyCurveIsReversed, + e.Domain + ); + Edges[i] = e; + } + else + { + e.Brep = this; + } + } + } + + for (var i = 0; i < Loops.Count; i++) + { + var l = Loops[i]; + lock (l) + { + if (l.Brep != null) + { + l = new BrepLoop(this, l.FaceIndex, l.TrimIndices, l.Type); + Loops[i] = l; + } + else + { + l.Brep = this; + } + } + } + + for (var i = 0; i < Trims.Count; i++) + { + var t = Trims[i]; + lock (t) + { + if (t.Brep != null) + { + t = new BrepTrim( + this, + t.EdgeIndex, + t.FaceIndex, + t.LoopIndex, + t.CurveIndex, + t.IsoStatus, + t.TrimType, + t.IsReversed, + t.StartIndex, + t.EndIndex + ); + Trims[i] = t; + } + else + { + t.Brep = this; + } + } + } + + for (var i = 0; i < Faces.Count; i++) + { + var f = Faces[i]; + lock (f) + { + if (f.Brep != null) + { + f = new BrepFace(this, f.SurfaceIndex, f.LoopIndices, f.OuterLoopIndex, f.OrientationReversed); + Faces[i] = f; + } + else + { + f.Brep = this; + } + } + } + } +} + +/// +/// Represents the orientation of a +/// +public enum BrepOrientation +{ + /// Brep has no specific orientation + None = 0, + + /// Brep faces inward + Inward = -1, + + /// Brep faces outward + Outward = 1, + + /// Orientation is not known + Unknown = 2 +} + +/// +/// Represents the type of a loop in a 's face. +/// +public enum BrepLoopType +{ + /// Loop type is not known + Unknown, + + /// Loop is the outer loop of a face + Outer, + + /// Loop is an inner loop of a face + Inner, + + /// Loop is a closed curve with no area. + Slit, + + /// Loop represents a curve on a surface + CurveOnSurface, + + /// Loop is collapsed to a point. + PointOnSurface +} + +/// +/// Represents the type of a trim in a 's loop. +/// +public enum BrepTrimType +{ + Unknown, + Boundary, + Mated, + Seam, + Singular, + CurveOnSurface, + PointOnSurface, + Slit +} diff --git a/src/Objects/Geometry/BrepEdge.cs b/src/Objects/Geometry/BrepEdge.cs new file mode 100644 index 00000000..faffb323 --- /dev/null +++ b/src/Objects/Geometry/BrepEdge.cs @@ -0,0 +1,58 @@ +using System.Collections.Generic; +using System.Linq; +using Objects.Primitive; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +/// +/// Represents an edge of the . 
+/// +public class BrepEdge : Base +{ + public BrepEdge() { } + + public BrepEdge( + Brep brep, + int curve3dIndex, + int[] trimIndices, + int startIndex, + int endIndex, + bool proxyCurvedIsReversed, + Interval? domain + ) + { + Brep = brep; + Curve3dIndex = curve3dIndex; + TrimIndices = trimIndices; + StartIndex = startIndex; + EndIndex = endIndex; + ProxyCurveIsReversed = proxyCurvedIsReversed; + Domain = domain ?? new(0, 1); + } + + [JsonIgnore] + public Brep Brep { get; set; } + + public int Curve3dIndex { get; set; } + public int[] TrimIndices { get; set; } + public int StartIndex { get; set; } + public int EndIndex { get; set; } + + public bool ProxyCurveIsReversed { get; set; } + + public Interval Domain { get; set; } = new(0, 1); + + [JsonIgnore] + public Point StartVertex => Brep.Vertices[StartIndex]; + + [JsonIgnore] + public Point EndVertex => Brep.Vertices[EndIndex]; + + [JsonIgnore] + public IEnumerable Trims => TrimIndices.Select(i => Brep.Trims[i]); + + [JsonIgnore] + public ICurve Curve => Brep.Curve3D[Curve3dIndex]; +} diff --git a/src/Objects/Geometry/BrepFace.cs b/src/Objects/Geometry/BrepFace.cs new file mode 100644 index 00000000..6f9d52a7 --- /dev/null +++ b/src/Objects/Geometry/BrepFace.cs @@ -0,0 +1,40 @@ +using System.Collections.Generic; +using System.Linq; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +/// +/// Represents a face on a +/// +public class BrepFace : Base +{ + public BrepFace() { } + + public BrepFace(Brep brep, int surfaceIndex, List loopIndices, int outerLoopIndex, bool orientationReversed) + { + Brep = brep; + SurfaceIndex = surfaceIndex; + LoopIndices = loopIndices; + OuterLoopIndex = outerLoopIndex; + OrientationReversed = orientationReversed; + } + + [JsonIgnore] + public Brep Brep { get; set; } + + public int SurfaceIndex { get; set; } + public List LoopIndices { get; set; } + public int OuterLoopIndex { get; set; } + public bool OrientationReversed { get; set; } + + [JsonIgnore] + public BrepLoop OuterLoop => Brep.Loops[OuterLoopIndex]; + + [JsonIgnore] + public Surface Surface => Brep.Surfaces[SurfaceIndex]; + + [JsonIgnore] + public List Loops => LoopIndices.Select(i => Brep.Loops[i]).ToList(); +} diff --git a/src/Objects/Geometry/BrepLoop.cs b/src/Objects/Geometry/BrepLoop.cs new file mode 100644 index 00000000..b84246ed --- /dev/null +++ b/src/Objects/Geometry/BrepLoop.cs @@ -0,0 +1,35 @@ +using System.Collections.Generic; +using System.Linq; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +/// +/// Represents a UV Trim Closed Loop on one of the 's surfaces. 
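The [JsonIgnore] helpers on BrepEdge and BrepFace above resolve stored indices against the owning Brep, so topology can be walked without duplicating geometry. A minimal sketch, assuming a fully populated Brep instance (illustration only):

using Objects.Geometry;

static void WalkTopology(Brep brep)
{
  BrepEdge edge = brep.Edges[0];
  Point a = edge.StartVertex;        // Brep.Vertices[edge.StartIndex]
  Point b = edge.EndVertex;          // Brep.Vertices[edge.EndIndex]
  var curve3d = edge.Curve;          // Brep.Curve3D[edge.Curve3dIndex]

  BrepFace face = brep.Faces[0];
  Surface srf = face.Surface;        // Brep.Surfaces[face.SurfaceIndex]
  BrepLoop outer = face.OuterLoop;   // Brep.Loops[face.OuterLoopIndex]
}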
+/// +public class BrepLoop : Base +{ + public BrepLoop() { } + + public BrepLoop(Brep brep, int faceIndex, List trimIndices, BrepLoopType type) + { + Brep = brep; + FaceIndex = faceIndex; + TrimIndices = trimIndices; + Type = type; + } + + [JsonIgnore] + public Brep Brep { get; set; } + + public int FaceIndex { get; set; } + public List TrimIndices { get; set; } + public BrepLoopType Type { get; set; } + + [JsonIgnore] + public BrepFace Face => Brep.Faces[FaceIndex]; + + [JsonIgnore] + public List Trims => TrimIndices.Select(i => Brep.Trims[i]).ToList(); +} diff --git a/src/Objects/Geometry/BrepTrim.cs b/src/Objects/Geometry/BrepTrim.cs new file mode 100644 index 00000000..cadc268f --- /dev/null +++ b/src/Objects/Geometry/BrepTrim.cs @@ -0,0 +1,65 @@ +using Objects.Primitive; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +/// +/// Represents a UV Trim curve for one of the 's surfaces. +/// +public class BrepTrim : Base +{ + public BrepTrim() { } + + public BrepTrim( + Brep brep, + int edgeIndex, + int faceIndex, + int loopIndex, + int curveIndex, + int isoStatus, + BrepTrimType trimType, + bool reversed, + int startIndex, + int endIndex + ) + { + Brep = brep; + EdgeIndex = edgeIndex; + FaceIndex = faceIndex; + LoopIndex = loopIndex; + CurveIndex = curveIndex; + IsoStatus = isoStatus; + TrimType = trimType; + IsReversed = reversed; + StartIndex = startIndex; + EndIndex = endIndex; + } + + [JsonIgnore] + public Brep Brep { get; set; } + + public int EdgeIndex { get; set; } + public int StartIndex { get; set; } + public int EndIndex { get; set; } + public int FaceIndex { get; set; } + public int LoopIndex { get; set; } + public int CurveIndex { get; set; } + public int IsoStatus { get; set; } + public BrepTrimType TrimType { get; set; } + public bool IsReversed { get; set; } + + public Interval Domain { get; set; } = new(0, 1); + + [JsonIgnore] + public BrepFace Face => Brep.Faces[FaceIndex]; + + [JsonIgnore] + public BrepLoop Loop => Brep.Loops[LoopIndex]; + + [JsonIgnore] + public BrepEdge? Edge => EdgeIndex != -1 ? Brep.Edges[EdgeIndex] : null; + + [JsonIgnore] + public ICurve Curve2d => Brep.Curve2D[CurveIndex]; +} diff --git a/src/Objects/Geometry/Circle.cs b/src/Objects/Geometry/Circle.cs new file mode 100644 index 00000000..56fd4b16 --- /dev/null +++ b/src/Objects/Geometry/Circle.cs @@ -0,0 +1,98 @@ +using System.Collections.Generic; +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +/// +/// Represents a circular curve based on a base and a as radius. +/// +public class Circle : Base, ICurve, IHasArea, IHasBoundingBox +{ + /// + /// Constructs an empty instance. + /// + public Circle() { } + + /// + /// Constructs a new instance. + /// + /// The plane where the circle lies + /// The radius of the circle + /// The units the circle is modeled in + /// The unique ID of this circle in a specific application + public Circle(Plane plane, double radius, string units = Units.Meters, string? applicationId = null) + { + this.plane = plane; + this.radius = radius; + this.applicationId = applicationId; + this.units = units; + } + + /// + /// The radius of the circle + /// + public double? radius { get; set; } + + /// + /// The the circle lies in. + /// + public Plane plane { get; set; } + + /// + /// The units this object was modeled in. 
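Circle uses the same count-and-type-prefixed flat encoding as the other curve primitives in this folder; a round-trip sketch (illustration only, assuming List<double> for the stripped generic arguments). The plane occupies the 13 values sliced back out by Plane.FromList.

using System.Collections.Generic;
using Objects.Geometry;

var plane = new Plane(new Point(0, 0, 0), new Vector(0, 0, 1), new Vector(1, 0, 0), new Vector(0, 1, 0));
var circle = new Circle(plane, radius: 2.5);

List<double> encoded = circle.ToList();   // [count, CurveTypeEncoding.Circle, radius, domain start/end, 13 plane values, unit code]
Circle decoded = Circle.FromList(encoded);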
+ /// + public string units { get; set; } + + /// + public Interval domain { get; set; } = new(0, 1); + + /// + public double length { get; set; } + + //public Point center { get; set; } + + /// + public double area { get; set; } + + /// + public Box bbox { get; set; } + + /// + /// Returns the coordinates of this as a list of numbers + /// + /// A list of values representing the + public List ToList() + { + var list = new List(); + + list.Add(radius ?? 0); + list.Add(domain?.start ?? 0); + list.Add(domain?.end ?? 1); + list.AddRange(plane.ToList()); + + list.Add(Units.GetEncodingFromUnit(units)); + list.Insert(0, CurveTypeEncoding.Circle); + list.Insert(0, list.Count); + return list; + } + + /// + /// Creates a new based on a list of coordinates and the unit they're drawn in. + /// + /// The list of values representing this + /// A new with the provided values. + public static Circle FromList(List list) + { + var circle = new Circle + { + radius = list[2], + domain = new Interval(list[3], list[4]), + plane = Plane.FromList(list.GetRange(5, 13)), + units = Units.GetUnitFromEncoding(list[list.Count - 1]) + }; + + return circle; + } +} diff --git a/src/Objects/Geometry/ControlPoint.cs b/src/Objects/Geometry/ControlPoint.cs new file mode 100644 index 00000000..82f02471 --- /dev/null +++ b/src/Objects/Geometry/ControlPoint.cs @@ -0,0 +1,76 @@ +using System; +using System.Collections.Generic; +using Objects.Other; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +public class ControlPoint : Point, ITransformable +{ + public ControlPoint() { } + + public ControlPoint(double x, double y, double z, string units, string? applicationId = null) + : base(x, y, z, units, applicationId) + { + weight = 1; + } + + public ControlPoint(double x, double y, double z, double w, string units, string? applicationId = null) + : base(x, y, z, units, applicationId) + { + weight = w; + } + + /// + /// OBSOLETE - This is just here for backwards compatibility. + /// + [ + JsonProperty(NullValueHandling = NullValueHandling.Ignore), + Obsolete("Access coordinates using XYZ and weight fields", true) + ] + private new List value + { +#pragma warning disable CS8603 // Possible null reference return. Reason: obsolete. + get => null; +#pragma warning restore CS8603 // Possible null reference return. Reason: obsolete. + set + { + x = value[0]; + y = value[1]; + z = value[2]; + weight = value.Count > 3 ? value[3] : 1; + } + } + + public double weight { get; set; } + + public bool TransformTo(Transform transform, out ControlPoint transformed) + { + TransformTo(transform, out Point transformedPoint); + transformed = new ControlPoint( + transformedPoint.x, + transformedPoint.y, + transformedPoint.z, + weight, + units, + applicationId + ); + return true; + } + + public override string ToString() + { + return $"{{{x},{y},{z},{weight}}}"; + } + + public void Deconstruct(out double x, out double y, out double z, out double weight) + { + Deconstruct(out x, out y, out z, out weight, out _); + } + + public void Deconstruct(out double x, out double y, out double z, out double weight, out string? 
units) + { + Deconstruct(out x, out y, out z, out units); + weight = this.weight; + } +} diff --git a/src/Objects/Geometry/Curve.cs b/src/Objects/Geometry/Curve.cs new file mode 100644 index 00000000..daf8c62e --- /dev/null +++ b/src/Objects/Geometry/Curve.cs @@ -0,0 +1,203 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Objects.Other; +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +public class Curve : Base, ICurve, IHasBoundingBox, IHasArea, ITransformable, IDisplayValue +{ + /// + /// Constructs an empty instance. + /// + public Curve() { } + + /// + /// Constructs a new instance based on displayValue a polyline. + /// + /// The polyline that will be this curve's + /// The units this curve is be modelled in + /// The unique ID of this curve in a specific application + public Curve(Polyline poly, string units = Units.Meters, string? applicationId = null) + { + displayValue = poly; + this.applicationId = applicationId; + this.units = units; + } + + public int degree { get; set; } + + public bool periodic { get; set; } + + /// + /// "True" if weights differ, "False" if weights are the same. + /// + public bool rational { get; set; } + + [DetachProperty, Chunkable(31250)] + public List points { get; set; } + + /// + /// Gets or sets the weights for this . Use a default value of 1 for unweighted points. + /// + [DetachProperty, Chunkable(31250)] + public List weights { get; set; } + + /// + /// Gets or sets the knots for this . Count should be equal to count + + 1. + /// + [DetachProperty, Chunkable(31250)] + public List knots { get; set; } + + public bool closed { get; set; } + + /// + /// The units this object was specified in. + /// + public string units { get; set; } + + /// + public Interval domain { get; set; } = new Interval(0, 1); + + /// + public double length { get; set; } + + /// + [DetachProperty] + public Polyline displayValue { get; set; } + + /// + public double area { get; set; } + + /// + public Box bbox { get; set; } + + /// + public bool TransformTo(Transform transform, out Curve transformed) + { + // transform points + var transformedPoints = new List(); + foreach (var point in GetPoints()) + { + point.TransformTo(transform, out Point transformedPoint); + transformedPoints.Add(transformedPoint); + } + + var result = displayValue.TransformTo(transform, out ITransformable polyline); + transformed = new Curve + { + degree = degree, + periodic = periodic, + rational = rational, + points = transformedPoints.SelectMany(o => o.ToList()).ToList(), + weights = weights, + knots = knots, + displayValue = (Polyline)polyline, + closed = closed, + units = units, + applicationId = applicationId, + domain = domain != null ? 
new Interval { start = domain.start, end = domain.end } : new Interval(0, 1) + }; + + return result; + } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + var res = TransformTo(transform, out Curve curve); + transformed = curve; + return res; + } + + /// as list of s + /// when list is malformed + public List GetPoints() + { + if (points.Count % 3 != 0) + { + throw new SpeckleException( + $"{nameof(Curve)}.{nameof(points)} list is malformed: expected length to be multiple of 3" + ); + } + + var pts = new List(points.Count / 3); + for (int i = 2; i < points.Count; i += 3) + { + pts.Add(new Point(points[i - 2], points[i - 1], points[i], units)); + } + + return pts; + } + + /// + /// Returns the values of this as a list of numbers + /// + /// A list of values representing the + public List ToList() + { + var list = new List(); + var curve = this; + list.Add(curve.degree); // 0 + list.Add(curve.periodic ? 1 : 0); // 1 + list.Add(curve.rational ? 1 : 0); // 2 + list.Add(curve.closed ? 1 : 0); // 3 + list.Add(curve.domain?.start ?? 0); // 4 + list.Add(curve.domain?.end ?? 1); // 5 + + list.Add(curve.points.Count); // 6 + list.Add(curve.weights.Count); // 7 + list.Add(curve.knots.Count); // 8 + + list.AddRange(curve.points); // 9 onwards + list.AddRange(curve.weights); + list.AddRange(curve.knots); + + list.Add(Units.GetEncodingFromUnit(units)); + list.Insert(0, CurveTypeEncoding.Curve); + list.Insert(0, list.Count); + return list; + } + + /// + /// Creates a new based on a list of coordinates and the unit they're drawn in. + /// + /// The list of values representing this + /// A new with the provided values. + public static Curve FromList(List list) + { + if (list[0] != list.Count - 1) + { + throw new Exception($"Incorrect length. Expected {list[0]}, got {list.Count}."); + } + + if (list[1] != CurveTypeEncoding.Curve) + { + throw new Exception($"Wrong curve type. Expected {CurveTypeEncoding.Curve}, got {list[1]}."); + } + + var curve = new Curve + { + degree = (int)list[2], + periodic = list[3] == 1, + rational = list[4] == 1, + closed = list[5] == 1, + domain = new Interval(list[6], list[7]) + }; + + var pointsCount = (int)list[8]; + var weightsCount = (int)list[9]; + var knotsCount = (int)list[10]; + + curve.points = list.GetRange(11, pointsCount); + curve.weights = list.GetRange(11 + pointsCount, weightsCount); + curve.knots = list.GetRange(11 + pointsCount + weightsCount, knotsCount); + + curve.units = Units.GetUnitFromEncoding(list[list.Count - 1]); + return curve; + } +} diff --git a/src/Objects/Geometry/Ellipse.cs b/src/Objects/Geometry/Ellipse.cs new file mode 100644 index 00000000..b248e145 --- /dev/null +++ b/src/Objects/Geometry/Ellipse.cs @@ -0,0 +1,121 @@ +using System.Collections.Generic; +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +public class Ellipse : Base, ICurve, IHasArea +{ + /// + /// Initializes a new instance of the class. + /// This constructor is only intended for serialization/deserialization purposes. + /// Use other constructors to manually create ellipses. + /// + public Ellipse() { } + + /// + /// Initializes a new instance of the class. + /// + /// The plane to draw the ellipse in. + /// First radius of the ellipse. + /// Second radius of the ellipse. + /// Application ID, defaults to null. + public Ellipse(Plane plane, double radius1, double radius2, string units = Units.Meters, string? 
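The index map below is inferred from the Curve.ToList/FromList bodies above; LooksLikeEncodedCurve is a hypothetical helper, not part of the patch, and CurveTypeEncoding is assumed to expose double constants as used elsewhere in this kit.

using System.Collections.Generic;
using Objects.Geometry;

// Final layout produced by Curve.ToList (after the two Insert(0, ...) calls):
//   [0] number of values that follow      [6..7]  domain start / end
//   [1] CurveTypeEncoding.Curve           [8..10] points.Count, weights.Count, knots.Count
//   [2] degree                            [11..]  points, then weights, then knots
//   [3..5] periodic / rational / closed   [last]  unit encoding
static bool LooksLikeEncodedCurve(List<double> list) =>
  list.Count > 11 && list[0] == list.Count - 1 && list[1] == CurveTypeEncoding.Curve;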
applicationId = null) + : this(plane, radius1, radius2, new Interval(0, 1), null, units) { } + + /// + /// Initializes a new instance of the class. + /// + /// The plane to draw the ellipse in. + /// First radius of the ellipse. + /// Second radius of the ellipse. + /// The curve's internal parametrization domain. + /// The domain to trim the curve with. Will be null if the ellipse is not trimmed. + /// Application ID, defaults to null. + public Ellipse( + Plane plane, + double radius1, + double radius2, + Interval domain, + Interval? trimDomain, + string units = Units.Meters, + string? applicationId = null + ) + { + this.plane = plane; + firstRadius = radius1; + secondRadius = radius2; + this.domain = domain; + this.trimDomain = trimDomain; + this.applicationId = applicationId; + this.units = units; + } + + /// + /// Gets or sets the first radius of the . This is usually the major radius. + /// + public double? firstRadius { get; set; } + + /// + /// Gets or sets the second radius of the . This is usually the minor radius. + /// + public double? secondRadius { get; set; } + + /// + /// Gets or sets the plane to draw this ellipse in. + /// + public Plane plane { get; set; } + + /// + /// Gets or set the domain interval to trim this with. + /// + public Interval? trimDomain { get; set; } + + /// + public Box bbox { get; set; } + + public string units { get; set; } + + /// + /// Gets or sets the domain interval for this . + /// + public Interval domain { get; set; } = new(0, 0); + + /// + public double length { get; set; } + + //public Point center { get; set; } + + /// + public double area { get; set; } + + public List ToList() + { + var list = new List(); + list.Add(firstRadius ?? 0); + list.Add(secondRadius ?? 0); + list.Add(domain?.start ?? 0); + list.Add(domain?.end ?? 0); + + list.AddRange(plane.ToList()); + + list.Add(Units.GetEncodingFromUnit(units)); + list.Insert(0, CurveTypeEncoding.Ellipse); + list.Insert(0, list.Count); + return list; + } + + public static Ellipse FromList(List list) + { + var ellipse = new Ellipse + { + firstRadius = list[2], + secondRadius = list[3], + domain = new Interval(list[4], list[5]), + plane = Plane.FromList(list.GetRange(6, 13)), + units = Units.GetUnitFromEncoding(list[list.Count - 1]) + }; + return ellipse; + } +} diff --git a/src/Objects/Geometry/Extrusion.cs b/src/Objects/Geometry/Extrusion.cs new file mode 100644 index 00000000..aaa382f1 --- /dev/null +++ b/src/Objects/Geometry/Extrusion.cs @@ -0,0 +1,41 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +[Obsolete("Unused")] +[SuppressMessage("Design", "CA1051:Do not declare visible instance fields", Justification = "Obsolete")] +public class Extrusion : Base, IHasVolume, IHasArea, IHasBoundingBox +{ + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public double? length; + + public Extrusion() { } + + public Extrusion(Base profile, double length, bool capped, string units = Units.Meters, string? applicationId = null) + { + this.profile = profile; + this.length = length; + this.capped = capped; + this.applicationId = applicationId; + this.units = units; + } + + public bool? 
capped { get; set; } + public Base profile { get; set; } + public Point pathStart { get; set; } + public Point pathEnd { get; set; } + public Base pathCurve { get; set; } + public Base pathTangent { get; set; } + public List profiles { get; set; } + + public string units { get; set; } + + public double area { get; set; } + + public Box bbox { get; set; } + public double volume { get; set; } +} diff --git a/src/Objects/Geometry/Line.cs b/src/Objects/Geometry/Line.cs new file mode 100644 index 00000000..310085e3 --- /dev/null +++ b/src/Objects/Geometry/Line.cs @@ -0,0 +1,133 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Objects.Other; +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +public class Line : Base, ICurve, IHasBoundingBox, ITransformable +{ + public Line() { } + + [Obsolete("Line should not use a constructor that only sets the start point. Deprecated in 2.18.", true)] + public Line(double x, double y, double z = 0, string units = Units.Meters, string? applicationId = null) + { + start = new Point(x, y, z); +#pragma warning disable CS8625 // Cannot convert null literal to non-nullable reference type. Reason: Obsolete. + end = null; +#pragma warning restore CS8625 // Cannot convert null literal to non-nullable reference type. Reason: Obsolete. + this.applicationId = applicationId; + this.units = units; + } + + public Line(Point start, Point end, string units = Units.Meters, string? applicationId = null) + { + this.start = start; + this.end = end; + length = Point.Distance(start, end); + this.applicationId = applicationId; + this.units = units; + } + + public Line(IList coordinates, string units = Units.Meters, string? applicationId = null) + { + if (coordinates.Count < 6) + { + throw new SpeckleException("Line from coordinate array requires 6 coordinates."); + } + + start = new Point(coordinates[0], coordinates[1], coordinates[2], units, applicationId); + end = new Point(coordinates[3], coordinates[4], coordinates[5], units, applicationId); + length = Point.Distance(start, end); + this.applicationId = applicationId; + this.units = units; + } + + [Obsolete("Use IList constructor", true)] + public Line(IEnumerable coordinatesArray, string units = Units.Meters, string? applicationId = null) + : this(coordinatesArray.ToList(), units, applicationId) { } + + /// + /// OBSOLETE - This is just here for backwards compatibility. + /// You should not use this for anything. Access coordinates using start and end point. + /// + + [JsonProperty(NullValueHandling = NullValueHandling.Ignore)] + public List value + { +#pragma warning disable CS8603 // Possible null reference return. Reason: Obsolete. + get => null; +#pragma warning restore CS8603 // Possible null reference return. Reason: Obsolete. 
+ set + { + if (value == null) + { + return; + } + + start = new Point(value[0], value[1], value[2]); + end = new Point(value[3], value[4], value[5]); + } + } + + public double area { get; set; } + + public string units { get; set; } + + public Point start { get; set; } + public Point end { get; set; } + + public Interval domain { get; set; } = new(0, 1); + public double length { get; set; } + + public Box bbox { get; set; } + + public bool TransformTo(Transform transform, out Line transformed) + { + start.TransformTo(transform, out Point transformedStart); + end.TransformTo(transform, out Point transformedEnd); + transformed = new Line + { + start = transformedStart, + end = transformedEnd, + applicationId = applicationId, + units = units, + domain = domain is null ? new(0, 1) : new() { start = domain.start, end = domain.end } + }; + return true; + } + + public bool TransformTo(Transform transform, out ITransformable transformed) + { + var res = TransformTo(transform, out Line line); + transformed = line; + return res; + } + + public List ToList() + { + var list = new List(); + list.AddRange(start.ToList()); + list.AddRange(end.ToList()); + list.Add(domain?.start ?? 0); + list.Add(domain?.end ?? 1); + list.Add(Units.GetEncodingFromUnit(units)); + list.Insert(0, CurveTypeEncoding.Line); + list.Insert(0, list.Count); + return list; + } + + public static Line FromList(List list) + { + var units = Units.GetUnitFromEncoding(list[list.Count - 1]); + var startPt = new Point(list[2], list[3], list[4], units); + var endPt = new Point(list[5], list[6], list[7], units); + var line = new Line(startPt, endPt, units) { domain = new Interval(list[8], list[9]) }; + return line; + } +} diff --git a/src/Objects/Geometry/Mesh.cs b/src/Objects/Geometry/Mesh.cs new file mode 100644 index 00000000..1c2409f4 --- /dev/null +++ b/src/Objects/Geometry/Mesh.cs @@ -0,0 +1,254 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Objects.Other; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +public class Mesh : Base, IHasBoundingBox, IHasVolume, IHasArea, ITransformable +{ + public Mesh() { } + + /// + /// Constructs a new mesh from it's raw values. + /// + /// + /// + /// + /// + /// + /// + public Mesh( + List vertices, + List faces, + List? colors = null, + List? texture_coords = null, + string units = Units.Meters, + string? applicationId = null + ) + { + this.vertices = vertices; + this.faces = faces; + this.colors = colors ?? this.colors; + textureCoordinates = texture_coords ?? textureCoordinates; + this.applicationId = applicationId; + this.units = units; + } + + [Obsolete("Use lists constructor", true)] + public Mesh( + double[] vertices, + int[] faces, + int[]? colors = null, + double[]? texture_coords = null, + string units = Units.Meters, + string? applicationId = null + ) + : this( + vertices.ToList(), + faces.ToList(), + colors?.ToList() ?? new(), + texture_coords?.ToList() ?? new(), + units, + applicationId + ) { } + + [DetachProperty, Chunkable(31250)] + public List vertices { get; set; } = new(); + + [DetachProperty, Chunkable(62500)] + public List faces { get; set; } = new(); + + /// Vertex colors as ARGB s + [DetachProperty, Chunkable(62500)] + public List colors { get; set; } = new(); + + [DetachProperty, Chunkable(31250)] + public List textureCoordinates { get; set; } = new(); + + /// + /// The unit's this is in. 
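A usage sketch for the Line coordinate-list constructor and flat encoding above (illustration only, assuming List<double> for the stripped generic arguments): the constructor expects exactly six values (start xyz, end xyz), precomputes the length, and throws a SpeckleException when fewer than six are supplied.

using System;
using System.Collections.Generic;
using Objects.Geometry;
using Speckle.Core.Kits;

var line = new Line(new List<double> { 0, 0, 0, 3, 4, 0 }, Units.Meters);
Console.WriteLine(line.length);            // 5

List<double> encoded = line.ToList();      // [count, CurveTypeEncoding.Line, start xyz, end xyz, domain, unit code]
Line decoded = Line.FromList(encoded);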
+ /// This should be one of + /// + public string units { get; set; } = Units.None; + + /// + public double area { get; set; } + + /// + public Box bbox { get; set; } + + /// + public double volume { get; set; } + + /// + public bool Transform(Transform transform) + { + // transform vertices + vertices = GetPoints() + .SelectMany(vertex => + { + vertex.TransformTo(transform, out Point transformedVertex); + return transformedVertex.ToList(); + }) + .ToList(); + + return true; + } + + /// + public bool TransformTo(Transform transform, out Mesh transformed) + { + // transform vertices + var transformedVertices = new List(); + foreach (var vertex in GetPoints()) + { + vertex.TransformTo(transform, out Point transformedVertex); + transformedVertices.Add(transformedVertex); + } + + transformed = new Mesh + { + vertices = transformedVertices.SelectMany(o => o.ToList()).ToList(), + textureCoordinates = textureCoordinates, + applicationId = applicationId ?? id, + faces = faces, + colors = colors, + units = units + }; + transformed["renderMaterial"] = this["renderMaterial"]; + + return true; + } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + var res = TransformTo(transform, out Mesh brep); + transformed = brep; + return res; + } + + #region Convenience Methods + + [JsonIgnore] + public int VerticesCount => vertices.Count / 3; + + [JsonIgnore] + public int TextureCoordinatesCount => textureCoordinates.Count / 2; + + /// + /// Gets a vertex as a by + /// + /// The index of the vertex + /// Vertex as a + public Point GetPoint(int index) + { + index *= 3; + return new Point(vertices[index], vertices[index + 1], vertices[index + 2], units, applicationId); + } + + /// as list of s + /// when list is malformed + public List GetPoints() + { + if (vertices.Count % 3 != 0) + { + throw new SpeckleException( + $"{nameof(Mesh)}.{nameof(vertices)} list is malformed: expected length to be multiple of 3" + ); + } + + var pts = new List(vertices.Count / 3); + for (int i = 2; i < vertices.Count; i += 3) + { + pts.Add(new Point(vertices[i - 2], vertices[i - 1], vertices[i], units)); + } + + return pts; + } + + /// + /// Gets a texture coordinate as a by + /// + /// The index of the texture coordinate + /// Texture coordinate as a + public (double, double) GetTextureCoordinate(int index) + { + index *= 2; + return (textureCoordinates[index], textureCoordinates[index + 1]); + } + + /// + /// If not already so, this method will align + /// such that a vertex and its corresponding texture coordinates have the same index. + /// This alignment is what is expected by most applications.
+ ///
+ /// + /// If the calling application expects + /// vertices.count == textureCoordinates.count + /// then this method should be called by the MeshToNative method before parsing vertices and faces + /// to ensure compatibility with geometry originating from applications that map texture coordinates using vertex instance index (rather than vertex index) + ///
+ /// The vertices, colors, and faces lists will be modified to contain no shared vertices (vertices shared between polygons). + ///
+ public void AlignVerticesWithTexCoordsByIndex() + { + if (textureCoordinates.Count == 0) + { + return; + } + + if (TextureCoordinatesCount == VerticesCount) + { + return; //Tex-coords already aligned as expected + } + + var facesUnique = new List(faces.Count); + var verticesUnique = new List(TextureCoordinatesCount * 3); + bool hasColors = colors.Count > 0; + var colorsUnique = hasColors ? new List(TextureCoordinatesCount) : null; + + int nIndex = 0; + while (nIndex < faces.Count) + { + int n = faces[nIndex]; + if (n < 3) + { + n += 3; // 0 -> 3, 1 -> 4 + } + + if (nIndex + n >= faces.Count) + { + break; //Malformed face list + } + + facesUnique.Add(n); + for (int i = 1; i <= n; i++) + { + int vertIndex = faces[nIndex + i]; + int newVertIndex = verticesUnique.Count / 3; + + var (x, y, z) = GetPoint(vertIndex); + verticesUnique.Add(x); + verticesUnique.Add(y); + verticesUnique.Add(z); + + colorsUnique?.Add(colors[vertIndex]); + facesUnique.Add(newVertIndex); + } + + nIndex += n + 1; + } + + vertices = verticesUnique; + colors = colorsUnique ?? colors; + faces = facesUnique; + } + + #endregion +} diff --git a/src/Objects/Geometry/Plane.cs b/src/Objects/Geometry/Plane.cs new file mode 100644 index 00000000..c636ab06 --- /dev/null +++ b/src/Objects/Geometry/Plane.cs @@ -0,0 +1,133 @@ +using System.Collections.Generic; +using Objects.Other; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +/// +/// A 3-dimensional Plane consisting of an origin , and 3 as it's X, Y and Z axis. +/// +public class Plane : Base, ITransformable +{ + /// + /// Constructs an empty + /// + public Plane() { } + + /// + /// Constructs a new given it's individual values. + /// + /// The point to be used as origin + /// The vector to be used as Z axis + /// The vector to be used as the X axis + /// The vector to be used as the Y axis + /// The units the coordinates are in. + /// The unique ID of this polyline in a specific application + public Plane( + Point origin, + Vector normal, + Vector xDir, + Vector yDir, + string units = Units.Meters, + string? applicationId = null + ) + { + this.origin = origin; + this.normal = normal; + xdir = xDir; + ydir = yDir; + this.applicationId = applicationId; + this.units = units; + } + + /// + /// The s origin point. + /// + public Point origin { get; set; } + + /// + /// The s Z axis. + /// + public Vector normal { get; set; } + + /// + /// The s X axis. + /// + public Vector xdir { get; set; } + + /// + /// The s Y axis. + /// + public Vector ydir { get; set; } + + /// + /// The unit's this is in. + /// This should be one of + /// + public string units { get; set; } + + /// + public bool TransformTo(Transform transform, out Plane transformed) + { + origin.TransformTo(transform, out Point transformedOrigin); + normal.TransformTo(transform, out Vector transformedNormal); + xdir.TransformTo(transform, out Vector transformedXdir); + ydir.TransformTo(transform, out Vector transformedYdir); + transformed = new Plane + { + origin = transformedOrigin, + normal = transformedNormal, + xdir = transformedXdir, + ydir = transformedYdir, + applicationId = applicationId, + units = units + }; + + return true; + } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + var res = TransformTo(transform, out Plane plane); + transformed = plane; + return res; + } + + /// + /// Returns the values of this as a list of numbers + /// + /// A list of values representing the Plane. 
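A minimal sketch of the Mesh face-list layout used above (illustration only, not part of the diff): faces are stored as n-gon records [n, i0, ..., i(n-1)], and legacy payloads that encode 0 for triangles and 1 for quads are upgraded by the alignment method via n += 3.

using System;
using System.Collections.Generic;
using Objects.Geometry;
using Speckle.Core.Kits;

var mesh = new Mesh(
  vertices: new List<double> { 0, 0, 0,  1, 0, 0,  0, 1, 0 },
  faces: new List<int> { 3, 0, 1, 2 },   // one triangle: count prefix, then three vertex indices
  units: Units.Meters
);

Point p0 = mesh.GetPoint(0);             // (0, 0, 0)
List<Point> pts = mesh.GetPoints();      // throws SpeckleException if vertices.Count is not a multiple of 3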
+ + public List ToList() + { + var list = new List(); + + list.AddRange(origin.ToList()); + list.AddRange(normal.ToList()); + list.AddRange(xdir.ToList()); + list.AddRange(ydir.ToList()); + list.Add(Units.GetEncodingFromUnit(units)); + + return list; + } + + /// + /// Creates a new based on a list of values and the unit they're drawn in. + /// + /// The list of values representing this plane + /// A new with the provided values. + public static Plane FromList(List list) + { + var plane = new Plane(); + + var units = Units.GetUnitFromEncoding(list[list.Count - 1]); + plane.origin = new Point(list[0], list[1], list[2], units); + plane.normal = new Vector(list[3], list[4], list[5], units); + plane.xdir = new Vector(list[6], list[7], list[8], units); + plane.ydir = new Vector(list[9], list[10], list[11], units); + + return plane; + } +} diff --git a/src/Objects/Geometry/Point.cs b/src/Objects/Geometry/Point.cs new file mode 100644 index 00000000..307741c8 --- /dev/null +++ b/src/Objects/Geometry/Point.cs @@ -0,0 +1,242 @@ +using System; +using System.Collections.Generic; +using Objects.Other; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +/// +/// A 3-dimensional point +/// +/// +/// TODO: The Point class does not override the Equality operator, which means that there may be cases where `Equals` is used instead of `==`, as the comparison will be done by reference, not value. +/// +public class Point : Base, ITransformable +{ + /// + public Point() { } + + /// + /// Constructs a new from a set of coordinates and it's units. + /// + /// The x coordinate + /// The y coordinate + /// The z coordinate + /// The units of the point's coordinates. Defaults to Meters. + /// The object's unique application ID + public Point(double x, double y, double z = 0d, string units = Units.Meters, string? applicationId = null) + { + this.x = x; + this.y = y; + this.z = z; + this.applicationId = applicationId; + this.units = units; + } + + /// + /// Constructs a new from a + /// + /// The Vector whose coordinates will be used for the Point + public Point(Vector vector) + : this(vector.x, vector.y, vector.z, vector.units, vector.applicationId) { } + + /// + /// Gets or sets the coordinates of the + /// + [JsonProperty(NullValueHandling = NullValueHandling.Ignore), Obsolete("Use x,y,z properties instead", true)] + public List value + { + get => null!; + set + { + x = value[0]; + y = value[1]; + z = value.Count > 2 ? value[2] : 0; + } + } + + /// + /// The x coordinate of the point. + /// + public double x { get; set; } + + /// + /// The y coordinate of the point. + /// + public double y { get; set; } + + /// + /// The z coordinate of the point. + /// + public double z { get; set; } + + /// + /// The units this is in. + /// This should be one of the units specified in + /// + public string units { get; set; } = Units.None; + + [JsonIgnore, Obsolete("Bounding box no longer applicable to point as of 2.18", true)] + public Box? 
bbox { get; set; } + + /// + public bool TransformTo(Transform transform, out Point transformed) + { + var matrix = transform.matrix; + + var unitFactor = Units.GetConversionFactor(transform.units, units); // applied to translation vector + var divisor = matrix.M41 + matrix.M42 + matrix.M43 + unitFactor * matrix.M44; + var x = (this.x * matrix.M11 + this.y * matrix.M12 + this.z * matrix.M13 + unitFactor * matrix.M14) / divisor; + var y = (this.x * matrix.M21 + this.y * matrix.M22 + this.z * matrix.M23 + unitFactor * matrix.M24) / divisor; + var z = (this.x * matrix.M31 + this.y * matrix.M32 + this.z * matrix.M33 + unitFactor * matrix.M34) / divisor; + + transformed = new Point(x, y, z) { units = units, applicationId = applicationId }; + return true; + } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + var res = TransformTo(transform, out Point pt); + transformed = pt; + return res; + } + + /// + /// Returns the coordinates of this as a list of numbers + /// + /// A list of coordinates {x, y, z} + public List ToList() + { + return new List { x, y, z }; + } + + /// + /// Creates a new based on a list of coordinates and the unit they're drawn in. + /// + /// The list of coordinates {x, y, z} + /// The units the coordinates are in + /// A new with the provided coordinates. + public static Point FromList(IList list, string units) + { + return new Point(list[0], list[1], list[2], units); + } + + /// + /// Deconstructs a into it's coordinates and units + /// + /// The x coordinate + /// The y coordinate + /// The z coordinate + /// The units the point's coordinates are in. + public void Deconstruct(out double x, out double y, out double z, out string? units) + { + Deconstruct(out x, out y, out z); + units = this.units; + } + + /// + /// Deconstructs a into it's coordinates and units + /// + /// The x coordinate + /// The y coordinate + /// The z coordinate + public void Deconstruct(out double x, out double y, out double z) + { + x = this.x; + y = this.y; + z = this.z; + } + + public static Point operator +(Point point1, Point point2) => + new(point1.x + point2.x, point1.y + point2.y, point1.z + point2.z, point1.units); + + public static Point operator -(Point point1, Point point2) => + new(point1.x - point2.x, point1.y - point2.y, point1.z - point2.z, point1.units); + + public static Point operator *(Point point1, Point point2) => + new(point1.x * point2.x, point1.y * point2.y, point1.z * point2.z, point1.units); + + public static Point operator *(Point point, double val) => + new(point.x * val, point.y * val, point.z * val, point.units); + + public static Point operator /(Point point, double val) => + new(point.x / val, point.y / val, point.z / val, point.units); + + public static bool operator ==(Point? point1, Point? point2) + { + if (point1 is null && point2 is null) + { + return true; + } + else if (point1 is null || point2 is null) + { + return false; + } + + return point1.units == point2.units && point1.x == point2.x && point1.y == point2.y && point1.z == point2.z; + } + + public static bool operator !=(Point? point1, Point? point2) => !(point1 == point2); + + /// + /// Computes a point equidistant from two points. + /// + /// First point. + /// Second point. 
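+  /// For example, a small sketch (coordinates are illustrative; both points are assumed to share the same units):
+  /// var a = new Point(0, 0, 0);
+  /// var b = new Point(2, 0, 0);
+  /// Point mid = Point.Midpoint(a, b);   // (1, 0, 0), expressed in a's units
+  /// double d = Point.Distance(a, b);    // 2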
+ /// A point at the same distance from and + public static Point Midpoint(Point point1, Point point2) + { + return new Point( + 0.5 * (point1.x + point2.x), + 0.5 * (point1.y + point2.y), + 0.5 * (point1.z + point2.z), + point1.units + ); + } + + /// + /// Computes the distance between two points + /// + /// First point. + /// Second point. + /// The distance from to + public static double Distance(Point point1, Point point2) + { + return Math.Sqrt( + Math.Pow(point1.x - point2.x, 2) + Math.Pow(point1.y - point2.y, 2) + Math.Pow(point1.z - point2.z, 2) + ); + } + + /// + /// Computes the distance between two points. + /// + /// point for distance measurement + /// The length of the line between this and the other point + public double DistanceTo(Point point) + { + return Math.Sqrt(Math.Pow(x - point.x, 2) + Math.Pow(y - point.y, 2) + Math.Pow(z - point.z, 2)); + } + + public static Point Add(Point left, Point right) + { + throw new NotImplementedException(); + } + + public override bool Equals(object obj) + { + if (ReferenceEquals(this, obj)) + { + return true; + } + + if (ReferenceEquals(obj, null)) + { + return false; + } + + throw new NotImplementedException(); + } +} diff --git a/src/Objects/Geometry/Pointcloud.cs b/src/Objects/Geometry/Pointcloud.cs new file mode 100644 index 00000000..9b894c0f --- /dev/null +++ b/src/Objects/Geometry/Pointcloud.cs @@ -0,0 +1,108 @@ +using System.Collections.Generic; +using System.Linq; +using Objects.Other; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +/// +/// A collection of points, with color and size support. +/// +public class Pointcloud : Base, IHasBoundingBox, ITransformable +{ + /// + /// Constructs an empty + /// + public Pointcloud() { } + + /// Flat list of x,y,z coordinates + /// Optional list of colors + /// Optional list of sizes + [SchemaInfo(nameof(Pointcloud), "Create a point cloud object", "Objects", "Geometry")] + public Pointcloud(List points, List? colors = null, List? sizes = null) + { + this.points = points; + this.colors = colors ?? new(); + this.sizes = sizes ?? new(); + } + + /// + /// Gets or sets the list of points of this , stored as a flat list of coordinates [x1,y1,z1,x2,y2,...] + /// + [DetachProperty, Chunkable(31250)] + public List points { get; set; } = new(); + + /// + /// Gets or sets the list of colors of this 's points., stored as ARGB s. + /// + [DetachProperty, Chunkable(62500)] + public List colors { get; set; } = new(); + + /// + /// Gets or sets the list of sizes of this 's points. + /// + [DetachProperty, Chunkable(62500)] + public List sizes { get; set; } = new(); + + /// + /// The unit's this is in. 
+ /// This should be one of + /// + public string units { get; set; } + + /// + public Box bbox { get; set; } + + /// + public bool TransformTo(Transform transform, out Pointcloud transformed) + { + // transform points + var transformedPoints = new List(); + foreach (var point in GetPoints()) + { + point.TransformTo(transform, out Point transformedPoint); + transformedPoints.Add(transformedPoint); + } + + transformed = new Pointcloud + { + units = units, + points = transformedPoints.SelectMany(o => o.ToList()).ToList(), + colors = colors, + sizes = sizes, + applicationId = applicationId + }; + + return true; + } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + var res = TransformTo(transform, out Pointcloud pc); + transformed = pc; + return res; + } + + /// as list of s + /// when list is malformed + public List GetPoints() + { + if (points.Count % 3 != 0) + { + throw new SpeckleException( + $"{nameof(Pointcloud)}.{nameof(points)} list is malformed: expected length to be multiple of 3" + ); + } + + var pts = new List(points.Count / 3); + for (int i = 2; i < points.Count; i += 3) + { + pts.Add(new Point(points[i - 2], points[i - 1], points[i], units)); + } + + return pts; + } +} diff --git a/src/Objects/Geometry/Polycurve.cs b/src/Objects/Geometry/Polycurve.cs new file mode 100644 index 00000000..21305c29 --- /dev/null +++ b/src/Objects/Geometry/Polycurve.cs @@ -0,0 +1,166 @@ +using System; +using System.Collections.Generic; +using Objects.Other; +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +/// +/// A curve that is comprised of multiple curves connected. +/// +public class Polycurve : Base, ICurve, IHasArea, IHasBoundingBox, ITransformable +{ + /// + /// Constructs a new empty instance. + /// + public Polycurve() { } + + /// + /// Constructs a new empty with defined units and unique application ID. + /// + /// The units the Polycurve was modelled in. + /// The unique ID of this polyline in a specific application + public Polycurve(string units = Units.Meters, string? applicationId = null) + { + this.applicationId = applicationId; + this.units = units; + } + + /// + /// Gets or sets the list of segments that comprise this + /// + public List segments { get; set; } = new(); + + /// + /// Gets or sets a Boolean value indicating if the is closed + /// (i.e. The start point of the first segment and the end point of the last segment coincide.) + /// + public bool closed { get; set; } + + /// + /// The unit's this is in. + /// This should be one of + /// + public string units { get; set; } + + /// + /// The internal domain of this curve. + /// + public Interval domain { get; set; } = new(0, 1); + + /// + public double length { get; set; } + + /// + public double area { get; set; } + + /// + public Box bbox { get; set; } + + /// + public bool TransformTo(Transform transform, out ITransformable polycurve) + { + // transform segments + var success = true; + var transformed = new List(); + foreach (var curve in segments) + { + if (curve is ITransformable c) + { + c.TransformTo(transform, out ITransformable tc); + transformed.Add((ICurve)tc); + } + else + { + success = false; + } + } + + polycurve = new Polycurve + { + segments = transformed, + applicationId = applicationId, + closed = closed, + units = units + }; + + return success; + } + + /// + /// Constructs a new instance from an existing curve. 
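+  /// For example, a small sketch (the flat coordinate list and units are illustrative):
+  /// var polyline = new Polyline(new List<double> { 0, 0, 0, 1, 0, 0, 1, 1, 0 }, Units.Meters);
+  /// Polycurve polycurve = polyline;   // each pair of consecutive vertices becomes a Line segment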
+ /// + /// The polyline to be used when constructing the + /// A with the same shape as the provided polyline. + public static implicit operator Polycurve(Polyline polyline) + { + Polycurve polycurve = + new() + { + units = polyline.units, + area = polyline.area, + domain = polyline.domain, + closed = polyline.closed, + bbox = polyline.bbox, + length = polyline.length + }; + + var points = polyline.GetPoints(); + for (var i = 0; i < points.Count - 1; i++) + { + var line = new Line(points[i], points[i + 1], polyline.units); + polycurve.segments.Add(line); + } + if (polyline.closed) + { + var line = new Line(points[points.Count - 1], points[0], polyline.units); + polycurve.segments.Add(line); + } + + return polycurve; + } + + /// + /// Returns the values of this as a list of numbers + /// + /// A list of values representing the polycurve. + public List ToList() + { + var list = new List(); + list.Add(closed ? 1 : 0); + list.Add(domain?.start ?? 0); + list.Add(domain?.end ?? 1); + + var crvs = CurveArrayEncodingExtensions.ToArray(segments); + list.Add(crvs.Count); + list.AddRange(crvs); + + list.Add(Units.GetEncodingFromUnit(units)); + list.Insert(0, CurveTypeEncoding.PolyCurve); + list.Insert(0, list.Count); + + return list; + } + + /// + /// Creates a new based on a list of coordinates and the unit they're drawn in. + /// + /// The list of values representing this polycurve + /// A new with the provided values. + public static Polycurve FromList(List list) + { + var polycurve = new Polycurve { closed = list[2] == 1, domain = new Interval(list[3], list[4]) }; + + var temp = list.GetRange(6, (int)list[5]); + polycurve.segments = CurveArrayEncodingExtensions.FromArray(temp); + polycurve.units = Units.GetUnitFromEncoding(list[list.Count - 1]); + return polycurve; + } + + public Polycurve ToPolycurve() + { + throw new NotImplementedException(); + } +} diff --git a/src/Objects/Geometry/Polyline.cs b/src/Objects/Geometry/Polyline.cs new file mode 100644 index 00000000..6716fa14 --- /dev/null +++ b/src/Objects/Geometry/Polyline.cs @@ -0,0 +1,266 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Objects.Other; +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +/// +/// A polyline curve, defined by a set of vertices. +/// +public class Polyline : Base, ICurve, IHasArea, IHasBoundingBox, IConvertible, ITransformable +{ + /// + /// Constructs an empty + /// + public Polyline() { } + + /// + /// Constructs a new instance from a flat list of coordinates. + /// + /// The array of 3-dimensional coordinates [x1,y1,z1,x2,y2,... + /// The units the coordinates are in. + /// The unique ID of this polyline in a specific application + [Obsolete("Use list constructor instead", true)] + public Polyline(IEnumerable coordinatesArray, string units = Units.Meters, string? applicationId = null) + : this(coordinatesArray.ToList(), units, applicationId) { } + + /// + /// Constructs a new instance from a flat list of coordinates. + /// + /// The list of 3-dimensional coordinates [x1,y1,z1,x2,y2,... + /// The units the coordinates are in. + /// The unique ID of this polyline in a specific application + public Polyline(List coordinates, string units = Units.Meters, string? applicationId = null) + { + value = coordinates; + this.units = units; + this.applicationId = applicationId; + } + + /// + /// Gets or sets the raw coordinates that define this polyline. 
Use GetPoints instead to access this data as instances instead. + /// + [DetachProperty, Chunkable(31250)] + public List value { get; set; } = new(); + + /// + /// If true, do not add the last point to the value list. Polyline first and last points should be unique. + /// + public bool closed { get; set; } + + /// + /// The unit's this is in. + /// This should be one of + /// + public string units { get; set; } + + /// + /// Gets the list of points representing the vertices of this polyline. + /// + [JsonIgnore, Obsolete("Use " + nameof(GetPoints) + " Instead", true)] + public List points => GetPoints(); + + /// + public object ToType(Type conversionType, IFormatProvider provider) + { + if (conversionType == typeof(Polycurve)) + { + return (Polycurve)this; + } + + throw new InvalidCastException(); + } + + /// + public TypeCode GetTypeCode() + { + throw new NotImplementedException(); + } + + /// + public bool ToBoolean(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public byte ToByte(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public char ToChar(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public DateTime ToDateTime(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public decimal ToDecimal(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public double ToDouble(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public short ToInt16(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public int ToInt32(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public long ToInt64(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public sbyte ToSByte(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public float ToSingle(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public string ToString(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public ushort ToUInt16(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public uint ToUInt32(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + public ulong ToUInt64(IFormatProvider provider) + { + throw new NotImplementedException(); + } + + /// + /// The internal domain of this curve. 
+ /// + public Interval domain { get; set; } = new(0, 1); + + /// + public double length { get; set; } + + /// + public double area { get; set; } + + /// + public Box bbox { get; set; } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + // transform points + var transformedPoints = new List(); + foreach (var point in GetPoints()) + { + point.TransformTo(transform, out Point transformedPoint); + transformedPoints.Add(transformedPoint); + } + + transformed = new Polyline + { + value = transformedPoints.SelectMany(o => o.ToList()).ToList(), + closed = closed, + applicationId = applicationId, + units = units + }; + + return true; + } + + ///This function may be suboptimal for performance for polylines with many points + /// as List of s + /// when list is malformed + public List GetPoints() + { + if (value.Count % 3 != 0) + { + throw new SpeckleException( + $"{nameof(Polyline)}.{nameof(value)} list is malformed: expected length to be multiple of 3" + ); + } + + var pts = new List(value.Count / 3); + for (int i = 2; i < value.Count; i += 3) + { + pts.Add(new Point(value[i - 2], value[i - 1], value[i], units)); + } + + return pts; + } + + /// + /// Returns the values of this as a list of numbers + /// + /// A list of values representing the polyline. + public List ToList() + { + var list = new List(); + list.Add(closed ? 1 : 0); // 2 + list.Add(domain?.start ?? 0); // 3 + list.Add(domain?.end ?? 1); // 4 + list.Add(value.Count); // 5 + list.AddRange(value); // 6 onwards + + list.Add(Units.GetEncodingFromUnit(units)); + list.Insert(0, CurveTypeEncoding.Polyline); // 1 + list.Insert(0, list.Count); // 0 + return list; + } + + /// + /// Creates a new based on a list of coordinates and the unit they're drawn in. + /// + /// The list of values representing this polyline + /// A new with the provided values. 
+ + public static Polyline FromList(List list) + { + var polyline = new Polyline { closed = list[2] == 1, domain = new Interval(list[3], list[4]) }; + var pointCount = (int)list[5]; + polyline.value = list.GetRange(6, pointCount); + polyline.units = Units.GetUnitFromEncoding(list[list.Count - 1]); + return polyline; + } +} diff --git a/src/Objects/Geometry/PolylineExtensions.cs b/src/Objects/Geometry/PolylineExtensions.cs new file mode 100644 index 00000000..49d5677e --- /dev/null +++ b/src/Objects/Geometry/PolylineExtensions.cs @@ -0,0 +1,22 @@ +using System.Collections.Generic; + +namespace Objects.Geometry; + +public static class PolylineExtensions +{ + public static IEnumerable EnumerateAsLines(this Polyline polyline) + { + List points = polyline.GetPoints(); + if (points.Count == 0) + { + yield break; + } + + Point previousPoint = points[0]; + for (int i = 1; i < points.Count; i++) + { + yield return new Line(previousPoint, points[i], polyline.units); + previousPoint = points[i]; + } + } +} diff --git a/src/Objects/Geometry/Spiral.cs b/src/Objects/Geometry/Spiral.cs new file mode 100644 index 00000000..89ef8aea --- /dev/null +++ b/src/Objects/Geometry/Spiral.cs @@ -0,0 +1,40 @@ +using Objects.Primitive; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +public enum SpiralType +{ + Biquadratic, + BiquadraticParabola, + Bloss, + Clothoid, + Cosine, + Cubic, + CubicParabola, + Radioid, + Sinusoid, + Unknown +} + +public class Spiral : Base, ICurve, IHasBoundingBox, IDisplayValue +{ + public Point startPoint { get; set; } + public Point endPoint { get; set; } + public Plane plane { get; set; } // plane with origin at spiral center + public double turns { get; set; } // total angle of spiral. positive is counterclockwise, negative is clockwise + public Vector pitchAxis { get; set; } = new(); + public double pitch { get; set; } + public SpiralType spiralType { get; set; } + + public string units { get; set; } + + public double length { get; set; } + + public Interval domain { get; set; } + + [DetachProperty] + public Polyline displayValue { get; set; } + + public Box bbox { get; set; } +} diff --git a/src/Objects/Geometry/Surface.cs b/src/Objects/Geometry/Surface.cs new file mode 100644 index 00000000..17e1a2c0 --- /dev/null +++ b/src/Objects/Geometry/Surface.cs @@ -0,0 +1,258 @@ +using System.Collections.Generic; +using Objects.Other; +using Objects.Primitive; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Geometry; + +/// +/// A Surface in NURBS form. +/// +public class Surface : Base, IHasBoundingBox, IHasArea, ITransformable +{ + /// + /// Constructs a new empty + /// + public Surface() + { + applicationId = null; + pointData = new List(); + } + + /// + /// Constructs a new empty + /// + /// The units this surface is modeled in + /// This surface's unique identifier on a specific application + public Surface(string units = Units.Meters, string? applicationId = null) + { + this.applicationId = applicationId; + this.units = units; + } + + /// + /// The degree of the surface in the U direction + /// + public int degreeU { get; set; } + + /// + /// The degree of the surface in the V direction + /// + public int degreeV { get; set; } + + /// + /// Determines if the is rational. + /// + public bool rational { get; set; } + + /// + /// The raw data of the surface's control points. Use GetControlPoints or SetControlPoints instead of accessing this directly. 
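+  /// Control points are stored as flat (x, y, z, weight) quadruples, row by row in [u][v] order,
+  /// so the list is expected to hold countU * countV * 4 values.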
+ /// + public List pointData { get; set; } + + /// + /// The number of control points in the U direction + /// + public int countU { get; set; } + + /// + /// The number of control points in the V direction + /// + public int countV { get; set; } + + /// + /// The knot vector in the U direction + /// + public List knotsU { get; set; } + + /// + /// The knot vector in the V direction + /// + public List knotsV { get; set; } + + /// + /// The surface's domain in the U direction + /// + public Interval domainU { get; set; } + + /// + /// The surface's domain in the V direction + /// + public Interval domainV { get; set; } + + /// + /// Determines if a surface is closed around the . + /// + public bool closedU { get; set; } + + /// + /// Determines if a surface is closed around the + /// + public bool closedV { get; set; } + + /// + /// The unit's this is in. + /// This should be one of + /// + public string units { get; set; } + + /// + public double area { get; set; } + + /// + public Box bbox { get; set; } + + /// + public bool TransformTo(Transform transform, out Surface transformed) + { + var ptMatrix = GetControlPoints(); + foreach (var ctrlPts in ptMatrix) + { + for (int i = 0; i < ctrlPts.Count; i++) + { + ctrlPts[i].TransformTo(transform, out var tPt); + ctrlPts[i] = tPt; + } + } + + transformed = new Surface + { + degreeU = degreeU, + degreeV = degreeV, + countU = countU, + countV = countV, + rational = rational, + closedU = closedU, + closedV = closedV, + domainU = domainU, + domainV = domainV, + knotsU = knotsU, + knotsV = knotsV, + units = units + }; + transformed.SetControlPoints(ptMatrix); + + return true; + } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + var res = TransformTo(transform, out Surface surface); + transformed = surface; + return res; + } + + /// + /// Gets the control points of this s + /// + /// A 2-dimensional array representing this s control points. + /// The ControlPoints will be ordered following directions "[u][v]" + + public List> GetControlPoints() + { + var matrix = new List>(); + for (var i = 0; i < countU; i++) + { + matrix.Add(new List()); + } + + for (var i = 0; i < pointData.Count; i += 4) + { + var uIndex = i / (countV * 4); + matrix[uIndex].Add(new ControlPoint(pointData[i], pointData[i + 1], pointData[i + 2], pointData[i + 3], units)); + } + + return matrix; + } + + /// + /// Sets the control points of this . + /// + /// A 2-dimensional array of instances. + /// The must be ordered following directions "[u][v]" + public void SetControlPoints(List> value) + { + List data = new(); + countU = value.Count; + countV = value[0].Count; + value.ForEach( + row => + row.ForEach(pt => + { + data.Add(pt.x); + data.Add(pt.y); + data.Add(pt.z); + data.Add(pt.weight); + }) + ); + pointData = data; + } + + /// + /// Returns the coordinates of this as a list of numbers + /// + /// A list of values representing the surface + public List ToList() + { + var list = new List(); + list.Add(degreeU); + list.Add(degreeV); + list.Add(countU); + list.Add(countV); + list.Add(rational ? 1 : 0); + list.Add(closedU ? 1 : 0); + list.Add(closedV ? 1 : 0); + list.Add(domainU.start ?? 0); // 7 + list.Add(domainU.end ?? 1); + list.Add(domainV.start ?? 0); + list.Add(domainV.end ?? 
1); // [0] 10 + + list.Add(pointData.Count); // 11 + list.Add(knotsU.Count); // 12 + list.Add(knotsV.Count); // 13 + + list.AddRange(pointData); + list.AddRange(knotsU); + list.AddRange(knotsV); + + list.Add(Units.GetEncodingFromUnit(units)); + list.Insert(0, list.Count); + + return list; + } + + /// + /// Creates a new based on a list of coordinates and the unit they're drawn in. + /// + /// The list of values representing this surface + /// A new with the provided values. + public static Surface FromList(List list) + { + var srf = new Surface + { + degreeU = (int)list[0], + degreeV = (int)list[1], + countU = (int)list[2], + countV = (int)list[3], + rational = list[4] == 1, + closedU = list[5] == 1, + closedV = list[6] == 1, + domainU = new Interval { start = list[7], end = list[8] }, + domainV = new Interval { start = list[9], end = list[10] } + }; + + var pointCount = (int)list[11]; + var knotsUCount = (int)list[12]; + var knotsVCount = (int)list[13]; + + srf.pointData = list.GetRange(14, pointCount); + srf.knotsU = list.GetRange(14 + pointCount, knotsUCount); + srf.knotsV = list.GetRange(14 + pointCount + knotsUCount, knotsVCount); + + var u = list[list.Count - 1]; + srf.units = Units.GetUnitFromEncoding(u); + return srf; + } +} diff --git a/src/Objects/Geometry/Vector.cs b/src/Objects/Geometry/Vector.cs new file mode 100644 index 00000000..ea3271eb --- /dev/null +++ b/src/Objects/Geometry/Vector.cs @@ -0,0 +1,261 @@ +using System; +using System.Collections.Generic; +using Objects.Other; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Geometry; + +/// +/// A 3-dimensional vector +/// +public class Vector : Base, IHasBoundingBox, ITransformable +{ + /// + public Vector() { } + + /// + /// Constructs a new 2D from it's X and Y coordinates. + /// + /// The x coordinate of the vector + /// The y coordinate of the vector + /// The units the coordinates are in. + /// The unique application ID of the object. + public Vector(double x, double y, string units = Units.Meters, string? applicationId = null) + { + this.x = x; + this.y = y; + z = 0; + this.applicationId = applicationId; + this.units = units; + } + + /// + /// Constructs a new 2D from it's X and Y coordinates. + /// + /// The x coordinate of the vector + /// The y coordinate of the vector + /// The y coordinate of the vector + /// The units the coordinates are in. + /// The unique application ID of the object. + public Vector(double x, double y, double z, string units = Units.Meters, string? applicationId = null) + { + this.x = x; + this.y = y; + this.z = z; + this.applicationId = applicationId; + this.units = units; + } + + /// + /// Constructs a new from a + /// + /// The point whose coordinates will be used + /// The unique application ID of the object. + public Vector(Point point, string? applicationId = null) + : this(point.x, point.y, point.z, point.units ?? Units.None, applicationId) { } + + /// + /// Gets or sets the coordinates of the vector + /// + [ + JsonProperty(NullValueHandling = NullValueHandling.Ignore), + Obsolete("Use X,Y,Z fields to access coordinates instead", true) + ] + public List value + { +#pragma warning disable CS8603 // Possible null reference return. + get => null; +#pragma warning restore CS8603 // Possible null reference return. + set + { + x = value[0]; + y = value[1]; + z = value.Count > 2 ? value[2] : 0; + } + } + + /// + /// The unit's this is in. 
+ /// This should be one of + /// + public string units { get; set; } = Units.None; + + /// + /// The x coordinate of the vector. + /// + public double x { get; set; } + + /// + /// The y coordinate of the vector. + /// + public double y { get; set; } + + /// + /// The z coordinate of the vector. + /// + public double z { get; set; } + + /// + /// Gets the Euclidean length of this vector. + /// + /// Length of the vector. + [JsonIgnore] + public double Length => Math.Sqrt(DotProduct(this, this)); + + /// + public Box bbox { get; set; } + + /// + public bool TransformTo(Transform transform, out Vector transformed) + { + var m = transform.matrix; + var tX = x * m.M11 + y * m.M12 + z * m.M13; + var tY = x * m.M21 + y * m.M22 + z * m.M23; + var tZ = x * m.M31 + y * m.M32 + z * m.M33; + transformed = new Vector(tX, tY, tZ, units, applicationId); + return true; + } + + /// + public bool TransformTo(Transform transform, out ITransformable transformed) + { + _ = TransformTo(transform, out Vector vec); + transformed = vec; + return true; + } + + /// + /// Returns the coordinates of this as a list of numbers + /// + /// A list of coordinates {x, y, z} + public List ToList() + { + return new List { x, y, z }; + } + + /// + /// Creates a new vector based on a list of coordinates and the unit they're drawn in. + /// + /// The list of coordinates {x, y, z} + /// The units the coordinates are in + /// A new with the provided coordinates. + public static Vector FromList(List list, string units) + { + return new Vector(list[0], list[1], list[2], units); + } + + /// + /// Divides a vector by a numerical value. This will divide each coordinate by the provided value. + /// + /// The vector to divide + /// The value to divide by + /// The resulting + public static Vector operator /(Vector vector, double val) => + new(vector.x / val, vector.y / val, vector.z / val, vector.units); + + /// + /// Multiplies a vector by a numerical value. This will multiply each coordinate by the provided value. + /// + /// The vector to multiply + /// The value to multiply by + /// The resulting + public static Vector operator *(Vector vector, double val) => + new(vector.x * val, vector.y * val, vector.z * val, vector.units); + + /// + /// Adds two vectors by adding each of their coordinates. + /// + /// The first vector + /// The second vector + /// The resulting + public static Vector operator +(Vector vector1, Vector vector2) => + new(vector1.x + vector2.x, vector1.y + vector2.y, vector1.z + vector2.z, vector1.units); + + /// + /// Subtracts two vectors by subtracting each of their coordinates. + /// + /// The first vector + /// The second vector + /// The resulting + public static Vector operator -(Vector vector1, Vector vector2) => + new(vector1.x - vector2.x, vector1.y - vector2.y, vector1.z - vector2.z, vector1.units); + + /// + /// Gets the scalar product (dot product) of two given vectors + /// Dot product = u1*v1 + u2*v2 + u3*v3. + /// + /// First vector. + /// Second vector. + /// Numerical value of the dot product. + public static double DotProduct(Vector u, Vector v) + { + return u.x * v.x + u.y * v.y + u.z * v.z; + } + + /// + /// Computes the vector product (cross product) of two given vectors + /// Cross product = { u2 * v3 - u3 * v2; u3 * v1 - u1 * v3; u1 * v2 - u2 * v1 }. + /// + /// First vector. + /// Second vector. + /// Vector result of the cross product. 
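+  /// For example, a small sketch using unit basis vectors (Units.Meters is assumed by default):
+  /// var x = new Vector(1, 0, 0);
+  /// var y = new Vector(0, 1, 0);
+  /// Vector z = Vector.CrossProduct(x, y);   // (0, 0, 1)
+  /// double d = Vector.DotProduct(x, y);     // 0, the vectors are perpendicular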
+ public static Vector CrossProduct(Vector u, Vector v) + { + var x = u.y * v.z - u.z * v.y; + var y = u.z * v.x - u.x * v.z; + var z = u.x * v.y - u.y * v.x; + + return new Vector(x, y, z); + } + + public static double Angle(Vector u, Vector v) + { + return Math.Acos(DotProduct(u, v) / (u.Length * v.Length)); + } + + [Obsolete("Renamed to " + nameof(Normalize), true)] + public void Unitize() + { + Normalize(); + } + + /// + /// Compute and return a unit vector from this vector + /// + /// a normalized unit vector + public void Normalize() + { + var length = Length; + x /= length; + y /= length; + z /= length; + } + + /// + /// Inverses the direction of the vector, equivalent to multiplying by -1 + /// + /// A pointing in the opposite direction + public Vector Negate() + { + x *= -1; + y *= -1; + z *= -1; + return this; + } + + /// + /// Returns a normalized copy of this vector. + /// + /// A copy of this vector unitized. + public Vector Unit() + { + return this / Length; + } + + public static Vector Divide(Vector left, Vector right) + { + throw new NotImplementedException(); + } +} diff --git a/src/Objects/Interfaces.cs b/src/Objects/Interfaces.cs new file mode 100644 index 00000000..9bd5aa4e --- /dev/null +++ b/src/Objects/Interfaces.cs @@ -0,0 +1,114 @@ +using System.Collections.Generic; +using Objects.BuiltElements; +using Objects.Geometry; +using Objects.Other; +using Objects.Primitive; +using Speckle.Core.Models; + +namespace Objects; + +#region Generic interfaces. + +/// +/// Represents an object that has a +/// +public interface IHasBoundingBox +{ + /// + /// The bounding box containing the object. + /// + Box bbox { get; } +} + +/// +/// Represents a object that has +/// +public interface IHasArea +{ + /// + /// The area of the object + /// + double area { get; set; } +} + +/// +/// Represents an object that has +/// +public interface IHasVolume +{ + /// + /// The volume of the object + /// + double volume { get; set; } +} + +/// +/// Represents +/// +public interface ICurve +{ + /// + /// The length of the curve. + /// + double length { get; set; } + + /// + /// The numerical domain driving the curve's internal parametrization. + /// + Interval domain { get; set; } +} + +/// +/// Generic Interface for transformable objects. +/// +/// The type of object to support transformations. +public interface ITransformable : ITransformable + where T : ITransformable +{ + /// + bool TransformTo(Transform transform, out T transformed); +} + +/// +/// Interface for transformable objects where the type may not be known on convert (eg ICurve implementations) +/// +public interface ITransformable +{ + /// + /// Returns a copy of the object with it's coordinates transformed by the provided + /// + /// The to be applied. + /// The transformed copy of the object. + /// True if the transform operation was successful, false otherwise. + bool TransformTo(Transform transform, out ITransformable transformed); +} + +#endregion + +#region Built elements + +/// +/// Specifies displayable value(s) to be used as a fallback +/// if a displayable form cannot be converted. +/// +/// +/// objects that represent conceptual / abstract / mathematically derived geometry +/// can use to be used in case the object lacks a natively displayable form. +/// (e.g , , ) +/// +/// +/// Type of display value. +/// Expected to be either a type or a of s, +/// most likely or . +/// +public interface IDisplayValue +{ + /// + /// (s) will be used to display this + /// if a native displayable object cannot be converted. 
+ /// + T displayValue { get; } +} + + +#endregion diff --git a/src/Objects/Objects.csproj b/src/Objects/Objects.csproj new file mode 100644 index 00000000..b3463e55 --- /dev/null +++ b/src/Objects/Objects.csproj @@ -0,0 +1,36 @@ + + + + netstandard2.0 + Objects + Objects + Speckle.Objects + Objects + Objects is the default object model for Speckle + $(PackageTags), objects + true + true + enable + + + + true + + $(NoWarn); + CA1008; CA1024; CA1034; CA1065; + CA1708; CA1711; CA1716; CA1724; CA1725; + CA1819; CS8618; + CA2201; CA2225; + CS0659; CS0661; CS0728; CS8618; + IDE0041; IDE0060; IDE1006; + + + + + + + + + + + \ No newline at end of file diff --git a/src/Objects/ObjectsKit.cs b/src/Objects/ObjectsKit.cs new file mode 100644 index 00000000..81b4812a --- /dev/null +++ b/src/Objects/ObjectsKit.cs @@ -0,0 +1,152 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; +using Speckle.Core.Helpers; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; + +namespace Objects; + +/// +/// The default Speckle Kit +/// +public class ObjectsKit : ISpeckleKit +{ + private static string? s_objectsFolder; + + private readonly Dictionary _loadedConverters = new(); + + private List? _converters; + + /// + /// Local installations store objects in C:\Users\USERNAME\AppData\Roaming\Speckle\Kits\Objects + /// Admin/System-wide installations in C:\ProgramData\Speckle\Kits\Objects + /// + public static string ObjectsFolder + { + get => s_objectsFolder ??= SpecklePathProvider.ObjectsFolderPath; + [Obsolete("Use " + nameof(SpecklePathProvider.OverrideObjectsFolderName), true)] + set => s_objectsFolder = value; + } + + /// + public string Description => "The default Speckle Kit."; + + /// + public string Name => "Objects"; + + /// + public string Author => "Speckle"; + + /// + public string WebsiteOrEmail => "https://speckle.systems"; + + /// + public IEnumerable Types => + Assembly.GetExecutingAssembly().GetTypes().Where(t => t.IsSubclassOf(typeof(Base)) && !t.IsAbstract); + + /// + public IEnumerable Converters => _converters ??= GetAvailableConverters(); + + /// + public ISpeckleConverter LoadConverter(string app) + { + try + { + _converters = GetAvailableConverters(); + if (_loadedConverters.TryGetValue(app, out Type t)) + { + return (ISpeckleConverter)Activator.CreateInstance(t); + } + + var converterInstance = LoadConverterFromDisk(app); + _loadedConverters[app] = converterInstance.GetType(); + + return converterInstance; + } + catch (Exception ex) + { + SpeckleLog.Logger.Fatal(ex, "Failed to load converter for app {app}", app); + throw new KitException($"Failed to load converter for app {app}:\n\n{ex.Message}", this, ex); + } + } + + private static ISpeckleConverter LoadConverterFromDisk(string app) + { + var basePath = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location); + + var path = Path.Combine(basePath!, $"Objects.Converter.{app}.dll"); + + //fallback to the default folder, in case the Objects.dll was loaded in the app domain for other reasons + if (!File.Exists(path)) + { + path = Path.Combine(ObjectsFolder, $"Objects.Converter.{app}.dll"); + } + + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Converter for {app} was not found in kit {basePath}", path); + } + + AssemblyName assemblyToLoad = AssemblyName.GetAssemblyName(path); + var objects = Assembly.GetExecutingAssembly().GetName(); + + //only get assemblies matching the Major and Minor version of Objects + if 
(assemblyToLoad.Version.Major != objects.Version.Major || assemblyToLoad.Version.Minor != objects.Version.Minor) + { + throw new SpeckleException( + $"Mismatch between Objects library v{objects.Version} Converter v{assemblyToLoad.Version}.\nEnsure the same 2.x version of Speckle connectors is installed." + ); + } + + var assembly = Assembly.LoadFrom(path); + + var converterInstance = assembly + .GetTypes() + .Where(type => typeof(ISpeckleConverter).IsAssignableFrom(type)) + .Select(type => (ISpeckleConverter)Activator.CreateInstance(type)) + .FirstOrDefault(converter => converter.GetServicedApplications().Contains(app)); + + if (converterInstance == null) + { + throw new SpeckleException($"No suitable converter instance found for {app}"); + } + + SpeckleLog.Logger + .ForContext() + .ForContext("basePath", basePath) + .ForContext("app", app) + .Information("Converter {converterName} successfully loaded from {path}", converterInstance.Name, path); + + return converterInstance; + } + + public List GetAvailableConverters() + { + var basePath = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location); + var allConverters = Directory.EnumerateFiles(basePath!, "Objects.Converter.*.dll").ToArray(); + + //fallback to the default folder, in case the Objects.dll was loaded in the app domain for other reasons + if (allConverters.Length == 0) + { + allConverters = Directory.EnumerateFiles(ObjectsFolder, "Objects.Converter.*.dll").ToArray(); + } + + //only get assemblies matching the Major and Minor version of Objects + var objects = Assembly.GetExecutingAssembly().GetName(); + var availableConverters = new List(); + foreach (var converter in allConverters) + { + AssemblyName assemblyName = AssemblyName.GetAssemblyName(converter); + if (assemblyName.Version.Major == objects.Version.Major && assemblyName.Version.Minor == objects.Version.Minor) + { + availableConverters.Add(converter); + } + } + + return availableConverters.Select(dllPath => dllPath.Split('.').Reverse().ElementAt(1)).ToList(); + } +} diff --git a/src/Objects/Organization/DataTable.cs b/src/Objects/Organization/DataTable.cs new file mode 100644 index 00000000..81721e90 --- /dev/null +++ b/src/Objects/Organization/DataTable.cs @@ -0,0 +1,45 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Speckle.Core.Models; + +namespace Objects.Organization; + +public class DataTable : Base +{ + public DataTable() { } + + public int columnCount => columnMetadata.Count; + public int rowCount => rowMetadata.Count; + public int headerRowIndex { get; set; } + public string name { get; set; } + public List rowMetadata { get; set; } = new List(); + public List columnMetadata { get; set; } = new List(); + public List> data { get; set; } = new List>(); + + public void AddRow(Base metadata, int index = -1, params object[] objects) + { + if (objects.Length != columnCount) + { + throw new ArgumentException( + $"\"AddRow\" method was passed {objects.Length} objects, but the DataTable has {columnCount} columns. Partial and extended table rows are not accepted by the DataTable object." 
+ ); + } + + if (index < 0 || index >= data.Count) + { + data.Add(objects.ToList()); + rowMetadata.Add(metadata); + } + else + { + data.Insert(index, objects.ToList()); + rowMetadata.Insert(index, metadata); + } + } + + public void DefineColumn(Base metadata) + { + columnMetadata.Add(metadata); + } +} diff --git a/src/Objects/Organization/Deprecated/Collection.cs b/src/Objects/Organization/Deprecated/Collection.cs new file mode 100644 index 00000000..6c05f15e --- /dev/null +++ b/src/Objects/Organization/Deprecated/Collection.cs @@ -0,0 +1,11 @@ +using System; + +namespace Objects.Organization.Deprecated; + +[Obsolete("Replaced by " + nameof(Speckle.Core.Models.Collection))] +public class Collection : Speckle.Core.Models.Collection +{ + //Deserializer target for 2.13 Collection objects in the `Objects.Orgainzation` namespace + + //Objects.Organization.Deprecated.Collection:Speckle.Core.Models.Collection +} diff --git a/src/Objects/Organization/Model.cs b/src/Objects/Organization/Model.cs new file mode 100644 index 00000000..0461f844 --- /dev/null +++ b/src/Objects/Organization/Model.cs @@ -0,0 +1,72 @@ +using System.Collections.Generic; +using Objects.Other; +using Speckle.Core.Models; + +namespace Objects.Organization; + +/// +/// Basic model info class +/// It contains general information about the model and can be extended or subclassed to include more application-specific +/// information. +/// +public class ModelInfo : Base +{ + /// + /// The name of the model. + /// + public string name { get; set; } + + /// + /// The identifying number of the model. + /// + public string number { get; set; } + + // TODO: not sure about adding a typed `elements` list here? prob should let ppl add whatever named categories here? +} + +// TODO: not quite sure about this name? +/// +/// Extended to contain additional properties applicable to AEC projects. +/// +public class BIMModelInfo : ModelInfo +{ + /// + /// The name of the client + /// + public string clientName { get; set; } + + /// + /// The name of the building + /// + public string buildingName { get; set; } + + /// + /// The status or phase of the model. + /// + public string status { get; set; } + + /// + /// The address of the model. + /// + public string address { get; set; } + + /// + /// The name of the site location as a string. + /// + public string siteName { get; set; } + + /// + /// The latitude of the site location in radians. + /// + public double latitude { get; set; } + + /// + /// The longitude of the site location in radians. + /// + public double longitude { get; set; } + + /// + /// A list of origin locations within this model as a list of s + /// + public List locations { get; set; } +} diff --git a/src/Objects/Other/Block.cs b/src/Objects/Other/Block.cs new file mode 100644 index 00000000..fd455dd9 --- /dev/null +++ b/src/Objects/Other/Block.cs @@ -0,0 +1,45 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Other; + +/// +/// Block definition class +/// +public class BlockDefinition : Base +{ + public BlockDefinition() { } + + [SchemaInfo("Block Definition", "A Speckle Block definition")] + public BlockDefinition(string name, List geometry, Point? basePoint = null) + { + this.name = name; + this.basePoint = basePoint ?? 
new() { units = Units.None }; + this.geometry = geometry; + } + + public string name { get; set; } + + /// + /// The definition base point of the block + /// + public Point basePoint { get; set; } = new() { units = Units.None }; + + [DetachProperty] + public List geometry { get; set; } + + public string units { get; set; } = Units.None; + + /// + /// Returns the translation transform of the base point to the internal origin [0,0,0] + /// + /// + public Transform GetBasePointTransform() + { + var translation = new Vector(-basePoint.x, -basePoint.y, -basePoint.z) { units = basePoint.units ?? Units.None }; + var transform = new Transform(new Vector(1, 0, 0), new Vector(0, 1, 0), new Vector(1, 0, 0), translation); + return transform; + } +} diff --git a/src/Objects/Other/Civil/CivilDataField.cs b/src/Objects/Other/Civil/CivilDataField.cs new file mode 100644 index 00000000..48e1a2ad --- /dev/null +++ b/src/Objects/Other/Civil/CivilDataField.cs @@ -0,0 +1,20 @@ +namespace Objects.Other.Civil; + +public class CivilDataField : DataField +{ + public CivilDataField() { } + + public CivilDataField(string name, string type, string units, string context, object? value = null) + { + this.name = name; + this.type = type; + this.units = units; + this.context = context; + this.value = value; + } + + /// + /// The context type of the Civil3D part + /// + public string context { get; set; } +} diff --git a/src/Objects/Other/DataField.cs b/src/Objects/Other/DataField.cs new file mode 100644 index 00000000..6770b3a8 --- /dev/null +++ b/src/Objects/Other/DataField.cs @@ -0,0 +1,27 @@ +using Speckle.Core.Models; + +namespace Objects.Other; + +/// +/// Generic class for a data field +/// +public class DataField : Base +{ + public DataField() { } + + public DataField(string name, string type, string units, object? value = null) + { + this.name = name; + this.type = type; + this.units = units; + this.value = value; + } + + public string name { get; set; } + + public string type { get; set; } + + public object? value { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/Other/Dimension.cs b/src/Objects/Other/Dimension.cs new file mode 100644 index 00000000..693e648e --- /dev/null +++ b/src/Objects/Other/Dimension.cs @@ -0,0 +1,97 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.Other; + +/// +/// Dimension class +/// +public class Dimension : Base, IDisplayValue> +{ + /// + /// The measurement of the dimension. + /// + public double measurement { get; set; } + + /// + /// The text of the dimension, without any formatting + /// + public string value { get; set; } + + /// + /// The text of the dimension, with rtf formatting + /// + public string richText { get; set; } + + /// + /// The position of the dimension + /// + public Point position { get; set; } + + /// + /// The position of the text of the dimension + /// + public Point textPosition { get; set; } + + public string units { get; set; } + + /// + /// Curves representing the annotation + /// + public List displayValue { get; set; } = new(); +} + +/// +/// Dimension class measuring a distance +/// +public class DistanceDimension : Dimension +{ + /// + /// The unitized normal of the dimension. 
+ /// + public Vector direction { get; set; } + + /// + /// Indicates if this dimension is an ordinate dimension + /// + /// Ordinate dimensions (measuring distance between two points exclusively along the x or y axis) + /// are in practice drawn with different conventions than linear dimensions, and are treated as a special subset of them. + public bool isOrdinate { get; set; } + + /// + /// The objects being measured. + /// + /// + /// Distance measurements are between two points + /// + public List measured { get; set; } +} + +/// +/// Dimension class measuring a length +/// +public class LengthDimension : Dimension +{ + /// + /// The objects being measured. + /// + /// + /// For length dimensions, this is a curve + /// + public ICurve measured { get; set; } +} + +/// +/// Dimension class measuring an angle +/// +public class AngleDimension : Dimension +{ + /// + /// The objects being measured. + /// + /// + /// For angle dimensions, this is two lines + /// + public List measured { get; set; } +} diff --git a/src/Objects/Other/DisplayStyle.cs b/src/Objects/Other/DisplayStyle.cs new file mode 100644 index 00000000..92a6c103 --- /dev/null +++ b/src/Objects/Other/DisplayStyle.cs @@ -0,0 +1,23 @@ +using System.Drawing; +using Speckle.Core.Models; + +namespace Objects.Other; + +/// +/// Minimal display style class. Developed primarily for display styles in Rhino and AutoCAD. +/// Rhino object attributes uses OpenNURBS definition for linetypes and lineweights +/// +public class DisplayStyle : Base +{ + public string name { get; set; } + public int color { get; set; } = Color.LightGray.ToArgb(); // opacity assumed from a value + public string linetype { get; set; } + + /// + /// The plot weight in the style units + /// + /// A value of 0 indicates a default weight, and -1 indicates an invisible line + public double lineweight { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/Other/Hatch.cs b/src/Objects/Other/Hatch.cs new file mode 100644 index 00000000..c18b66f9 --- /dev/null +++ b/src/Objects/Other/Hatch.cs @@ -0,0 +1,43 @@ +using System; +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.Other; + +public class Hatch : Base +{ + [Obsolete("Use Loops instead")] + public List curves { get; set; } + + public List loops { get; set; } + public string pattern { get; set; } + public double scale { get; set; } = 1; + public double rotation { get; set; } // relative angle +} + +/// +/// Represents a Hatch Loop from a 's curve. +/// +public class HatchLoop : Base +{ + public HatchLoop() { } + + public HatchLoop(ICurve curve, HatchLoopType type) + { + Curve = curve; + Type = type; + } + + public ICurve Curve { get; set; } + public HatchLoopType Type { get; set; } +} + +/// +/// Represents the type of a loop in a 's curves. +/// +public enum HatchLoopType +{ + Unknown, + Outer, + Inner +} diff --git a/src/Objects/Other/Instance.cs b/src/Objects/Other/Instance.cs new file mode 100644 index 00000000..34d5bdd4 --- /dev/null +++ b/src/Objects/Other/Instance.cs @@ -0,0 +1,164 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Core.Models.GraphTraversal; +using Speckle.Newtonsoft.Json; + +namespace Objects.Other; + +public abstract class Instance : Base +{ + protected Instance(Transform transform) + { + this.transform = transform ?? 
new Transform(); + } + + protected Instance() { } + + /// + /// The column-dominant 4x4 transform of this instance. + /// + /// + /// Indicates transform from internal origin [0,0,0] + /// + public Transform transform { get; set; } + + public abstract Base definition { get; internal set; } + + /// + /// The units of this Instance, should be the same as the instance transform units + /// + public string units { get; set; } + + // helper method that scans an Instance for all transformable geometry and nested instances + protected virtual IEnumerable GetTransformableGeometry() + { + var displayValueRule = TraversalRule + .NewTraversalRule() + .When(DefaultTraversal.HasDisplayValue) + .ContinueTraversing(_ => DefaultTraversal.DisplayValueAndElementsPropAliases); + + var instanceRule = TraversalRule + .NewTraversalRule() + .When(b => b is Instance instance && instance != null) + .ContinueTraversing(DefaultTraversal.None); + + var traversal = new GraphTraversal(instanceRule, displayValueRule, DefaultTraversal.DefaultRule); + + return traversal + .Traverse(definition) + .Select(tc => tc.Current) + .Where(b => b is ITransformable || b is Instance) + .Where(b => b != null); + } + + [SchemaComputed("transformedGeometry")] + public virtual IEnumerable GetTransformedGeometry() + { + return GetTransformableGeometry() + .SelectMany(b => + { + switch (b) + { + case Instance i: + return i.GetTransformedGeometry() + .Select(b => + { + b.TransformTo(transform, out var tranformed); + return tranformed; + }); + case ITransformable bt: + var res = bt.TransformTo(transform, out var transformed); + return res ? new List { transformed } : new(); + default: + return new List(); + } + }) + .Where(b => b != null); + } +} + +/// +/// Generic instance class +/// +public abstract class Instance : Instance + where T : Base +{ + protected Instance(T definition, Transform transform) + : base(transform) + { + typedDefinition = definition; + } + + protected Instance() + : base(new Transform()) { } + + [JsonIgnore] + public T typedDefinition { get; set; } + + [DetachProperty] + public override Base definition + { + get => typedDefinition; + internal set + { + if (value is T type) + { + typedDefinition = type; + } + } + } +} + +/// +/// Block instance class +/// +public class BlockInstance : Instance +{ + public BlockInstance() { } + + [SchemaInfo("Block Instance", "A Speckle Block Instance")] + public BlockInstance(BlockDefinition blockDefinition, Transform transform) + : base(blockDefinition, transform) + { + // OLD: TODO: need to verify + // Add base translation to transform. This assumes the transform is based on the world origin, + // whereas the instance transform assumes it contains the basePoint translation already. + //this.transform = transform * blockDefinition.GetBasePointTransform(); + } + + [DetachProperty, Obsolete("Use definition property", true), JsonIgnore] + public BlockDefinition blockDefinition + { + get => typedDefinition; + set => typedDefinition = value; + } + + protected override IEnumerable GetTransformableGeometry() + { + return typedDefinition.geometry; + } + + /// + /// Returns a plane representing the insertion point and orientation of this Block instance. + /// + /// This method will skip scaling. If you need scaling, we recommend using the transform instead. + /// A Plane on the insertion point of this Block Instance, with the correct 3-axis rotations. + [SchemaComputed("insertionPlane")] + public Plane GetInsertionPlane() + { + // TODO: UPDATE! 
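+    // Build a world-aligned plane at the definition's base point (falling back to the origin),
+    // then push it through the instance transform to pick up the placed position and rotation.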
+ var plane = new Plane( + typedDefinition.basePoint ?? new Point(0, 0, 0, units), + new Vector(0, 0, 1, units), + new Vector(1, 0, 0, units), + new Vector(0, 1, 0, units), + units + ); + plane.TransformTo(transform, out Plane tPlane); + return tPlane; + } +} diff --git a/src/Objects/Other/MappedBlockWrapper.cs b/src/Objects/Other/MappedBlockWrapper.cs new file mode 100644 index 00000000..28daba64 --- /dev/null +++ b/src/Objects/Other/MappedBlockWrapper.cs @@ -0,0 +1,19 @@ +using Objects.BuiltElements.Revit; +using Speckle.Core.Models; + +namespace Objects.Other; + +public class MappedBlockWrapper : Base +{ + public string category { get; set; } = RevitCategory.GenericModel.ToString(); + public string? nameOverride { get; set; } + public BlockInstance instance { get; set; } + + public MappedBlockWrapper() { } + + public MappedBlockWrapper(BlockInstance instance, RevitCategory category) + { + this.instance = instance; + this.category = category.ToString(); + } +} diff --git a/src/Objects/Other/Material.cs b/src/Objects/Other/Material.cs new file mode 100644 index 00000000..83a0e2e8 --- /dev/null +++ b/src/Objects/Other/Material.cs @@ -0,0 +1,20 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Other; + +/// +/// Generic class for materials containing generic parameters +/// +public class Material : Base +{ + public Material() { } + + [SchemaInfo("RevitMaterial", "Creates a Speckle material", "BIM", "Architecture")] + public Material(string name) + { + this.name = name; + } + + public string name { get; set; } +} diff --git a/src/Objects/Other/MaterialQuantity.cs b/src/Objects/Other/MaterialQuantity.cs new file mode 100644 index 00000000..790690a1 --- /dev/null +++ b/src/Objects/Other/MaterialQuantity.cs @@ -0,0 +1,33 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Other; + +public class MaterialQuantity : Base +{ + public MaterialQuantity() { } + + [SchemaInfo("MaterialQuantity", "Creates the quantity of a material")] + public MaterialQuantity(Material m, double volume, double area, string units) + { + material = m; + this.volume = volume; + this.area = area; + this.units = units; + } + + [DetachProperty] + public Material material { get; set; } + + public double volume { get; set; } + + /// + /// Area of the material on a element + /// + public double area { get; set; } + + /// + /// UnitMeasure of the quantity,e.g meters implies squaremeters for area and cubicmeters for the volume + /// + public string units { get; set; } +} diff --git a/src/Objects/Other/RenderMaterial.cs b/src/Objects/Other/RenderMaterial.cs new file mode 100644 index 00000000..dfe7daa8 --- /dev/null +++ b/src/Objects/Other/RenderMaterial.cs @@ -0,0 +1,60 @@ +using System.Drawing; +using Speckle.Core.Kits; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Other; + +/// +/// Minimal physically based material DTO class. Based on references from +/// https://threejs.org/docs/index.html#api/en/materials/MeshStandardMaterial +/// Theoretically has equivalents in Unity and Unreal. +/// +/// See: https://docs.unrealengine.com/en-US/RenderingAndGraphics/Materials/PhysicallyBased/index.html +/// And: https://blogs.unity3d.com/2014/10/29/physically-based-shading-in-unity-5-a-primer/ +/// +public class RenderMaterial : Base +{ + public RenderMaterial() { } + + [SchemaInfo("RenderMaterial", "Creates a render material.", "BIM", "Other")] + public RenderMaterial( + double opacity = 1, + double metalness = 0, + double roughness = 1, + Color? 
diffuse = null,
+    Color? emissive = null
+  )
+  {
+    this.opacity = opacity;
+    this.metalness = metalness;
+    this.roughness = roughness;
+    this.diffuse = diffuse.HasValue ? diffuse.Value.ToArgb() : Color.LightGray.ToArgb();
+    this.emissive = emissive.HasValue ? emissive.Value.ToArgb() : Color.Black.ToArgb();
+  }
+
+  public string name { get; set; }
+  public double opacity { get; set; } = 1;
+  public double metalness { get; set; }
+  public double roughness { get; set; } = 1;
+
+  [SchemaIgnore]
+  public int diffuse { get; set; } = Color.LightGray.ToArgb();
+
+  [SchemaIgnore]
+  public int emissive { get; set; } = Color.Black.ToArgb();
+
+  [JsonIgnore]
+  public Color diffuseColor
+  {
+    get => Color.FromArgb(diffuse);
+    set => diffuse = value.ToArgb();
+  }
+
+  [JsonIgnore]
+  public Color emissiveColor
+  {
+    get => Color.FromArgb(emissive);
+    set => emissive = value.ToArgb();
+  }
+}
diff --git a/src/Objects/Other/Revit/RevitInstance.cs b/src/Objects/Other/Revit/RevitInstance.cs
new file mode 100644
index 00000000..98614d75
--- /dev/null
+++ b/src/Objects/Other/Revit/RevitInstance.cs
@@ -0,0 +1,70 @@
+using System.Collections.Generic;
+using System.Linq;
+using Objects.BuiltElements;
+using Objects.BuiltElements.Revit;
+using Objects.Geometry;
+using Speckle.Core.Kits;
+using Speckle.Core.Models;
+
+namespace Objects.Other.Revit;
+
+public class RevitInstance : Instance<RevitSymbolElementType>
+{
+  public Level level { get; set; }
+  public bool facingFlipped { get; set; }
+  public bool handFlipped { get; set; }
+  public bool mirrored { get; set; }
+  public Base parameters { get; set; }
+  public string elementId { get; set; }
+
+  protected override IEnumerable<Base> GetTransformableGeometry()
+  {
+    var allChildren = typedDefinition.elements ?? new List<Base>();
+    if (typedDefinition.displayValue.Count != 0)
+    {
+      allChildren.AddRange(typedDefinition.displayValue);
+    }
+
+    return allChildren;
+  }
+
+  [SchemaComputed("transformedGeometry")]
+  public override IEnumerable<ITransformable> GetTransformedGeometry()
+  {
+    var transformed = base.GetTransformedGeometry().ToList();
+
+    // add any dynamically attached elements on this instance
+    if ((this["elements"] ?? this["@elements"]) is List<object> elements)
+    {
+      foreach (var element in elements)
+      {
+        if (((Base)element)["displayValue"] is List<Base> display)
+        {
+          transformed.AddRange(display.Cast<ITransformable>());
+        }
+      }
+    }
+
+    return transformed;
+  }
+
+  /// <summary>
+  /// Returns a plane representing the insertion point and orientation of this Revit instance.
+  /// </summary>
+  /// <remarks>This method will skip scaling. If you need scaling, we recommend using the transform instead.</remarks>
+  /// <returns>A Plane on the insertion point of this Block Instance, with the correct 3-axis rotations.</returns>
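// Usage sketch (illustrative only, not part of this patch): RenderMaterial (defined earlier in
// this patch) keeps its colours as ARGB integers so they serialise as plain numbers; the
// [JsonIgnore] diffuseColor/emissiveColor properties are a System.Drawing.Color view over them.
using System.Drawing;
using Objects.Other;

public static class RenderMaterialSample
{
  public static RenderMaterial MakeRed()
  {
    var material = new RenderMaterial(opacity: 1, metalness: 0, roughness: 0.5, diffuse: Color.Red)
    {
      name = "Red, slightly glossy",
    };

    // Writing through the wrapper is equivalent to assigning the int directly.
    material.emissiveColor = Color.Black;       // stores Color.Black.ToArgb()
    int argb = material.diffuse;                // raw ARGB int, e.g. -65536 for opaque red
    Color roundTripped = Color.FromArgb(argb);  // and back to a Color

    return material;
  }
}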
+ [SchemaComputed("insertionPlane")] + public Plane GetInsertionPlane() + { + // TODO: Check for Revit in GH/DYN + var plane = new Plane( + new Point(0, 0, 0, units), + new Vector(0, 0, 1, units), + new Vector(1, 0, 0, units), + new Vector(0, 1, 0, units), + units + ); + plane.TransformTo(transform, out Plane tPlane); + return tPlane; + } +} diff --git a/src/Objects/Other/Revit/RevitMaterial.cs b/src/Objects/Other/Revit/RevitMaterial.cs new file mode 100644 index 00000000..943950fd --- /dev/null +++ b/src/Objects/Other/Revit/RevitMaterial.cs @@ -0,0 +1,44 @@ +using System.Collections.Generic; +using Objects.BuiltElements.Revit; +using Objects.Utils; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Other.Revit; + +/// +/// Material in Revit defininf all revit properties from Autodesk.Revit.DB.Material +/// +public class RevitMaterial : Material +{ + public RevitMaterial() { } + + [SchemaInfo("RevitMaterial", "Creates a Speckle material", "Revit", "Architecture")] + public RevitMaterial( + string name, + string category, + string materialclass, + int shiny, + int smooth, + int transparent, + List? parameters = null + ) + { + this.parameters = parameters?.ToBase(); + this.name = name; + materialCategory = category; + materialClass = materialclass; + shininess = shiny; + smoothness = smooth; + transparency = transparent; + } + + public string materialCategory { get; set; } + public string materialClass { get; set; } + + public int shininess { get; set; } + public int smoothness { get; set; } + public int transparency { get; set; } + + public Base? parameters { get; set; } +} diff --git a/src/Objects/Other/Text.cs b/src/Objects/Other/Text.cs new file mode 100644 index 00000000..22802b19 --- /dev/null +++ b/src/Objects/Other/Text.cs @@ -0,0 +1,19 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Models; + +namespace Objects.Other; + +/// +/// Text class for Rhino and AutoCAD +/// +public class Text : Base, IDisplayValue> +{ + public Plane plane { get; set; } // origin should be center + public double rotation { get; set; } // using radians + public string value { get; set; } // text without RTF + public string richText { get; set; } + public double height { get; set; } + public string units { get; set; } + public List displayValue { get; set; } = new(); +} diff --git a/src/Objects/Other/Transform.cs b/src/Objects/Other/Transform.cs new file mode 100644 index 00000000..d3a64844 --- /dev/null +++ b/src/Objects/Other/Transform.cs @@ -0,0 +1,465 @@ +using System; +using System.Collections.Generic; +using System.DoubleNumerics; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; +using Vector = Objects.Geometry.Vector; + +namespace Objects.Other; + +/// +/// Generic transform class +/// +public class Transform : Base +{ + public Transform() { } + + /// + /// Construct a transform from a row-based double array of size 16 + /// + /// + /// + /// + public Transform(double[] value, string units = Units.None) + { + if (value.Length != 16) + { + throw new ArgumentException( + $"{nameof(Transform)}.{nameof(value)} array is malformed: expected length to be 16", + nameof(value) + ); + } + + matrix = CreateMatrix(value); + this.units = units; + } + + /// + /// Construct a transform from a row-based float array of size 16 + /// + /// + /// + /// + public Transform(float[] value, string units = Units.None) + { + if (value.Length != 16) + { + throw new 
SpeckleException($"{nameof(Transform)}.{nameof(value)} array is malformed: expected length to be 16"); + } + + matrix = CreateMatrix(value); + this.units = units; + } + + /// + /// Construct a transform from a 4x4 matrix and translation units + /// + /// + /// + public Transform(Matrix4x4 matrix, string units = Units.None) + { + this.matrix = matrix; + this.units = units; + } + + /// + /// Construct a transform given the x, y, and z bases and the translation vector + /// + /// + /// + /// + /// + public Transform(Vector x, Vector y, Vector z, Vector translation) + { + matrix = new Matrix4x4( + x.x, + y.x, + z.x, + translation.x, + x.y, + y.y, + z.y, + translation.y, + x.z, + y.z, + z.z, + translation.z, + 0f, + 0f, + 0f, + 1f + ); + units = translation.units; + } + + /// + /// The column-based 4x4 transform matrix + /// + /// + /// Graphics based apps typically use column-based matrices, where the last column defines translation. + /// Modelling apps may use row-based matrices, where the last row defines translation. Transpose if so. + /// + public Matrix4x4 matrix { get; set; } = Matrix4x4.Identity; + + /// + /// Units for translation + /// + public string units { get; set; } = Units.None; + + /// + /// Decomposes matrix into its scaling, rotation, and translation components + /// + /// + /// + /// + /// True if successful, false otherwise + public void Decompose(out Vector3 scale, out Quaternion rotation, out Vector4 translation) + { + // translation + translation = new Vector4(matrix.M14, matrix.M24, matrix.M34, matrix.M44); + + // scale + // this should account for non-uniform scaling + Vector4 basis4dX = new(matrix.M11, matrix.M21, matrix.M31, matrix.M41); + Vector4 basis4dY = new(matrix.M12, matrix.M22, matrix.M32, matrix.M42); + Vector4 basis4dZ = new(matrix.M13, matrix.M23, matrix.M33, matrix.M43); + + // Check for mirroring + Vector3 basisX = new(matrix.M11, matrix.M21, matrix.M31); + Vector3 basisY = new(matrix.M12, matrix.M22, matrix.M32); + Vector3 basisZ = new(matrix.M13, matrix.M23, matrix.M33); + // Negative determinant means flip on Z. + // TODO: Add tests and figure out exactly why this is. Jedd and myself have some theories but it would be nice to document this properly + double determinant = Vector3.Dot(Vector3.Cross(basisX, basisY), basisZ) < 0 ? 
-1 : 1; + + // Compute the scale, but only multiply the Z scale by the determinant to flag negative scaling on Z axis (see todo above) + scale = new Vector3(basis4dX.Length(), basis4dY.Length(), basis4dZ.Length() * determinant); + + // rotation + // this is using a z-up convention for basis vectors + var up = new Vector3(matrix.M13, matrix.M23, matrix.M33); + var forward = new Vector3(matrix.M12, matrix.M22, matrix.M32); + rotation = LookRotation(forward, up); + } + + private static Quaternion LookRotation(Vector3 forward, Vector3 up) + { + Vector3 vector = new(forward.X / forward.Length(), forward.Y / forward.Length(), forward.Z / forward.Length()); + Vector3 vector2 = Vector3.Cross(up, forward); + Vector3 vector3 = Vector3.Cross(vector, vector2); + var m00 = vector2.X; + var m01 = vector2.Y; + var m02 = vector2.Z; + var m10 = vector3.X; + var m11 = vector3.Y; + var m12 = vector3.Z; + var m20 = vector.X; + var m21 = vector.Y; + var m22 = vector.Z; + + var num8 = m00 + m11 + m22; + if (num8 > 0d) + { + var num = Math.Sqrt(num8 + 1d); + num = 0.5d / num; + return new Quaternion((m12 - m21) * num, (m20 - m02) * num, (m01 - m10) * num, num * 0.5d); + } + if (m00 >= m11 && m00 >= m22) + { + var num7 = Math.Sqrt(1d + m00 - m11 - m22); + var num4 = 0.5d / num7; + return new Quaternion(0.5d * num7, (m01 + m10) * num4, (m02 + m20) * num4, (m12 - m21) * num4); + } + if (m11 > m22) + { + var num6 = Math.Sqrt(1d + m11 - m00 - m22); + var num3 = 0.5d / num6; + return new Quaternion((m10 + m01) * num3, 0.5d * num6, (m21 + m12) * num3, (m20 - m02) * num3); + } + var num5 = Math.Sqrt(1d + m22 - m00 - m11); + var num2 = 0.5d / num5; + return new Quaternion((m20 + m02) * num2, (m21 + m12) * num2, 0.5d * num5, (m01 - m10) * num2); + } + + /// + /// Converts this transform to the input units + /// + /// The target units + /// A matrix array with the translation scaled by input units + /// If either the transform's or the given is , will return the matrix array data unscaled + public double[] ConvertToUnits(string newUnits) + { + if (newUnits == null || units == null) + { + return ToArray(); + } + + var sf = Units.GetConversionFactor(units, newUnits); + + return new[] + { + matrix.M11, + matrix.M12, + matrix.M13, + matrix.M14 * sf, + matrix.M21, + matrix.M22, + matrix.M23, + matrix.M24 * sf, + matrix.M31, + matrix.M32, + matrix.M33, + matrix.M34 * sf, + matrix.M41, + matrix.M42, + matrix.M43, + matrix.M44 + }; + } + + public Transform Inverse() + { + if (Matrix4x4.Invert(matrix, out var transformed)) + { + return new Transform(transformed); + } + throw new SpeckleException("Could not create inverse transform"); + } + + /// + /// Returns the matrix that results from multiplying two matrices together. 
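// Usage sketch (illustrative only, not part of this patch): combining two transforms with the
// operator documented below. The right-hand transform is converted into the left-hand units
// before multiplication, so the result carries t1's units.
using Objects.Other;
using Speckle.Core.Kits;

public static class TransformComposeSample
{
  public static Transform Compose()
  {
    // Translate 1 m along X; translation sits in the fourth column (indices 3, 7 and 11).
    var t1 = new Transform(new double[] { 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 }, Units.Meters);

    // Translate 500 mm along Y; scaled into metres when combined with t1.
    var t2 = new Transform(new double[] { 1, 0, 0, 0, 0, 1, 0, 500, 0, 0, 1, 0, 0, 0, 0, 1 }, Units.Millimeters);

    var combined = t1 * t2; // net translation of (1, 0.5, 0) in metres

    // Decompose splits the result back into scale, rotation and translation components.
    combined.Decompose(out var scale, out var rotation, out var translation);

    return combined;
  }
}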
+ /// + /// The first transform + /// The second transform + /// A transform matrix with the units of the first transform + public static Transform operator *(Transform t1, Transform t2) + { + var convertedTransform = CreateMatrix(t2.ConvertToUnits(t1.units)); + var newMatrix = t1.matrix * convertedTransform; + return new Transform(newMatrix, t1.units); + } + + /// + /// Returns the double array of the transform matrix + /// + /// + public double[] ToArray() + { + return new double[] + { + matrix.M11, + matrix.M12, + matrix.M13, + matrix.M14, + matrix.M21, + matrix.M22, + matrix.M23, + matrix.M24, + matrix.M31, + matrix.M32, + matrix.M33, + matrix.M34, + matrix.M41, + matrix.M42, + matrix.M43, + matrix.M44 + }; + } + + // Creates a matrix4x4 from a double array + internal static Matrix4x4 CreateMatrix(double[] value) + { + return new Matrix4x4( + value[0], + value[1], + value[2], + value[3], + value[4], + value[5], + value[6], + value[7], + value[8], + value[9], + value[10], + value[11], + value[12], + value[13], + value[14], + value[15] + ); + } + + // Creates a matrix from a float array + internal static Matrix4x4 CreateMatrix(float[] value) + { + return new Matrix4x4( + Convert.ToDouble(value[0]), + Convert.ToDouble(value[1]), + Convert.ToDouble(value[2]), + Convert.ToDouble(value[3]), + Convert.ToDouble(value[4]), + Convert.ToDouble(value[5]), + Convert.ToDouble(value[6]), + Convert.ToDouble(value[7]), + Convert.ToDouble(value[8]), + Convert.ToDouble(value[9]), + Convert.ToDouble(value[10]), + Convert.ToDouble(value[11]), + Convert.ToDouble(value[12]), + Convert.ToDouble(value[13]), + Convert.ToDouble(value[14]), + Convert.ToDouble(value[15]) + ); + } + + #region obsolete + + [JsonIgnore, Obsolete("Use the matrix property", true)] + [System.Diagnostics.CodeAnalysis.SuppressMessage( + "Performance", + "CA1819:Properties should not return arrays", + Justification = "Obsolete" + )] + public double[] value + { + get => ToArray(); + set => matrix = CreateMatrix(value); + } + + [JsonIgnore, Obsolete("Use Decompose method", true)] + public double rotationZ + { + get + { + Decompose(out _, out Quaternion rotation, out _); + return Math.Acos(rotation.W) * 2; + } + } + + /// + /// Transform a flat list of doubles representing points + /// + [Obsolete("Use transform method in Point class", true)] + public List ApplyToPoints(List points) + { + if (points.Count % 3 != 0) + { + throw new SpeckleException( + "Cannot apply transform as the points list is malformed: expected length to be multiple of 3" + ); + } + + var transformed = new List(points.Count); + for (var i = 0; i < points.Count; i += 3) + { + var point = new Point(points[i], points[i + 1], points[i + 2]); + point.TransformTo(this, out Point transformedPoint); + transformed.AddRange(transformedPoint.ToList()); + } + return transformed; + } + + /// + /// Transform a flat list of speckle Points + /// + [Obsolete("Use transform method in Point class", true)] + public List ApplyToPoints(List points) + { + var transformedPoints = new List(); + foreach (var point in points) + { + point.TransformTo(this, out Point transformedPoint); + transformedPoints.Add(transformedPoint); + } + return transformedPoints; + } + + /// + /// Transform a single speckle Point + /// + [Obsolete("Use transform method in Point class", true)] + public Point? 
ApplyToPoint(Point point) + { + if (point == null) + { + return null; + } + + point.TransformTo(this, out Point transformedPoint); + return transformedPoint; + } + + /// + /// Transform a list of three doubles representing a point + /// + [Obsolete("Use transform method in Point class", true)] + public List ApplyToPoint(List point) + { + var newPoint = new Point(point[0], point[1], point[2]); + newPoint.TransformTo(this, out Point transformed); + return transformed.ToList(); + } + + /// + /// Transform a single speckle Vector + /// + [Obsolete("Use transform method in Vector class", true)] + public Vector ApplyToVector(Vector vector) + { + var newCoords = ApplyToVector(new List { vector.x, vector.y, vector.z }); + + return new Vector(newCoords[0], newCoords[1], newCoords[2], vector.units, vector.applicationId); + } + + /// + /// Transform a list of three doubles representing a vector + /// + [Obsolete("Use transform method in Vector class", true)] + public List ApplyToVector(List vector) + { + var newPoint = new List(); + + for (var i = 0; i < 12; i += 4) + { + newPoint.Add(vector[0] * value[i] + vector[1] * value[i + 1] + vector[2] * value[i + 2]); + } + + return newPoint; + } + + /// + /// Transform a flat list of ICurves. Note that if any of the ICurves does not implement `ITransformable`, + /// it will not be returned. + /// + [Obsolete("Use transform method in Curve class", true)] + public List ApplyToCurves(List curves, out bool success) + { + // TODO: move to curve class + success = true; + var transformed = new List(); + foreach (var curve in curves) + { + if (curve is ITransformable c) + { + c.TransformTo(this, out ITransformable tc); + transformed.Add((ICurve)tc); + } + else + { + success = false; + } + } + + return transformed; + } + + public static Transform Multiply(Transform left, Transform right) + { + throw new NotImplementedException(); + } + + #endregion +} diff --git a/src/Objects/Primitive/Chunk.cs b/src/Objects/Primitive/Chunk.cs new file mode 100644 index 00000000..96e58c7e --- /dev/null +++ b/src/Objects/Primitive/Chunk.cs @@ -0,0 +1,9 @@ +//namespace Objects.Primitive +//{ +// [SchemaIgnore] +// public class Chunk : Base +// { +// public List data { get; set; } = new List(); +// public Chunk() { } +// } +//} diff --git a/src/Objects/Primitive/Interval.cs b/src/Objects/Primitive/Interval.cs new file mode 100644 index 00000000..b393db5f --- /dev/null +++ b/src/Objects/Primitive/Interval.cs @@ -0,0 +1,27 @@ +using System; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; + +namespace Objects.Primitive; + +public class Interval : Base +{ + public Interval() { } + + public Interval(double start, double end) + { + this.start = start; + this.end = end; + } + + public double? start { get; set; } + public double? end { get; set; } + + [JsonIgnore] + public double Length => Math.Abs((end ?? 0) - (start ?? 
0)); + + public override string ToString() + { + return base.ToString() + $"[{start}, {end}]"; + } +} diff --git a/src/Objects/Primitive/Interval2d.cs b/src/Objects/Primitive/Interval2d.cs new file mode 100644 index 00000000..fc2cd985 --- /dev/null +++ b/src/Objects/Primitive/Interval2d.cs @@ -0,0 +1,23 @@ +using Speckle.Core.Models; + +namespace Objects.Primitive; + +public class Interval2d : Base +{ + public Interval2d() { } + + public Interval2d(Interval u, Interval v) + { + this.u = u; + this.v = v; + } + + public Interval2d(double start_u, double end_u, double start_v, double end_v) + { + u = new Interval(start_u, end_u); + v = new Interval(start_v, end_v); + } + + public Interval u { get; set; } + public Interval v { get; set; } +} diff --git a/src/Objects/Structural/Analysis/Model.cs b/src/Objects/Structural/Analysis/Model.cs new file mode 100644 index 00000000..4092ec7c --- /dev/null +++ b/src/Objects/Structural/Analysis/Model.cs @@ -0,0 +1,63 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Analysis; + +public class Model : Base +{ + public Model() { } + + /// + /// SchemaBuilder constructor for a structural model object + /// + /// + /// + /// + /// + /// + /// + [SchemaInfo("Model", "Creates a Speckle structural model object", "Structural", "Analysis")] + public Model( + ModelInfo? specs = null, + List? nodes = null, + List? elements = null, + List? loads = null, + List? restraints = null, + List? properties = null, + List? materials = null + ) + { + this.specs = specs; + this.nodes = nodes; + this.elements = elements; + this.loads = loads; + this.restraints = restraints; + this.properties = properties; + this.materials = materials; + } + + public ModelInfo? specs { get; set; } //container for model and project specifications + + [DetachProperty, Chunkable(5000)] + public List? nodes { get; set; } //nodes list + + [DetachProperty, Chunkable(5000)] + public List? elements { get; set; } //element (or member) list + + [DetachProperty, Chunkable(5000)] + public List? loads { get; set; } //loads list + + [DetachProperty, Chunkable(5000)] + public List? restraints { get; set; } //supports list + + [DetachProperty, Chunkable(5000)] + public List? properties { get; set; } //properties list + + [DetachProperty, Chunkable(5000)] + public List? materials { get; set; } //materials list + + // add "other" - ex. assemblies, grid lines, grid planes, storeys etc? alignment/paths? + + public string? layerDescription { get; set; } //design layer, analysis layer +} diff --git a/src/Objects/Structural/Analysis/ModelInfo.cs b/src/Objects/Structural/Analysis/ModelInfo.cs new file mode 100644 index 00000000..9acd4ee6 --- /dev/null +++ b/src/Objects/Structural/Analysis/ModelInfo.cs @@ -0,0 +1,52 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Analysis; + +public class ModelInfo : Base //titles +{ + public ModelInfo() { } + + /// + /// SchemaBuilder constructor for a model specifications (containing general model and project info) object + /// + /// + /// + /// + /// + /// + /// Initials that identify the creator of the model + /// + [SchemaInfo( + "ModelInfo", + "Creates a Speckle object which describes basic model and project information for a structural model", + "Structural", + "Analysis" + )] + public ModelInfo( + string? name = null, + string? description = null, + string? projectNumber = null, + string? projectName = null, + ModelSettings? settings = null, + string? 
engInitials = null, + string? application = null + ) + { + this.name = name; + this.description = description; + this.projectNumber = projectNumber; + this.projectName = projectName; + this.settings = settings; + initials = engInitials; + this.application = application; + } + + public string? name { get; set; } //title + public string? description { get; set; } //subtitle + public string? projectNumber { get; set; } //could a project info object be a potential upstream change, as addition to default Speckle Kit? + public string? projectName { get; set; } + public ModelSettings? settings { get; set; } + public string? initials { get; set; } //engineer initials + public string? application { get; set; } //ex. GSA, Tekla (reference Applications class?) +} diff --git a/src/Objects/Structural/Analysis/ModelSettings.cs b/src/Objects/Structural/Analysis/ModelSettings.cs new file mode 100644 index 00000000..3ec1a2c0 --- /dev/null +++ b/src/Objects/Structural/Analysis/ModelSettings.cs @@ -0,0 +1,48 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Analysis; + +/// +/// Codes and standards references, model units, design settings, analysis settings, precision and tolerances +/// +public class ModelSettings : Base +{ + public ModelSettings() { } + + /// + /// SchemaBuilder constructor for a model settings object + /// + /// + /// + /// + /// + [SchemaInfo( + "ModelSettings", + "Creates a Speckle object which describes design and analysis settings for the structural model", + "Structural", + "Analysis" + )] + public ModelSettings( + ModelUnits? modelUnits = null, + string? steelCode = null, + string? concreteCode = null, + double coincidenceTolerance = 10 + ) + { + this.modelUnits = modelUnits ?? new ModelUnits(UnitsType.Metric); + this.steelCode = steelCode; + this.concreteCode = concreteCode; + this.coincidenceTolerance = coincidenceTolerance; + } + + /// + /// Units object containing units information for key structural model quantities + /// + [DetachProperty] + public ModelUnits modelUnits { get; set; } + + public string? steelCode { get; set; } //could be enum + public string? 
concreteCode { get; set; } //could be enum + public double coincidenceTolerance { get; set; } +} diff --git a/src/Objects/Structural/Analysis/ModelUnits.cs b/src/Objects/Structural/Analysis/ModelUnits.cs new file mode 100644 index 00000000..9413fb76 --- /dev/null +++ b/src/Objects/Structural/Analysis/ModelUnits.cs @@ -0,0 +1,107 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Analysis; + +public class ModelUnits : Base +{ + public ModelUnits() { } + + [SchemaInfo( + "ModelUnits", + "Creates a Speckle object which specifies the units associated with the model", + "Structural", + "Analysis" + )] + public ModelUnits([SchemaParamInfo("Select a set of default units based on the unit system")] UnitsType unitsType) + { + if (unitsType == UnitsType.Metric) + { + length = "m"; + sections = "m"; + displacements = "m"; + stress = "Pa"; + force = "N"; + mass = "kg"; + time = "s"; + temperature = "K"; + velocity = "m/s"; + acceleration = "m/s2"; + energy = "J"; + angle = "deg"; + } + if (unitsType == UnitsType.Imperial) + { + length = "ft"; + sections = "in"; + displacements = "in"; + stress = "kip/in2"; + force = "kip"; + mass = "lb"; + time = "s"; + temperature = "F"; + velocity = "ft/s"; + acceleration = "ft/s2"; + energy = "ft.lbf"; + angle = "deg"; + } + } + + [SchemaInfo( + "ModelUnits (custom)", + "Creates a Speckle object which specifies the units associated with the model", + "Structural", + "Analysis" + )] + public ModelUnits( + [SchemaParamInfo("Used for length and length derived units such as area")] string length = "m", + [SchemaParamInfo("Used for cross-sectional properties")] string sections = "m", + [SchemaParamInfo("Used for displacements and cross-sectional dimensions")] string displacements = "m", + [SchemaParamInfo( + "Used for stress (distinct from force and length) and stress related quantities like the elastic modulus" + )] + string stress = "Pa", + [SchemaParamInfo("Used for force and force derived units such as moment, etc., but not for stress")] + string force = "N", + [SchemaParamInfo("Used for mass and mass derived units such as inertia")] string mass = "kg", + [SchemaParamInfo("Used for time and time derived units, such as frequency")] string time = "s", + [SchemaParamInfo("Used for temperature and temperature derived units such as coefficients of expansion")] + string temperature = "K", + [SchemaParamInfo("Used for velocity and velocity derived units")] string velocity = "m/s", + [SchemaParamInfo( + "Used for acceleration and acceleration derived units (considered as distinct from length and time units)" + )] + string acceleration = "m/s2", + [SchemaParamInfo("Used for energy and energy derived units (considered as distinct from force and length units)")] + string energy = "J", + [SchemaParamInfo("To allow selection between degrees and radians for angle measures")] string angle = "deg" + ) + { + this.length = length; + this.sections = sections; + this.displacements = displacements; + this.stress = stress; + this.force = force; + this.mass = mass; + this.time = time; + this.temperature = temperature; + this.velocity = velocity; + this.acceleration = acceleration; + this.energy = energy; + } + + // use enums instead of strings + public string length { get; set; } // m, cm, mm, ft, in + public string sections { get; set; } //m, cm, mm, ft, in + public string displacements { get; set; } // m, cm, mm, ft, in + public string stress { get; set; } //Pa, kPa, MPa, GPa, N/m², N/mm², kip/in², psi, psf, ksi + public string force { get; set; } 
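// Usage sketch (illustrative only, not part of this patch): the two ModelUnits constructors
// above. The UnitsType overload fills in a consistent metric or imperial set, while the custom
// overload lets individual quantities be overridden. "EC3"/"EC2" below are placeholder design
// code names, not values prescribed by these classes.
using Objects.Structural.Analysis;

public static class ModelUnitsSample
{
  public static ModelSettings MetricWithCustomStress()
  {
    // Mostly metric, but report sections/displacements in mm and stress in MPa.
    var units = new ModelUnits(length: "m", sections: "mm", displacements: "mm", stress: "MPa");

    return new ModelSettings(modelUnits: units, steelCode: "EC3", concreteCode: "EC2", coincidenceTolerance: 10);
  }

  public static ModelUnits ImperialDefaults()
  {
    // Preset ft/in/kip defaults, as listed in the UnitsType constructor above.
    return new ModelUnits(UnitsType.Imperial);
  }
}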
//N, kN, MN, lbf, kip, tf + public string mass { get; set; } //kg, t, kt, g, lb, Ton, slug, kip.s²/in, kip.s²/ft, lbf.s²/in, lbf.s²/ft, kip + public string time { get; set; } // s, ms, min, h, d + public string temperature { get; set; } // °C, K, °F + public string velocity { get; set; } //m/s, cm/s, mm/s, ft/s, in/s, km/h, mph + public string acceleration { get; set; } //m/s², cm/s², mm/s², ft/s², in/s², g, %g, milli-g, Gal + public string energy { get; set; } //J, KJ, MJ, GJ, kWh, in.lbf, ft.lbf, cal, Btu + public string angle { get; set; } //deg, rad + public string strain { get; set; } //ε, %ε, mε, με +} diff --git a/src/Objects/Structural/Analysis/UnitTypes.cs b/src/Objects/Structural/Analysis/UnitTypes.cs new file mode 100644 index 00000000..07bca554 --- /dev/null +++ b/src/Objects/Structural/Analysis/UnitTypes.cs @@ -0,0 +1,7 @@ +namespace Objects.Structural.Analysis; + +public enum UnitsType +{ + Metric, + Imperial +} diff --git a/src/Objects/Structural/Axis.cs b/src/Objects/Structural/Axis.cs new file mode 100644 index 00000000..5b9c47f5 --- /dev/null +++ b/src/Objects/Structural/Axis.cs @@ -0,0 +1,15 @@ +namespace Objects.Structural; + +public enum AxisType +{ + Cartesian, + Cylindrical, + Spherical +} + +public enum LoadAxisType +{ + Global, + Local, // local element axes + DeformedLocal // element local axis that is embedded in the element as it deforms +} diff --git a/src/Objects/Structural/CSI/Analysis/CSIStories.cs b/src/Objects/Structural/CSI/Analysis/CSIStories.cs new file mode 100644 index 00000000..e6fafb11 --- /dev/null +++ b/src/Objects/Structural/CSI/Analysis/CSIStories.cs @@ -0,0 +1,47 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Analysis; + +public class CSIStories : Base +{ + public double BaseElevation { get; set; } + public int NumberStories { get; set; } + + [DetachProperty] + public List CSIStory { get; set; } +} + +public class CSIStorey : Storey +{ + public CSIStorey( + string name, + double elevation, + double storeyHeight, + bool isMasterStory, + string similarToStory, + bool spliceAbove, + double spliceHeight + ) + { + this.name = name; + this.elevation = elevation; + this.storeyHeight = storeyHeight; + IsMasterStory = isMasterStory; + SimilarToStory = similarToStory; + SpliceAbove = spliceAbove; + SpliceHeight = spliceHeight; + Color = 0; + } + + public CSIStorey() { } + + public double storeyHeight { get; set; } + public bool IsMasterStory { get; set; } + public string SimilarToStory { get; set; } + public bool SpliceAbove { get; set; } + + public double SpliceHeight { get; set; } + public int Color { get; set; } +} diff --git a/src/Objects/Structural/CSI/Analysis/ETABSAnalysis.cs b/src/Objects/Structural/CSI/Analysis/ETABSAnalysis.cs new file mode 100644 index 00000000..ff502125 --- /dev/null +++ b/src/Objects/Structural/CSI/Analysis/ETABSAnalysis.cs @@ -0,0 +1,93 @@ +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Analysis; + +public class CSIAnalysis : Base +{ + public ActiveDOFs activeDOFs { get; set; } + public FloorMeshSettings floorMeshSettings { get; set; } +} + +public class ActiveDOFs : Base +{ + public ActiveDOFs() { } + + public ActiveDOFs(bool UX, bool UY, bool UZ, bool RX, bool RY, bool RZ) + { + this.UX = UX; + this.UY = UY; + this.UZ = UZ; + this.RX = RX; + this.RY = RY; + this.RZ = RZ; + } + + public bool UX { get; set; } + public bool UY { get; set; } + public bool UZ { get; set; } + public bool RX { get; set; } + public bool RY { 
get; set; } + public bool RZ { get; set; } +} + +public class FloorMeshSettings : Base +{ + public FloorMeshSettings() { } + + public FloorMeshSettings(MeshOption meshOption, double maximumMeshSize) + { + this.meshOption = meshOption; + this.maximumMeshSize = maximumMeshSize; + } + + public MeshOption meshOption { get; set; } + public double maximumMeshSize { get; set; } +} + +public class WallMeshSettings : Base +{ + public WallMeshSettings() { } + + public WallMeshSettings(double maximumMeshSize) + { + this.maximumMeshSize = maximumMeshSize; + } + + public double maximumMeshSize { get; set; } +} + +public class CrackingAnalysisOptions : Base +{ + public CrackingAnalysisOptions() { } + + public CrackingAnalysisOptions(string reinforcementSource, double minTensionRatio, double minCompressionRatio) + { + this.reinforcementSource = reinforcementSource; + this.minTensionRatio = minTensionRatio; + this.minCompressionRatio = minCompressionRatio; + } + + public string reinforcementSource { get; set; } + public double minTensionRatio { get; set; } + public double minCompressionRatio { get; set; } +} + +public class SAPFireOptions : Base +{ + public SAPFireOptions() { } + + public SAPFireOptions(SolverOption solverOption, AnalysisProcess analysisProcess) + { + this.solverOption = solverOption; + this.analysisProcess = analysisProcess; + } + + public SolverOption solverOption { get; set; } + public AnalysisProcess analysisProcess { get; set; } +} + +public enum MeshOption { } + +public enum SolverOption { } + +public enum AnalysisProcess { } diff --git a/src/Objects/Structural/CSI/Analysis/ETABSAreaType.cs b/src/Objects/Structural/CSI/Analysis/ETABSAreaType.cs new file mode 100644 index 00000000..23fdcd1f --- /dev/null +++ b/src/Objects/Structural/CSI/Analysis/ETABSAreaType.cs @@ -0,0 +1,37 @@ +namespace Objects.Structural.CSI.Analysis; + +public enum CSIPropertyType2D +{ + Deck, + Slab, + Shell, + Wall +} + +public enum SlabType +{ + Slab, + Drop, + Ribbed, + Waffle, + Mat, + Footing, + Null +} + +public enum ShellType +{ + ShellThin, + ShellThick, + Membrane, + Layered, + Null +} + +public enum DeckType +{ + Filled, + Unfilled, + SolidSlab, + Null +} diff --git a/src/Objects/Structural/CSI/Analysis/ETABSLoadingType.cs b/src/Objects/Structural/CSI/Analysis/ETABSLoadingType.cs new file mode 100644 index 00000000..abb38360 --- /dev/null +++ b/src/Objects/Structural/CSI/Analysis/ETABSLoadingType.cs @@ -0,0 +1,7 @@ +namespace Objects.Structural.CSI.Analysis; + +public enum WindPressureType +{ + Windward, + other +} diff --git a/src/Objects/Structural/CSI/Geometry/CSIElement1D.cs b/src/Objects/Structural/CSI/Geometry/CSIElement1D.cs new file mode 100644 index 00000000..44d682b4 --- /dev/null +++ b/src/Objects/Structural/CSI/Geometry/CSIElement1D.cs @@ -0,0 +1,123 @@ +using Objects.Geometry; +using Objects.Structural.CSI.Properties; +using Objects.Structural.Geometry; +using Objects.Structural.Properties; +using Objects.Structural.Results; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Geometry; + +public class CSIElement1D : Element1D +{ + /// + /// SchemaBuilder constructor for structural 1D element (based on local axis) + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + [SchemaInfo("Element1D (from local axis)", "Creates a Speckle CSI 1D element (from local axis)", "CSI", "Geometry")] + public CSIElement1D( + Line baseLine, + Property1D property, + ElementType1D type, + string? 
name = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end1Releases = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end2Releases = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end1Offset = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end2Offset = null, + Plane? localAxis = null, + CSILinearSpring? CSILinearSpring = null, + [SchemaParamInfo("an Array of 8 values referring to the modifiers as seen in CSI in order")] + double[]? Modifier = null, + DesignProcedure DesignProcedure = DesignProcedure.NoDesign + ) + { + this.baseLine = baseLine; + this.property = property; + this.type = type; + this.name = name; + this.end1Releases = end1Releases ?? new Restraint("FFFFFF"); + this.end2Releases = end2Releases ?? new Restraint("FFFFFF"); + this.end1Offset = end1Offset ?? new Vector(0, 0, 0); + this.end2Offset = end2Offset ?? new Vector(0, 0, 0); + this.localAxis = localAxis; + this.CSILinearSpring = CSILinearSpring; + this.DesignProcedure = DesignProcedure; + Modifiers = Modifier; + } + + /// + /// SchemaBuilder constructor for structural 1D element (based on orientation node and angle) + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + [SchemaInfo( + "Element1D (from orientation node and angle)", + "Creates a Speckle CSI 1D element (from orientation node and angle)", + "CSI", + "Geometry" + )] + public CSIElement1D( + Line baseLine, + Property1D property, + ElementType1D type, + string? name = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end1Releases = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end2Releases = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end1Offset = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end2Offset = null, + Node? orientationNode = null, + double orientationAngle = 0, + CSILinearSpring? CSILinearSpring = null, + [SchemaParamInfo("an Array of 8 values referring to the modifiers as seen in CSI in order")] + double[]? Modifier = null, + DesignProcedure DesignProcedure = DesignProcedure.NoDesign + ) + { + this.baseLine = baseLine; + this.property = property; + this.type = type; + this.name = name; + this.end1Releases = end1Releases ?? new Restraint("FFFFFF"); + this.end2Releases = end2Releases ?? new Restraint("FFFFFF"); + this.end1Offset = end1Offset ?? new Vector(0, 0, 0); + this.end2Offset = end2Offset ?? new Vector(0, 0, 0); + this.orientationNode = orientationNode; + this.orientationAngle = orientationAngle; + this.CSILinearSpring = CSILinearSpring; + this.DesignProcedure = DesignProcedure; + Modifiers = Modifier; + } + + public CSIElement1D() { } + + [DetachProperty] + public CSILinearSpring? CSILinearSpring { get; set; } + + public string PierAssignment { get; set; } + public string SpandrelAssignment { get; set; } + public double[]? Modifiers { get; set; } + public DesignProcedure DesignProcedure { get; set; } + + [DetachProperty] + public AnalyticalResults? 
AnalysisResults { get; set; } +} diff --git a/src/Objects/Structural/CSI/Geometry/CSIElement2D.cs b/src/Objects/Structural/CSI/Geometry/CSIElement2D.cs new file mode 100644 index 00000000..cdf6a586 --- /dev/null +++ b/src/Objects/Structural/CSI/Geometry/CSIElement2D.cs @@ -0,0 +1,51 @@ +using System.Collections.Generic; +using Objects.Structural.CSI.Properties; +using Objects.Structural.Geometry; +using Objects.Structural.Properties; +using Objects.Structural.Results; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Geometry; + +public class CSIElement2D : Element2D +{ + [SchemaInfo( + "Element2D", + "Creates a Speckle CSI 2D element (based on a list of edge ie. external, geometry defining nodes)", + "CSI", + "Geometry" + )] + public CSIElement2D( + List nodes, + Property2D property, + double offset = 0, + double orientationAngle = 0, + double[]? modifiers = null, + CSIAreaSpring? CSIAreaSpring = null, + CSIDiaphragm? CSIDiaphragm = null + ) + { + topology = nodes; + this.property = property; + this.offset = offset; + this.orientationAngle = orientationAngle; + DiaphragmAssignment = CSIDiaphragm?.name; + this.CSIAreaSpring = CSIAreaSpring; + this.modifiers = modifiers; + } + + public CSIElement2D() { } + + [DetachProperty] + public CSIAreaSpring? CSIAreaSpring { get; set; } + + public string? DiaphragmAssignment { get; set; } + public string? PierAssignment { get; set; } + public string? SpandrelAssignment { get; set; } + public double[]? modifiers { get; set; } + public bool Opening { get; set; } + + [DetachProperty] + public AnalyticalResults? AnalysisResults { get; set; } +} diff --git a/src/Objects/Structural/CSI/Geometry/CSIGridLines.cs b/src/Objects/Structural/CSI/Geometry/CSIGridLines.cs new file mode 100644 index 00000000..77b4b153 --- /dev/null +++ b/src/Objects/Structural/CSI/Geometry/CSIGridLines.cs @@ -0,0 +1,16 @@ +using System.Collections.Generic; +using Objects.BuiltElements; +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Geometry; + +public class CSIGridLines : Base +{ + public double Xo { get; set; } + public double Yo { get; set; } + public double Rz { get; set; } + public string GridSystemType { get; set; } + + [DetachProperty] + public List gridLines { get; set; } +} diff --git a/src/Objects/Structural/CSI/Geometry/CSINode.cs b/src/Objects/Structural/CSI/Geometry/CSINode.cs new file mode 100644 index 00000000..95bff5b5 --- /dev/null +++ b/src/Objects/Structural/CSI/Geometry/CSINode.cs @@ -0,0 +1,62 @@ +using Objects.Geometry; +using Objects.Structural.CSI.Properties; +using Objects.Structural.Geometry; +using Objects.Structural.Properties; +using Objects.Structural.Results; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Geometry; + +public class CSINode : Node +{ + [SchemaInfo( + "Node with properties", + "Creates a Speckle CSI node with spring, mass and/or damper properties", + "CSI", + "Geometry" + )] + public CSINode( + Point basePoint, + string? name = null, + [SchemaParamInfo("If null, restraint condition defaults to free/fully released")] Restraint? restraint = null, + [SchemaParamInfo( + "If null, axis defaults to world xy (z axis defines the vertical direction, positive direction is up)" + )] + Axis? constraintAxis = null, + CSISpringProperty? springProperty = null, + PropertyMass? massProperty = null, + PropertyDamper? damperProperty = null, + CSIDiaphragm? 
CSIDiaphragm = null, + DiaphragmOption DiaphragmOption = DiaphragmOption.FromShellObject + ) + { + this.basePoint = basePoint; + this.name = name; + this.restraint = restraint ?? new Restraint("RRRRRR"); + this.constraintAxis = + constraintAxis + ?? new Axis( + "Global", + AxisType.Cartesian, + new Plane(new Point(0, 0), new Vector(0, 0, 1), new Vector(1, 0, 0), new Vector(0, 1, 0)) + ); + CSISpringProperty = springProperty; + this.massProperty = massProperty; + this.damperProperty = damperProperty; + DiaphragmAssignment = CSIDiaphragm?.name; + this.DiaphragmOption = DiaphragmOption; + } + + public CSINode() { } + + [DetachProperty] + public CSISpringProperty? CSISpringProperty { get; set; } + + public string? DiaphragmAssignment { get; set; } + + public DiaphragmOption DiaphragmOption { get; set; } + + [DetachProperty] + public AnalyticalResults? AnalysisResults { get; set; } +} diff --git a/src/Objects/Structural/CSI/Geometry/CSIPier.cs b/src/Objects/Structural/CSI/Geometry/CSIPier.cs new file mode 100644 index 00000000..6e727ada --- /dev/null +++ b/src/Objects/Structural/CSI/Geometry/CSIPier.cs @@ -0,0 +1,65 @@ +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Geometry; + +public class CSIPier : Base +{ + public CSIPier( + string name, + int numberStories, + string[] storyName, + double[] axisAngle, + int[] numAreaObjs, + int[] numLineObjs, + double[] widthBot, + double[] thicknessBot, + double[] widthTop, + double[] thicknessTop, + string[] matProp, + double[] centerofGravityBotX, + double[] centerofGravityBotY, + double[] centerofGravityBotZ, + double[] centerofGravityTopX, + double[] centerofGravityTopY, + double[] centerofGravityTopZ + ) + { + this.name = name; + this.numberStories = numberStories; + this.storyName = storyName; + this.axisAngle = axisAngle; + this.numAreaObjs = numAreaObjs; + this.numLineObjs = numLineObjs; + this.widthBot = widthBot; + this.thicknessBot = thicknessBot; + this.widthTop = widthTop; + this.thicknessTop = thicknessTop; + this.matProp = matProp; + this.centerofGravityBotX = centerofGravityBotX; + this.centerofGravityBotY = centerofGravityBotY; + this.centerofGravityBotZ = centerofGravityBotZ; + this.centerofGravityTopX = centerofGravityTopX; + this.centerofGravityTopY = centerofGravityTopY; + this.centerofGravityTopZ = centerofGravityTopZ; + } + + public CSIPier() { } + + public string name { get; set; } + public int numberStories { get; set; } + public string[] storyName { get; set; } + public double[] axisAngle { get; set; } + public int[] numAreaObjs { get; set; } + public int[] numLineObjs { get; set; } + public double[] widthBot { get; set; } + public double[] thicknessBot { get; set; } + public double[] widthTop { get; set; } + public double[] thicknessTop { get; set; } + public string[] matProp { get; set; } + public double[] centerofGravityBotX { get; set; } + public double[] centerofGravityBotY { get; set; } + public double[] centerofGravityBotZ { get; set; } + public double[] centerofGravityTopX { get; set; } + public double[] centerofGravityTopY { get; set; } + public double[] centerofGravityTopZ { get; set; } +} diff --git a/src/Objects/Structural/CSI/Geometry/CSISpandrel.cs b/src/Objects/Structural/CSI/Geometry/CSISpandrel.cs new file mode 100644 index 00000000..bc7b3819 --- /dev/null +++ b/src/Objects/Structural/CSI/Geometry/CSISpandrel.cs @@ -0,0 +1,68 @@ +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Geometry; + +public class CSISpandrel : Base +{ + public CSISpandrel( + string name, + bool multistory, + 
int numberStories, + string[] storyName, + int[] numAreaObjs, + int[] numLineObjs, + double[] length, + double[] depthLeft, + double[] thickLeft, + double[] depthRight, + double[] thickRight, + string[] matProp, + double[] centerofGravityLeftX, + double[] centerofGravityLeftY, + double[] centerofGravityLeftZ, + double[] centerofGravityRightX, + double[] centerofGravityRightY, + double[] centerofGravityRightZ + ) + { + this.name = name; + this.multistory = multistory; + this.numberStories = numberStories; + this.storyName = storyName; + this.numAreaObjs = numAreaObjs; + this.numLineObjs = numLineObjs; + this.length = length; + this.depthLeft = depthLeft; + this.thickLeft = thickLeft; + this.depthRight = depthRight; + this.thickRight = thickRight; + this.matProp = matProp; + this.centerofGravityLeftX = centerofGravityLeftX; + this.centerofGravityLeftY = centerofGravityLeftY; + this.centerofGravityLeftZ = centerofGravityLeftZ; + this.centerofGravityRightX = centerofGravityRightX; + this.centerofGravityRightY = centerofGravityRightY; + this.centerofGravityRightZ = centerofGravityRightZ; + } + + public CSISpandrel() { } + + public string name { get; set; } + public bool multistory { get; set; } + public int numberStories { get; set; } + public string[] storyName { get; set; } + public int[] numAreaObjs { get; set; } + public int[] numLineObjs { get; set; } + public double[] length { get; set; } + public double[] depthLeft { get; set; } + public double[] thickLeft { get; set; } + public double[] depthRight { get; set; } + public double[] thickRight { get; set; } + public string[] matProp { get; set; } + public double[] centerofGravityLeftX { get; set; } + public double[] centerofGravityLeftY { get; set; } + public double[] centerofGravityLeftZ { get; set; } + public double[] centerofGravityRightX { get; set; } + public double[] centerofGravityRightY { get; set; } + public double[] centerofGravityRightZ { get; set; } +} diff --git a/src/Objects/Structural/CSI/Geometry/CSITendon.cs b/src/Objects/Structural/CSI/Geometry/CSITendon.cs new file mode 100644 index 00000000..ad7c1a08 --- /dev/null +++ b/src/Objects/Structural/CSI/Geometry/CSITendon.cs @@ -0,0 +1,22 @@ +using Objects.Geometry; +using Objects.Structural.CSI.Properties; +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Geometry; + +public class CSITendon : CSIElement1D +{ + public CSITendon(string name, Polycurve polycurve, CSITendonProperty CSITendonProperty) + { + this.name = name; + this.polycurve = polycurve; + this.CSITendonProperty = CSITendonProperty; + } + + public CSITendon() { } + + public Polycurve polycurve { get; set; } + + [DetachProperty] + public CSITendonProperty CSITendonProperty { get; set; } +} diff --git a/src/Objects/Structural/CSI/Loading/CSIWindLoading.cs b/src/Objects/Structural/CSI/Loading/CSIWindLoading.cs new file mode 100644 index 00000000..c1143183 --- /dev/null +++ b/src/Objects/Structural/CSI/Loading/CSIWindLoading.cs @@ -0,0 +1,11 @@ +using Objects.Structural.CSI.Analysis; +using Objects.Structural.Loading; + +namespace Objects.Structural.CSI.Loading; + +public class CSIWindLoadingFace : LoadFace +{ + public double Cp { get; set; } + + public WindPressureType WindPressureType { get; set; } +} diff --git a/src/Objects/Structural/CSI/Materials/CSIConcrete.cs b/src/Objects/Structural/CSI/Materials/CSIConcrete.cs new file mode 100644 index 00000000..9c7c2073 --- /dev/null +++ b/src/Objects/Structural/CSI/Materials/CSIConcrete.cs @@ -0,0 +1,14 @@ +using Objects.Structural.Materials; + +namespace 
Objects.Structural.CSI.Materials; + +public class CSIConcrete : Concrete +{ + public int SSHysType { get; set; } + public int SSType { get; set; } + + public double finalSlope { get; set; } + + public double frictionAngle { get; set; } + public double dialationalAngle { get; set; } +} diff --git a/src/Objects/Structural/CSI/Materials/CSIRebar.cs b/src/Objects/Structural/CSI/Materials/CSIRebar.cs new file mode 100644 index 00000000..960494ea --- /dev/null +++ b/src/Objects/Structural/CSI/Materials/CSIRebar.cs @@ -0,0 +1,5 @@ +using Objects.Structural.Materials; + +namespace Objects.Structural.CSI.Materials; + +public class CSIRebar : StructuralMaterial { } diff --git a/src/Objects/Structural/CSI/Materials/CSISteel.cs b/src/Objects/Structural/CSI/Materials/CSISteel.cs new file mode 100644 index 00000000..da707278 --- /dev/null +++ b/src/Objects/Structural/CSI/Materials/CSISteel.cs @@ -0,0 +1,13 @@ +using Objects.Structural.Materials; + +namespace Objects.Structural.CSI.Materials; + +public class CSISteel : Steel +{ + public int SSHysType { get; set; } + public int SSType { get; set; } + public double EFy { get; set; } + public double EFu { get; set; } + public double strainAtMaxStress { get; set; } + public double strainAtHardening { get; set; } +} diff --git a/src/Objects/Structural/CSI/Properties/CSIDiaphragm.cs b/src/Objects/Structural/CSI/Properties/CSIDiaphragm.cs new file mode 100644 index 00000000..8326fcba --- /dev/null +++ b/src/Objects/Structural/CSI/Properties/CSIDiaphragm.cs @@ -0,0 +1,19 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.CSI.Properties; + +public class CSIDiaphragm : Base +{ + [SchemaInfo("CSI Diaphragm", "Create an CSI Diaphragm", "CSI", "Properties")] + public CSIDiaphragm(string name, bool semiRigid) + { + this.name = name; + SemiRigid = semiRigid; + } + + public CSIDiaphragm() { } + + public string name { get; set; } + public bool SemiRigid { get; set; } +} diff --git a/src/Objects/Structural/CSI/Properties/CSILinkProperty.cs b/src/Objects/Structural/CSI/Properties/CSILinkProperty.cs new file mode 100644 index 00000000..25f03032 --- /dev/null +++ b/src/Objects/Structural/CSI/Properties/CSILinkProperty.cs @@ -0,0 +1,45 @@ +using Objects.Structural.Properties; +using Speckle.Core.Kits; + +namespace Objects.Structural.CSI.Properties; + +public class CSILinkProperty : Property1D +{ + [SchemaInfo("CSILink", "Create an CSI Link Property", "CSI", "Properties")] + public CSILinkProperty( + string name, + double mass, + double weight, + double rotationalInertia1, + double rotationalInertia2, + double rotationalInertia3, + double m2PdeltaEnd1, + double mP2deltaEnd2, + double mP3deltaEnd1, + double mP3deltaEnd2 + ) + { + this.name = name; + this.mass = mass; + this.weight = weight; + this.rotationalInertia1 = rotationalInertia1; + this.rotationalInertia2 = rotationalInertia2; + this.rotationalInertia3 = rotationalInertia3; + M2PdeltaEnd1 = m2PdeltaEnd1; + MP2deltaEnd2 = mP2deltaEnd2; + MP3deltaEnd1 = mP3deltaEnd1; + MP3deltaEnd2 = mP3deltaEnd2; + } + + public CSILinkProperty() { } + + public double mass { get; set; } + public double weight { get; set; } + public double rotationalInertia1 { get; set; } + public double rotationalInertia2 { get; set; } + public double rotationalInertia3 { get; set; } + public double M2PdeltaEnd1 { get; set; } + public double MP2deltaEnd2 { get; set; } + public double MP3deltaEnd1 { get; set; } + public double MP3deltaEnd2 { get; set; } +} diff --git 
a/src/Objects/Structural/CSI/Properties/CSIProperty2D.cs b/src/Objects/Structural/CSI/Properties/CSIProperty2D.cs new file mode 100644 index 00000000..92eac668 --- /dev/null +++ b/src/Objects/Structural/CSI/Properties/CSIProperty2D.cs @@ -0,0 +1,265 @@ +using Objects.Structural.CSI.Analysis; +using Objects.Structural.Materials; +using Objects.Structural.Properties; +using Speckle.Core.Kits; + +namespace Objects.Structural.CSI.Properties; + +public class CSIOpening : Property2D +{ + [SchemaInfo("Opening", "Create an CSI Opening", "CSI", "Properties")] + public CSIOpening(bool isOpening) + { + this.isOpening = isOpening; + } + + public CSIOpening() { } + + public bool isOpening { get; set; } +} + +public class CSIProperty2D : Property2D +{ + public CSIPropertyType2D type2D { get; set; } + public SlabType slabType { get; set; } + public DeckType deckType { get; set; } + public ShellType shellType { get; set; } + + public class WaffleSlab : CSIProperty2D + { + public WaffleSlab() { } + + [SchemaInfo("WaffleSlab", "Create an CSI Waffle Slab", "CSI", "Properties")] + public WaffleSlab( + string PropertyName, + ShellType shell, + StructuralMaterial ConcreteMaterial, + double Thickness, + double overAllDepth, + double stemWidthBot, + double stemWidthTop, + double ribSpacingDir1, + double ribSpacingDir2 + ) + { + type2D = CSIPropertyType2D.Slab; + slabType = SlabType.Waffle; + deckType = DeckType.Null; + + name = PropertyName; + shellType = shell; + material = ConcreteMaterial; + thickness = Thickness; + + OverAllDepth = overAllDepth; + StemWidthBot = stemWidthBot; + StemWidthTop = stemWidthTop; + RibSpacingDir1 = ribSpacingDir1; + RibSpacingDir2 = ribSpacingDir2; + } + + public double OverAllDepth { get; set; } + public double StemWidthBot { get; set; } + public double StemWidthTop { get; set; } + public double RibSpacingDir1 { get; set; } + public double RibSpacingDir2 { get; set; } + + //[SchemaInfo("WaffleSlab","Define a WaffleSlab Area Property")] + } + + public class RibbedSlab : CSIProperty2D + { + public RibbedSlab() { } + + [SchemaInfo("RibbedSlab", "Create an CSI Ribbed Slab", "CSI", "Properties")] + public RibbedSlab( + string PropertyName, + ShellType shell, + StructuralMaterial ConcreteMaterial, + double Thickness, + double overAllDepth, + double stemWidthBot, + double stemWidthTop, + double ribSpacing, + int ribsParallelTo + ) + { + type2D = CSIPropertyType2D.Slab; + slabType = SlabType.Ribbed; + deckType = DeckType.Null; + + name = PropertyName; + shellType = shell; + material = ConcreteMaterial; + thickness = Thickness; + + OverAllDepth = overAllDepth; + StemWidthBot = stemWidthBot; + StemWidthTop = stemWidthTop; + RibSpacing = ribSpacing; + RibsParallelTo = ribsParallelTo; + } + + public double OverAllDepth { get; set; } + public double StemWidthBot { get; set; } + public double StemWidthTop { get; set; } + public double RibSpacing { get; set; } + public int RibsParallelTo { get; set; } + } + + public class Slab : CSIProperty2D + { + public Slab() { } + + [SchemaInfo("Slab", "Create an CSI Slab", "CSI", "Properties")] + public Slab(string PropertyName, ShellType shell, StructuralMaterial ConcreteMaterial, double Thickness) + { + type2D = CSIPropertyType2D.Slab; + slabType = SlabType.Slab; + deckType = DeckType.Null; + + name = PropertyName; + shellType = shell; + material = ConcreteMaterial; + thickness = Thickness; + } + } + + public class DeckFilled : CSIProperty2D + { + public DeckFilled() { } + + [SchemaInfo("DeckFilled", "Create an CSI Filled Deck", "CSI", "Properties")] + 
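// Usage sketch (illustrative only, not part of this patch): building the plain Slab property
// defined above. Concrete comes from Objects.Structural.Materials and is not part of this file;
// constructing it with a parameterless initializer and a name is an assumption.
using Objects.Structural.CSI.Analysis;
using Objects.Structural.CSI.Properties;
using Objects.Structural.Materials;

public static class CsiSlabPropertySample
{
  public static CSIProperty2D.Slab SimpleSlab()
  {
    var concrete = new Concrete { name = "C30/37" };

    // Thin-shell slab, 0.2 thick in model length units; the constructor sets type2D/slabType/deckType.
    return new CSIProperty2D.Slab("SLAB200", ShellType.ShellThin, concrete, 0.2);
  }
}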
public DeckFilled( + string PropertyName, + ShellType shell, + StructuralMaterial ConcreteMaterial, + double DeckThickness, + double slabDepth, + double shearStudDia, + double shearStudFu, + double shearStudHt, + double ribDepth, + double ribWidthTop, + double ribWidthBot, + double ribSpacing, + double shearThickness, + double unitWeight + ) + { + type2D = CSIPropertyType2D.Deck; + slabType = SlabType.Null; + deckType = DeckType.Filled; + + name = PropertyName; + shellType = shell; + material = ConcreteMaterial; + thickness = DeckThickness; + + SlabDepth = slabDepth; + ShearStudDia = shearStudDia; + ShearStudFu = shearStudFu; + ShearStudHt = shearStudHt; + RibDepth = ribDepth; + RibWidthTop = ribWidthTop; + RibWidthBot = ribWidthBot; + RibSpacing = ribSpacing; + ShearThickness = shearThickness; + UnitWeight = unitWeight; + } + + public double SlabDepth { get; set; } + public double ShearStudDia { get; set; } + public double ShearStudFu { get; set; } + public double ShearStudHt { get; set; } + public double RibDepth { get; set; } + public double RibWidthTop { get; set; } + public double RibWidthBot { get; set; } + public double RibSpacing { get; set; } + public double ShearThickness { get; set; } + public double UnitWeight { get; set; } + } + + public class DeckUnFilled : CSIProperty2D + { + [SchemaInfo("DeckUnFilled", "Create an CSI UnFilled Deck", "CSI", "Properties")] + public DeckUnFilled( + string PropertyName, + ShellType shell, + StructuralMaterial Material, + double DeckThickness, + double slabDepth, + double ribDepth, + double ribWidthTop, + double ribWidthBot, + double ribSpacing, + double shearThickness, + double unitWeight + ) + { + type2D = CSIPropertyType2D.Deck; + slabType = SlabType.Null; + deckType = DeckType.Unfilled; + + name = PropertyName; + shellType = shell; + material = Material; + thickness = DeckThickness; + + SlabDepth = slabDepth; + RibDepth = ribDepth; + RibWidthTop = ribWidthTop; + RibWidthBot = ribWidthBot; + RibSpacing = ribSpacing; + ShearThickness = shearThickness; + UnitWeight = unitWeight; + } + + public DeckUnFilled() { } + + public double SlabDepth { get; set; } + public double RibDepth { get; set; } + public double RibWidthTop { get; set; } + public double RibWidthBot { get; set; } + public double RibSpacing { get; set; } + public double ShearThickness { get; set; } + public double UnitWeight { get; set; } + } + + public class DeckSlab : CSIProperty2D + { + [SchemaInfo("DeckSlab", "Create an CSI Slab Deck", "CSI", "Properties")] + public DeckSlab( + string PropertyName, + ShellType shell, + StructuralMaterial ConcreteMaterial, + double DeckThickness, + double slabDepth, + double shearStudDia, + double shearStudFu, + double shearStudHt + ) + { + type2D = CSIPropertyType2D.Deck; + slabType = SlabType.Null; + deckType = DeckType.SolidSlab; + + name = PropertyName; + shellType = shell; + material = ConcreteMaterial; + thickness = DeckThickness; + + SlabDepth = slabDepth; + ShearStudDia = shearStudDia; + ShearStudFu = shearStudFu; + ShearStudHt = shearStudHt; + } + + public DeckSlab() { } + + public double SlabDepth { get; set; } + public double ShearStudDia { get; set; } + public double ShearStudFu { get; set; } + public double ShearStudHt { get; set; } + } +} diff --git a/src/Objects/Structural/CSI/Properties/CSISpringProperty.cs b/src/Objects/Structural/CSI/Properties/CSISpringProperty.cs new file mode 100644 index 00000000..eb2bd17c --- /dev/null +++ b/src/Objects/Structural/CSI/Properties/CSISpringProperty.cs @@ -0,0 +1,103 @@ +using 
Objects.Structural.Properties; +using Speckle.Core.Kits; + +namespace Objects.Structural.CSI.Properties; + +public class CSISpringProperty : PropertySpring +{ + public CSISpringProperty() { } + + [SchemaInfo("PointSpring from Link", "Create a CSI PointSpring from Link", "CSI", "Properties")] + public CSISpringProperty( + string name, + string cYs, + double StiffnessX, + double StiffnessY, + double StiffnessZ, + double StiffnessXX, + double StiffnessYY, + double StiffnessZZ + ) + { + this.name = name; + springOption = SpringOption.Link; + stiffnessX = StiffnessX; + stiffnessY = StiffnessY; + stiffnessZ = StiffnessZ; + stiffnessXX = StiffnessXX; + stiffnessYY = StiffnessYY; + stiffnessZZ = StiffnessZZ; + CYs = cYs; + } + + [SchemaInfo("PointSpring from Soil Profile", "Create a CSI PointSpring from Soil Profile", "CSI", "Properties")] + public CSISpringProperty(string name, string soilProfile, string footing, double period) + { + this.name = name; + springOption = SpringOption.SoilProfileFooting; + SoilProfile = soilProfile; + this.footing = footing; + this.period = period; + } + + public SpringOption springOption { get; set; } + public string CYs { get; set; } + public string SoilProfile { get; set; } + public string footing { get; set; } + public double period { get; set; } +} + +public class CSILinearSpring : PropertySpring +{ + public CSILinearSpring() { } + + [SchemaInfo("LinearSpring", "Create a CSI LinearSpring", "CSI", "Properties")] + public CSILinearSpring( + string name, + double StiffnessX, + double StiffnessY, + double StiffnessZ, + double StiffnessXX, + NonLinearOptions linearOption1, + NonLinearOptions linearOption2, + string? applicationID = null + ) + { + this.name = name; + stiffnessX = StiffnessX; + stiffnessY = StiffnessY; + stiffnessZ = StiffnessZ; + stiffnessXX = StiffnessXX; + LinearOption1 = linearOption1; + LinearOption2 = linearOption2; + applicationId = applicationID; + } + + public NonLinearOptions LinearOption1 { get; set; } + public NonLinearOptions LinearOption2 { get; set; } +} + +public class CSIAreaSpring : PropertySpring +{ + public CSIAreaSpring() { } + + [SchemaInfo("LinearSpring", "Create a CSI AreaSpring", "CSI", "Properties")] + public CSIAreaSpring( + string name, + double StiffnessX, + double StiffnessY, + double StiffnessZ, + NonLinearOptions linearOption3, + string?
applicationID = null + ) + { + this.name = name; + stiffnessX = StiffnessX; + stiffnessY = StiffnessY; + stiffnessZ = StiffnessZ; + LinearOption3 = linearOption3; + applicationId = applicationID; + } + + public NonLinearOptions LinearOption3 { get; set; } +} diff --git a/src/Objects/Structural/CSI/Properties/CSITendonProperty.cs b/src/Objects/Structural/CSI/Properties/CSITendonProperty.cs new file mode 100644 index 00000000..7ecccfd4 --- /dev/null +++ b/src/Objects/Structural/CSI/Properties/CSITendonProperty.cs @@ -0,0 +1,9 @@ +using Objects.Structural.Properties; + +namespace Objects.Structural.CSI.Properties; + +public class CSITendonProperty : Property1D +{ + public ModelingOption modelingOption { get; set; } + public double Area { get; set; } +} diff --git a/src/Objects/Structural/CSI/Properties/ETABSProperty.cs b/src/Objects/Structural/CSI/Properties/ETABSProperty.cs new file mode 100644 index 00000000..a5f2cb39 --- /dev/null +++ b/src/Objects/Structural/CSI/Properties/ETABSProperty.cs @@ -0,0 +1,38 @@ +namespace Objects.Structural.CSI.Properties; + +public enum DiaphragmOption +{ + Disconnect, + FromShellObject, + DefinedDiaphragm +} + +public enum NonLinearOptions +{ + Linear, + CompressionOnly, + TensionOnly +} + +public enum SpringOption +{ + Link, + SoilProfileFooting +} + +public enum ModelingOption +{ + Loads, + Elements +} + +public enum DesignProcedure +{ + ProgramDetermined, + SteelFrameDesign, + ConcreteFrameDesign, + CompositeBeamDesign, + SteelJoistDesign, + NoDesign, + CompositeColumnDesign +} diff --git a/src/Objects/Structural/GSA/Analysis/GSAAnalysisCase.cs b/src/Objects/Structural/GSA/Analysis/GSAAnalysisCase.cs new file mode 100644 index 00000000..09390a02 --- /dev/null +++ b/src/Objects/Structural/GSA/Analysis/GSAAnalysisCase.cs @@ -0,0 +1,44 @@ +using System; +using System.Collections.Generic; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Analysis; + +public class GSAAnalysisCase : Base +{ + public GSAAnalysisCase() { } + + [SchemaInfo("GSAAnalysisCase", "Creates a Speckle structural analysis case for GSA", "GSA", "Analysis")] + public GSAAnalysisCase( + int nativeId, + string name, + GSATask task, + [SchemaParamInfo("A list of load cases")] List loadCases, + [SchemaParamInfo("A list of load factors (to be mapped to provided load cases)")] List loadFactors + ) + { + if (loadCases.Count != loadFactors.Count) + { + throw new ArgumentException("Number of load cases provided does not match number of load factors provided"); + } + + this.nativeId = nativeId; + this.name = name; + this.task = task; + this.loadCases = loadCases; + this.loadFactors = loadFactors; + } + + public int nativeId { get; set; } + public string name { get; set; } + + [DetachProperty] + public GSATask task { get; set; } //task reference + + [DetachProperty] + public List loadCases { get; set; } + + public List loadFactors { get; set; } +} diff --git a/src/Objects/Structural/GSA/Analysis/GSAStage.cs b/src/Objects/Structural/GSA/Analysis/GSAStage.cs new file mode 100644 index 00000000..d158d459 --- /dev/null +++ b/src/Objects/Structural/GSA/Analysis/GSAStage.cs @@ -0,0 +1,43 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Analysis; + +public class GSAStage : Base +{ + public GSAStage() { } + + [SchemaInfo("GSAStage", "Creates a Speckle structural analysis stage for GSA", "GSA", "Analysis")] + public GSAStage( + int nativeId, + string name, + 
string colour, + List elements, + double creepFactor, + int stageTime, + List lockedElements + ) + { + this.nativeId = nativeId; + this.name = name; + this.colour = colour; + this.elements = elements; + this.creepFactor = creepFactor; + this.stageTime = stageTime; + this.lockedElements = lockedElements; + } + + public int nativeId { get; set; } + public string name { get; set; } + public string colour { get; set; } + + [DetachProperty, Chunkable(5000)] + public List elements { get; set; } + + public double creepFactor { get; set; } //Phi + public int stageTime { get; set; } //number of days + + [DetachProperty, Chunkable(5000)] + public List lockedElements { get; set; } //elements not part of the current analysis stage +} diff --git a/src/Objects/Structural/GSA/Analysis/GSATask.cs b/src/Objects/Structural/GSA/Analysis/GSATask.cs new file mode 100644 index 00000000..8aaf3def --- /dev/null +++ b/src/Objects/Structural/GSA/Analysis/GSATask.cs @@ -0,0 +1,53 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Analysis; + +public class GSATask : Base +{ + public GSATask() { } + + [SchemaInfo("GSAAnalysisTask", "Creates a Speckle structural analysis task for GSA", "GSA", "Analysis")] + public GSATask(int nativeId, string name) + { + this.nativeId = nativeId; + this.name = name; + } + + public int nativeId { get; set; } //equiv to num + public string name { get; set; } + public string stage { get; set; } + public string solver { get; set; } + public SolutionType solutionType { get; set; } + public int modeParameter1 { get; set; } //start mode + public int modeParameter2 { get; set; } //number of modes + public int numIterations { get; set; } + public string PDeltaOption { get; set; } + public string PDeltaCase { get; set; } + public string PrestressCase { get; set; } + public string resultSyntax { get; set; } + public PruningOption prune { get; set; } +} + +public enum SolutionType +{ + Undefined, //no solution specified + Static, + Modal, + Ritz, + Buckling, + StaticPDelta, + ModalPDelta, + RitzPDelta, + Mass, + Stability, + StabilityPDelta, + BucklingNonLinear, + Influence +} + +public enum PruningOption +{ + None, + Influence +} diff --git a/src/Objects/Structural/GSA/Bridge/GSAAlignment.cs b/src/Objects/Structural/GSA/Bridge/GSAAlignment.cs new file mode 100644 index 00000000..75301910 --- /dev/null +++ b/src/Objects/Structural/GSA/Bridge/GSAAlignment.cs @@ -0,0 +1,46 @@ +using System.Collections.Generic; +using Objects.Structural.GSA.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Bridge; + +public class GSAAlignment : Base +{ + public GSAAlignment() { } + + [SchemaInfo( + "GSAAlignment", + "Creates a Speckle structural alignment for GSA (as a setting out feature for bridge models)", + "GSA", + "Bridge" + )] + public GSAAlignment( + int nativeId, + string name, + GSAGridSurface gridSurface, + List chainage, + List curvature + ) + { + this.nativeId = nativeId; + this.name = name; + this.gridSurface = gridSurface; + this.chainage = chainage; + this.curvature = curvature; + } + + public int nativeId { get; set; } + public string name { get; set; } + + [DetachProperty] + public GSAGridSurface gridSurface { get; set; } + + public List chainage { get; set; } + public List curvature { get; set; } + + public int GetNumAlignmentPoints() + { + return chainage.Count + curvature.Count; + } +} diff --git a/src/Objects/Structural/GSA/Bridge/GSAInfluence.cs b/src/Objects/Structural/GSA/Bridge/GSAInfluence.cs new 
file mode 100644 index 00000000..6440d9ad --- /dev/null +++ b/src/Objects/Structural/GSA/Bridge/GSAInfluence.cs @@ -0,0 +1,31 @@ +using Objects.Structural.Loading; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Bridge; + +public class GSAInfluence : Base +{ + public GSAInfluence() { } + + public GSAInfluence(int nativeId, string name, double factor, InfluenceType type, LoadDirection direction) + { + this.nativeId = nativeId; + this.name = name; + this.factor = factor; + this.type = type; + this.direction = direction; + } + + public int nativeId { get; set; } + public string name { get; set; } + public double factor { get; set; } + public InfluenceType type { get; set; } + public LoadDirection direction { get; set; } +} + +public enum InfluenceType +{ + NotSet = 0, + FORCE, + DISPLACEMENT +} diff --git a/src/Objects/Structural/GSA/Bridge/GSAInfluenceBeam.cs b/src/Objects/Structural/GSA/Bridge/GSAInfluenceBeam.cs new file mode 100644 index 00000000..3457977b --- /dev/null +++ b/src/Objects/Structural/GSA/Bridge/GSAInfluenceBeam.cs @@ -0,0 +1,41 @@ +using Objects.Structural.GSA.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Bridge; + +public class GSAInfluenceBeam : GSAInfluence +{ + public GSAInfluenceBeam() { } + + [SchemaInfo( + "GSAInfluenceBeam", + "Creates a Speckle structural beam influence effect for GSA (for an influence analysis)", + "GSA", + "Bridge" + )] + public GSAInfluenceBeam( + int nativeId, + string name, + double factor, + InfluenceType type, + LoadDirection direction, + GSAElement1D element, + double position + ) + { + this.nativeId = nativeId; + this.name = name; + this.factor = factor; + this.type = type; + this.direction = direction; + this.element = element; + this.position = position; + } + + [DetachProperty] + public GSAElement1D element { get; set; } + + public double position { get; set; } +} diff --git a/src/Objects/Structural/GSA/Bridge/GSAInfluenceNode.cs b/src/Objects/Structural/GSA/Bridge/GSAInfluenceNode.cs new file mode 100644 index 00000000..7794fc96 --- /dev/null +++ b/src/Objects/Structural/GSA/Bridge/GSAInfluenceNode.cs @@ -0,0 +1,42 @@ +using Objects.Structural.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Bridge; + +public class GSAInfluenceNode : GSAInfluence +{ + public GSAInfluenceNode() { } + + [SchemaInfo( + "GSAInfluenceBeam", + "Creates a Speckle structural node influence effect for GSA (for an influence analysis)", + "GSA", + "Bridge" + )] + public GSAInfluenceNode( + int nativeId, + string name, + double factor, + InfluenceType type, + LoadDirection direction, + Node node, + Axis axis + ) + { + this.nativeId = nativeId; + this.name = name; + this.factor = factor; + this.type = type; + this.direction = direction; + this.node = node; + this.axis = axis; + } + + [DetachProperty] + public Node node { get; set; } + + [DetachProperty] + public Axis axis { get; set; } +} diff --git a/src/Objects/Structural/GSA/Bridge/GSAPath.cs b/src/Objects/Structural/GSA/Bridge/GSAPath.cs new file mode 100644 index 00000000..d2b6254c --- /dev/null +++ b/src/Objects/Structural/GSA/Bridge/GSAPath.cs @@ -0,0 +1,62 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Bridge; + +public class GSAPath : Base +{ + public GSAPath() { } + + [SchemaInfo( + "GSAPath", + "Creates a Speckle structural path for GSA (a path defines traffic lines along 
a bridge relative to an alignments, for influence analysis)", + "GSA", + "Bridge" + )] + public GSAPath( + int nativeId, + string name, + PathType type, + int group, + GSAAlignment alignment, + double left, + double right, + double factor, + int numMarkedLanes + ) + { + this.nativeId = nativeId; + this.name = name; + this.type = type; + this.group = group; + this.alignment = alignment; + this.left = left; + this.right = right; + this.factor = factor; + this.numMarkedLanes = numMarkedLanes; + } + + public int nativeId { get; set; } + public string name { get; set; } + public PathType type { get; set; } + public int group { get; set; } + + [DetachProperty] + public GSAAlignment alignment { get; set; } + + public double left { get; set; } //left / centre offset + public double right { get; set; } //right offset / gauge + public double factor { get; set; } //left factor + public int numMarkedLanes { get; set; } +} + +public enum PathType +{ + NotSet = 0, + LANE, + FOOTWAY, + TRACK, + VEHICLE, + CWAY_1WAY, + CWAY_2WAY +} diff --git a/src/Objects/Structural/GSA/Bridge/GSAUserVehicle.cs b/src/Objects/Structural/GSA/Bridge/GSAUserVehicle.cs new file mode 100644 index 00000000..9a49cba8 --- /dev/null +++ b/src/Objects/Structural/GSA/Bridge/GSAUserVehicle.cs @@ -0,0 +1,43 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Bridge; + +public class GSAUserVehicle : Base +{ + public GSAUserVehicle() { } + + [SchemaInfo( + "GSAUserVehicle", + "Creates a Speckle structural user-defined vehicle (as a pattern of loading based on axle and wheel positions, for influence analysis) for GSA", + "GSA", + "Bridge" + )] + public GSAUserVehicle( + int nativeId, + string name, + double width, + List axlePositions, + List axleOffsets, + List axleLeft, + List axleRight + ) + { + this.nativeId = nativeId; + this.name = name; + this.width = width; + this.axlePositions = axlePositions; + this.axleOffsets = axleOffsets; + this.axleLeft = axleLeft; + this.axleRight = axleRight; + } + + public int nativeId { get; set; } + public string name { get; set; } + public double width { get; set; } //vehicle width + public List axlePositions { get; set; } + public List axleOffsets { get; set; } // offset from centreline + public List axleLeft { get; set; } //load on left side + public List axleRight { get; set; } //load on right side +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAAssembly.cs b/src/Objects/Structural/GSA/Geometry/GSAAssembly.cs new file mode 100644 index 00000000..ca4e54d9 --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAAssembly.cs @@ -0,0 +1,67 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAAssembly : Base +{ + public GSAAssembly() { } + + [SchemaInfo( + "GSAAssembly", + "Creates a Speckle structural assembly (ie. 
a way to define an entity that is formed from a collection of elements or members) for GSA", + "GSA", + "Bridge" + )] + public GSAAssembly( + int nativeId, + string name, + List entities, + GSANode end1Node, + GSANode end2Node, + GSANode orientationNode, + double sizeY, + double sizeZ, + string curveType, + int curveOrder, + string pointDefinition, + List points + ) + { + this.nativeId = nativeId; + this.name = name; + this.entities = entities; + this.end1Node = end1Node; + this.end2Node = end2Node; + this.orientationNode = orientationNode; + this.sizeY = sizeY; + this.sizeZ = sizeZ; + this.curveType = curveType; + this.curveOrder = curveOrder; + this.pointDefinition = pointDefinition; + this.points = points; + } + + public int nativeId { get; set; } //equiv to num record of gwa keyword + public string name { get; set; } + + [DetachProperty, Chunkable(5000)] + public List entities { get; set; } //nodes, elements, members + + [DetachProperty] + public GSANode end1Node { get; set; } + + [DetachProperty] + public GSANode end2Node { get; set; } + + [DetachProperty] + public GSANode orientationNode { get; set; } + + public double sizeY { get; set; } + public double sizeZ { get; set; } + public string curveType { get; set; } // enum? circular or lagrange sufficient? + public int curveOrder { get; set; } + public string pointDefinition { get; set; } // enum as well? points and spacing to start? || points and storeys to be supported + public List points { get; set; } // or make this Base type to accomdate storey list and explicit range? or add sep property for those cases? +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAElement1D.cs b/src/Objects/Structural/GSA/Geometry/GSAElement1D.cs new file mode 100644 index 00000000..e59c1113 --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAElement1D.cs @@ -0,0 +1,81 @@ +using Objects.Geometry; +using Objects.Structural.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAElement1D : Element1D +{ + public GSAElement1D() { } + + [SchemaInfo( + "GSAElement1D (from local axis)", + "Creates a Speckle structural 1D element for GSA (from local axis)", + "GSA", + "Geometry" + )] + public GSAElement1D( + int nativeId, + Line baseLine, + Property1D property, + ElementType1D type, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end1Releases = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end2Releases = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end1Offset = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end2Offset = null, + Plane? localAxis = null + ) + { + this.nativeId = nativeId; + this.baseLine = baseLine; + this.property = property; + this.type = type; + this.end1Releases = end1Releases ?? new Restraint("FFFFFF"); + this.end2Releases = end2Releases ?? new Restraint("FFFFFF"); + this.end1Offset = end1Offset ?? new Vector(0, 0, 0); + this.end2Offset = end2Offset ?? 
new Vector(0, 0, 0); + this.localAxis = localAxis; + } + + [SchemaInfo( + "GSAElement1D (from orientation node and angle)", + "Creates a Speckle structural 1D element for GSA (from orientation node and angle)", + "GSA", + "Geometry" + )] + public GSAElement1D( + int nativeId, + Line baseLine, + Property1D property, + ElementType1D type, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end1Releases = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end2Releases = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end1Offset = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end2Offset = null, + Node? orientationNode = null, + double orientationAngle = 0 + ) + { + this.nativeId = nativeId; + this.baseLine = baseLine; + this.property = property; + this.type = type; + this.end1Releases = end1Releases ?? new Restraint("FFFFFF"); + this.end2Releases = end2Releases ?? new Restraint("FFFFFF"); + this.end1Offset = end1Offset ?? new Vector(0, 0, 0); + this.end2Offset = end2Offset ?? new Vector(0, 0, 0); + this.orientationNode = orientationNode; + this.orientationAngle = orientationAngle; + } + + public int nativeId { get; set; } //equiv to num record of gwa keyword + public int group { get; set; } + public string colour { get; set; } + public string action { get; set; } + public bool isDummy { get; set; } +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAElement2D.cs b/src/Objects/Structural/GSA/Geometry/GSAElement2D.cs new file mode 100644 index 00000000..06a921dd --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAElement2D.cs @@ -0,0 +1,43 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAElement2D : Element2D +{ + public GSAElement2D() { } + + [SchemaInfo("GSAElement2D", "Creates a Speckle structural 2D element for GSA", "GSA", "Geometry")] + public GSAElement2D( + int nativeId, + List nodes, + Property2D property, + ElementType2D type, + string? name = null, + double offset = 0, + double orientationAngle = 0, + int group = 0, + string colour = "NO_RGB", + bool isDummy = false + ) + { + this.nativeId = nativeId; + topology = nodes; + this.property = property; + this.type = type; + this.name = name ?? ""; + this.nativeId = nativeId; + this.offset = offset; + this.orientationAngle = orientationAngle; + this.group = group; + this.colour = colour; + this.isDummy = isDummy; + } + + public int nativeId { get; set; } + public int group { get; set; } + public string colour { get; set; } + public bool isDummy { get; set; } +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAElement3D.cs b/src/Objects/Structural/GSA/Geometry/GSAElement3D.cs new file mode 100644 index 00000000..ab9c97ad --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAElement3D.cs @@ -0,0 +1,40 @@ +using Objects.Geometry; +using Objects.Structural.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAElement3D : Element3D +{ + public GSAElement3D() { } + + [SchemaInfo("GSAElement3D", "Creates a Speckle structural 3D element for GSA", "GSA", "Geometry")] + public GSAElement3D( + int nativeId, + Mesh baseMesh, + Property3D property, + ElementType3D type, + string? 
name = null, + double orientationAngle = 0, + int group = 0, + string colour = "NO_RGB", + bool isDummy = false + ) + { + this.nativeId = nativeId; + this.baseMesh = baseMesh; + this.property = property; + this.type = type; + this.name = name ?? ""; + this.orientationAngle = orientationAngle; + this.group = group; + this.colour = colour; + this.isDummy = isDummy; + } + + public int nativeId { get; set; } + public int group { get; set; } + public string colour { get; set; } + public bool isDummy { get; set; } +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAGeneralisedRestraint.cs b/src/Objects/Structural/GSA/Geometry/GSAGeneralisedRestraint.cs new file mode 100644 index 00000000..54ee9384 --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAGeneralisedRestraint.cs @@ -0,0 +1,45 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.GSA.Analysis; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAGeneralisedRestraint : Base +{ + public GSAGeneralisedRestraint() { } + + [SchemaInfo( + "GSAGeneralisedRestraint", + "Creates a Speckle structural generalised restraint (a set of restraint conditions to be applied to a list of nodes) for GSA", + "GSA", + "Geometry" + )] + public GSAGeneralisedRestraint( + int nativeId, + string name, + Restraint restraint, + List nodes, + List stages + ) + { + this.nativeId = nativeId; + this.name = name; + this.restraint = restraint; + this.nodes = nodes; + this.stages = stages; + } + + public int nativeId { get; set; } + public string name { get; set; } + + [DetachProperty] + public Restraint restraint { get; set; } + + [DetachProperty, Chunkable(5000)] + public List nodes { get; set; } + + [DetachProperty] + public List stages { get; set; } +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAGridLine.cs b/src/Objects/Structural/GSA/Geometry/GSAGridLine.cs new file mode 100644 index 00000000..6a878cfb --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAGridLine.cs @@ -0,0 +1,19 @@ +using Objects.BuiltElements; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAGridLine : GridLine +{ + public GSAGridLine() { } + + [SchemaInfo("GSAGridLine", "Creates a Speckle structural grid line for GSA", "GSA", "Geometry")] + public GSAGridLine(int nativeId, string name, ICurve line) + { + this.nativeId = nativeId; + label = name; + baseLine = line; + } + + public int nativeId { get; set; } +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAGridPlane.cs b/src/Objects/Structural/GSA/Geometry/GSAGridPlane.cs new file mode 100644 index 00000000..e2b80a89 --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAGridPlane.cs @@ -0,0 +1,27 @@ +using Objects.Structural.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAGridPlane : Storey +{ + public GSAGridPlane() { } + + [SchemaInfo("GSAGridPlane", "Creates a Speckle structural grid plane for GSA", "GSA", "Geometry")] + public GSAGridPlane(int nativeId, string name, Axis axis, double elevation) + { + this.nativeId = nativeId; + this.name = name; + this.axis = axis; + this.elevation = elevation; // the height of the grid plane above the origin (of the specified axis) + } + + public int nativeId { get; set; } + + [DetachProperty] + public Axis axis { get; set; } + + public double? toleranceBelow { get; set; } + public double? 
toleranceAbove { get; set; } +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAGridSurface.cs b/src/Objects/Structural/GSA/Geometry/GSAGridSurface.cs new file mode 100644 index 00000000..efe0f39d --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAGridSurface.cs @@ -0,0 +1,62 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAGridSurface : Base +{ + public GSAGridSurface() { } + + [SchemaInfo("GSAGridSurface", "Creates a Speckle structural grid surface for GSA", "GSA", "Geometry")] + public GSAGridSurface( + string name, + int nativeId, + GSAGridPlane gridPlane, + double tolerance, + double spanDirection, + LoadExpansion loadExpansion, + GridSurfaceSpanType span, + List elements + ) + { + this.name = name; + this.nativeId = nativeId; + this.gridPlane = gridPlane; + this.tolerance = tolerance; + this.spanDirection = spanDirection; + this.loadExpansion = loadExpansion; + this.span = span; + this.elements = elements; + } + + public string name { get; set; } + public int nativeId { get; set; } + + [DetachProperty] + public GSAGridPlane gridPlane { get; set; } + + public double tolerance { get; set; } + public double spanDirection { get; set; } + public LoadExpansion loadExpansion { get; set; } + public GridSurfaceSpanType span { get; set; } + + [DetachProperty, Chunkable(5000)] + public List elements { get; set; } +} + +public enum GridSurfaceSpanType +{ + NotSet = 0, + OneWay, + TwoWay +} + +public enum LoadExpansion +{ + NotSet = 0, + Legacy = 1, + PlaneAspect = 2, + PlaneSmooth = 3, + PlaneCorner = 4 +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAMember1D.cs b/src/Objects/Structural/GSA/Geometry/GSAMember1D.cs new file mode 100644 index 00000000..0503a7a8 --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAMember1D.cs @@ -0,0 +1,78 @@ +using Objects.Geometry; +using Objects.Structural.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAMember1D : Element1D +{ + public GSAMember1D() { } + + [SchemaInfo( + "GSAMember1D (from local axis)", + "Creates a Speckle structural 1D member for GSA (from local axis)", + "GSA", + "Geometry" + )] + public GSAMember1D( + int nativeId, + Line baseLine, + Property1D property, + ElementType1D type, + Restraint end1Releases, + Restraint end2Releases, + Vector end1Offset, + Vector end2Offset, + Plane localAxis + ) + { + this.nativeId = nativeId; + this.baseLine = baseLine; + this.property = property; + this.type = type; + this.end1Releases = end1Releases; + this.end2Releases = end2Releases; + this.end1Offset = end1Offset; + this.end2Offset = end2Offset; + this.localAxis = localAxis; + } + + [SchemaInfo( + "GSAMember1D (from orientation node and angle)", + "Creates a Speckle structural 1D member for GSA (from orientation node and angle)", + "GSA", + "Geometry" + )] + public GSAMember1D( + int nativeId, + Line baseLine, + Property1D property, + ElementType1D type, + Restraint end1Releases, + Restraint end2Releases, + Vector end1Offset, + Vector end2Offset, + GSANode orientationNode, + double orientationAngle + ) + { + this.nativeId = nativeId; + this.baseLine = baseLine; + this.property = property; + this.type = type; + this.end1Releases = end1Releases; + this.end2Releases = end2Releases; + this.end1Offset = end1Offset; + this.end2Offset = end2Offset; + this.orientationNode = orientationNode; + this.orientationAngle = orientationAngle; + } + + 
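// Reviewer note (assumptions inferred from GSA conventions, not stated in this patch): isDummy flags a member that is
+ // kept in the model but excluded from analysis, and targetMeshSize is the preferred element size used when the member
+ // is meshed into analysis elements. Usage sketch with hypothetical placeholders (line, prop, orientationNode) and an
+ // assumed Beam member type:
+ //   var member = new GSAMember1D(1, line, prop, ElementType1D.Beam, new Restraint("FFFFFF"),
+ //     new Restraint("FFFFFF"), new Vector(0, 0, 0), new Vector(0, 0, 0), orientationNode, 0);
+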
public int nativeId { get; set; } + public int group { get; set; } + public string colour { get; set; } + public bool isDummy { get; set; } + public bool intersectsWithOthers { get; set; } + public double targetMeshSize { get; set; } +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAMember2D.cs b/src/Objects/Structural/GSA/Geometry/GSAMember2D.cs new file mode 100644 index 00000000..2e1eccd8 --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAMember2D.cs @@ -0,0 +1,43 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAMember2D : Element2D +{ + public GSAMember2D() { } + + [SchemaInfo("GSAMember2D", "Creates a Speckle structural 2D member for GSA", "GSA", "Geometry")] + public GSAMember2D( + [SchemaParamInfo( + "An ordered list of nodes which represents the perimeter of a member (ie. order of points should based on valid polyline)" + )] + List perimeter, + Property2D property, + ElementType2D type, + [SchemaParamInfo( + "A list of ordered lists of nodes representing the voids within a member (ie. order of points should be based on valid polyline)" + )] + List>? voids = null, + double offset = 0, + double orientationAngle = 0 + ) + { + topology = perimeter; //needs to be ordered properly (ie. matching the point order of a valid polyline) + this.property = property; + this.type = type; + this.voids = voids; //needs to be ordered properly (ie. matching the point order of a valid polyline) + this.offset = offset; + this.orientationAngle = orientationAngle; + } + + public int nativeId { get; set; } + public int group { get; set; } + public string colour { get; set; } + public bool isDummy { get; set; } + public bool intersectsWithOthers { get; set; } + public double targetMeshSize { get; set; } + public List>? voids { get; set; } +} diff --git a/src/Objects/Structural/GSA/Geometry/GSANode.cs b/src/Objects/Structural/GSA/Geometry/GSANode.cs new file mode 100644 index 00000000..4d864476 --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSANode.cs @@ -0,0 +1,56 @@ +using Objects.Geometry; +using Objects.Structural.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Geometry; + +public class GSANode : Node +{ + public GSANode() { } + + /// + /// SchemaBuilder constructor for a GSA node + /// + /// + /// + /// + /// + /// + /// + /// + [SchemaInfo("GSANode", "Creates a Speckle structural node for GSA", "GSA", "Geometry")] + public GSANode( + int nativeId, + Point basePoint, + Restraint restraint, + Axis? constraintAxis = null, + PropertySpring? springProperty = null, + PropertyMass? massProperty = null, + PropertyDamper? damperProperty = null, + double localElementSize = 0, + string colour = "NO_RGB" + ) + { + this.nativeId = nativeId; + this.basePoint = basePoint; + this.restraint = restraint; + this.constraintAxis = + constraintAxis == null + ? 
new Axis( + "Global", + AxisType.Cartesian, + new Plane(new Point(0, 0), new Vector(0, 0, 1), new Vector(1, 0, 0), new Vector(0, 1, 0)) + ) + : constraintAxis; + this.springProperty = springProperty; + this.massProperty = massProperty; + this.damperProperty = damperProperty; + this.localElementSize = localElementSize; + this.colour = colour; + } + + public int nativeId { get; set; } + public double localElementSize { get; set; } + public string colour { get; set; } +} diff --git a/src/Objects/Structural/GSA/Geometry/GSARigidConstraint.cs b/src/Objects/Structural/GSA/Geometry/GSARigidConstraint.cs new file mode 100644 index 00000000..2dbea840 --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSARigidConstraint.cs @@ -0,0 +1,88 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.GSA.Analysis; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Geometry; + +public class GSARigidConstraint : Base +{ + public GSARigidConstraint() { } + + [SchemaInfo( + "GSARigidConstraint", + "Creates a Speckle structural rigid restraint (a set of nodes constrained to move as a rigid body) for GSA", + "GSA", + "Geometry" + )] + public GSARigidConstraint( + string name, + int nativeId, + Node primaryNode, + List constrainedNodes, + Base parentMember, + List stages, + LinkageType type, + Dictionary> constraintCondition + ) + { + this.name = name; + this.nativeId = nativeId; + this.primaryNode = primaryNode; + this.constrainedNodes = constrainedNodes; + this.parentMember = parentMember; + this.stages = stages; + this.type = type; + this.constraintCondition = constraintCondition; + } + + public string name { get; set; } + public int nativeId { get; set; } + + [DetachProperty] + public Node primaryNode { get; set; } + + [DetachProperty, Chunkable(5000)] + public List constrainedNodes { get; set; } + + [DetachProperty] + public Base parentMember { get; set; } + + [DetachProperty] + public List stages { get; set; } + + public LinkageType type { get; set; } + public Dictionary> constraintCondition { get; set; } +} + +public enum AxisDirection6 +{ + NotSet = 0, + X, + Y, + Z, + XX, + YY, + ZZ +} + +public enum LinkageType +{ + NotSet = 0, + ALL, + XY_PLANE, + YZ_PLANE, + ZX_PLANE, + XY_PLATE, + YZ_PLATE, + ZX_PLATE, + PIN, + XY_PLANE_PIN, + YZ_PLANE_PIN, + ZX_PLANE_PIN, + XY_PLATE_PIN, + YZ_PLATE_PIN, + ZX_PLATE_PIN, + Custom +} diff --git a/src/Objects/Structural/GSA/Geometry/GSAStorey.cs b/src/Objects/Structural/GSA/Geometry/GSAStorey.cs new file mode 100644 index 00000000..8cda0be4 --- /dev/null +++ b/src/Objects/Structural/GSA/Geometry/GSAStorey.cs @@ -0,0 +1,34 @@ +using Objects.Structural.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Geometry; + +public class GSAStorey : Storey +{ + public GSAStorey() { } + + [SchemaInfo( + "GSAStorey", + "Creates a Speckle structural storey (to describe floor levels/storeys in the structural model) for GSA", + "GSA", + "Geometry" + )] + public GSAStorey(int nativeId, string name, Axis axis, double elevation, double toleranceBelow, double toleranceAbove) + { + this.nativeId = nativeId; + this.name = name; + this.axis = axis; + this.elevation = elevation; + this.toleranceBelow = toleranceBelow; + this.toleranceAbove = toleranceAbove; + } + + public int nativeId { get; set; } + + [DetachProperty] + public Axis axis { get; set; } + + public double toleranceBelow { get; set; } + public double toleranceAbove { get; set; } +} diff --git 
a/src/Objects/Structural/GSA/Loading/GSALoadBeam.cs b/src/Objects/Structural/GSA/Loading/GSALoadBeam.cs new file mode 100644 index 00000000..1efc98da --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadBeam.cs @@ -0,0 +1,79 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadBeam : LoadBeam +{ + public GSALoadBeam() { } + + [SchemaInfo("GSALoadBeam", "Creates a Speckle structural beam (1D elem/member) load for GSA", "GSA", "Loading")] + public GSALoadBeam( + int nativeId, + LoadCase loadCase, + List elements, + BeamLoadType loadType, + LoadDirection direction, + LoadAxisType loadAxisType = LoadAxisType.Global, + [SchemaParamInfo( + "A list that represents load magnitude (number of values varies based on load type - Point: 1, Uniform: 1, Linear: 2, Patch: 2, Tri-linear:2)" + )] + List? values = null, + [SchemaParamInfo( + "A list that represents load locations (number of values varies based on load type - Point: 1, Uniform: null, Linear: null, Patch: 2, Tri-linear: 2)" + )] + List? positions = null, + bool isProjected = false + ) + { + this.nativeId = nativeId; + this.loadCase = loadCase; + this.elements = elements; + this.loadType = loadType; + this.direction = direction; + this.loadAxisType = loadAxisType; + this.values = values; + this.positions = positions; + this.isProjected = isProjected; + } + + [SchemaInfo( + "GSALoadBeam (user-defined axis)", + "Creates a Speckle structural beam (1D elem/member) load (specified for a user-defined axis) for GSA", + "GSA", + "Loading" + )] + public GSALoadBeam( + int nativeId, + LoadCase loadCase, + List elements, + BeamLoadType loadType, + LoadDirection direction, + Axis loadAxis, + [SchemaParamInfo( + "A list that represents load magnitude (number of values varies based on load type - Point: 1, Uniform: 1, Linear: 2, Patch: 2, Tri-linear:2)" + )] + List? values = null, + [SchemaParamInfo( + "A list that represents load locations (number of values varies based on load type - Point: 1, Uniform: null, Linear: null, Patch: 2, Tri-linear: 2)" + )] + List? positions = null, + bool isProjected = false + ) + { + this.nativeId = nativeId; + this.loadCase = loadCase; + this.elements = elements; + this.loadType = loadType; + this.direction = direction; + this.loadAxis = loadAxis; + this.values = values; + this.positions = positions; + this.isProjected = isProjected; + } + + public int nativeId { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadCase.cs b/src/Objects/Structural/GSA/Loading/GSALoadCase.cs new file mode 100644 index 00000000..7afb86c8 --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadCase.cs @@ -0,0 +1,38 @@ +using Objects.Structural.Loading; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadCase : LoadCase +{ + public GSALoadCase() { } + + [SchemaInfo("GSALoadCase", "Creates a Speckle structural load case for GSA", "GSA", "Loading")] + public GSALoadCase( + int nativeId, + string name, + LoadType loadType, + LoadDirection2D loadDirection, + string? source = null, + ActionType actionType = ActionType.None, + string? description = null, + string? include = null, + bool bridge = false + ) + { + this.nativeId = nativeId; + this.name = name; + this.loadType = loadType; + group = source ?? ""; + this.actionType = actionType; + this.description = description ??
""; + direction = loadDirection; + this.include = include ?? ""; + this.bridge = bridge; + } + + public int nativeId { get; set; } + public LoadDirection2D direction { get; set; } + public string include { get; set; } + public bool bridge { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadCombination.cs b/src/Objects/Structural/GSA/Loading/GSALoadCombination.cs new file mode 100644 index 00000000..7adedb02 --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadCombination.cs @@ -0,0 +1,35 @@ +using System; +using System.Collections.Generic; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadCombination : LoadCombination +{ + public GSALoadCombination() { } + + [SchemaInfo("GSALoadCombination", "Creates a Speckle load combination for GSA", "GSA", "Loading")] + public GSALoadCombination( + int nativeId, + string name, + [SchemaParamInfo("A list of load cases")] List loadCases, + [SchemaParamInfo("A list of load factors (to be mapped to provided load cases)")] List loadFactors + ) + { + this.nativeId = nativeId; + this.name = name; + + if (loadCases.Count != loadFactors.Count) + { + throw new ArgumentException("Number of load cases provided does not match number of load factors provided"); + } + + this.loadFactors = loadFactors; + this.loadCases = loadCases; + this.nativeId = nativeId; + } + + public int nativeId { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadFace.cs b/src/Objects/Structural/GSA/Loading/GSALoadFace.cs new file mode 100644 index 00000000..28b49439 --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadFace.cs @@ -0,0 +1,43 @@ +using System.Collections.Generic; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadFace : LoadFace +{ + public GSALoadFace() { } + + [SchemaInfo("GSALoadFace", "Creates a Speckle structural face (2D elem/member) load for GSA", "GSA", "Loading")] + public GSALoadFace( + int nativeId, + LoadCase loadCase, + List elements, + FaceLoadType loadType, + LoadDirection2D direction, + LoadAxisType loadAxisType = LoadAxisType.Global, + [SchemaParamInfo( + "A list that represents load magnitude (number of values varies based on load type - Uniform: 1, Variable: 4 (corner nodes), Point: 1)" + )] + List? values = null, + [SchemaParamInfo( + "A list that represents load locations (number of values varies based on load type - Uniform: null, Variable: null, Point: 2)" + )] + List? 
positions = null, + bool isProjected = false + ) + { + this.nativeId = nativeId; + this.loadCase = loadCase; + this.elements = elements; + this.loadType = loadType; + this.direction = direction; + this.loadAxisType = loadAxisType; + this.values = values; + this.positions = positions; + this.isProjected = isProjected; + } + + public int nativeId { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadGravity.cs b/src/Objects/Structural/GSA/Loading/GSALoadGravity.cs new file mode 100644 index 00000000..f820fa27 --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadGravity.cs @@ -0,0 +1,73 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadGravity : LoadGravity +{ + public GSALoadGravity() { } + + [SchemaInfo( + "GSALoadGravity", + "Creates a Speckle structural gravity load (applied to all nodes and elements) for GSA", + "GSA", + "Loading" + )] + public GSALoadGravity(int nativeId, string name, LoadCase loadCase, Vector? gravityFactors = null) + { + this.nativeId = nativeId; + this.name = name; + this.loadCase = loadCase; + this.gravityFactors = gravityFactors ?? new Vector(0, 0, -1); + } + + [SchemaInfo( + "GSALoadGravity (specified elements)", + "Creates a Speckle structural gravity load (applied to specified elements) for GSA", + "GSA", + "Loading" + )] + public GSALoadGravity( + int nativeId, + string name, + LoadCase loadCase, + List elements, + Vector? gravityFactors = null + ) + { + this.nativeId = nativeId; + this.name = name; + this.elements = elements; + this.loadCase = loadCase; + this.gravityFactors = gravityFactors ?? new Vector(0, 0, -1); + } + + [SchemaInfo( + "GSALoadGravity (specified elements and nodes)", + "Creates a Speckle structural gravity load (applied to specified nodes and elements) for GSA", + "GSA", + "Loading" + )] + public GSALoadGravity( + int nativeId, + string name, + LoadCase loadCase, + List elements, + List nodes, + Vector? gravityFactors = null, + string? nativedId = null + ) + { + this.nativeId = nativeId; + this.name = name; + this.elements = elements; + this.nodes = nodes; + this.loadCase = loadCase; + this.gravityFactors = gravityFactors ?? 
new Vector(0, 0, -1); + } + + public int nativeId { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadGrid.cs b/src/Objects/Structural/GSA/Loading/GSALoadGrid.cs new file mode 100644 index 00000000..d908b01c --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadGrid.cs @@ -0,0 +1,23 @@ +using Objects.Structural.Geometry; +using Objects.Structural.GSA.Geometry; +using Objects.Structural.Loading; + +namespace Objects.Structural.GSA.Loading; + +public abstract class GSALoadGrid : Load +{ + protected GSALoadGrid() { } + + protected GSALoadGrid(int nativeId, GSAGridSurface gridSurface, Axis loadAxis, LoadDirection2D direction) + { + this.nativeId = nativeId; + this.gridSurface = gridSurface; + this.loadAxis = loadAxis; + this.direction = direction; + } + + public int nativeId { get; set; } + public GSAGridSurface gridSurface { get; set; } + public Axis loadAxis { get; set; } + public LoadDirection2D direction { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadGridArea.cs b/src/Objects/Structural/GSA/Loading/GSALoadGridArea.cs new file mode 100644 index 00000000..420c7646 --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadGridArea.cs @@ -0,0 +1,34 @@ +using Objects.Geometry; +using Objects.Structural.Geometry; +using Objects.Structural.GSA.Geometry; +using Objects.Structural.Loading; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadGridArea : GSALoadGrid +{ + public GSALoadGridArea() { } + + public GSALoadGridArea( + int nativeId, + GSAGridSurface gridSurface, + Axis loadAxis, + LoadDirection2D direction, + Polyline polyline, + bool isProjected, + double value + ) + { + this.nativeId = nativeId; + this.gridSurface = gridSurface; + this.loadAxis = loadAxis; + this.direction = direction; + this.polyline = polyline; + this.isProjected = isProjected; + this.value = value; + } + + public Polyline polyline { get; set; } + public bool isProjected { get; set; } + public double value { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadGridLine.cs b/src/Objects/Structural/GSA/Loading/GSALoadGridLine.cs new file mode 100644 index 00000000..3e5590c7 --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadGridLine.cs @@ -0,0 +1,35 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Structural.Geometry; +using Objects.Structural.GSA.Geometry; +using Objects.Structural.Loading; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadGridLine : GSALoadGrid +{ + public GSALoadGridLine() { } + + public GSALoadGridLine( + int nativeId, + GSAGridSurface gridSurface, + Axis loadAxis, + LoadDirection2D direction, + Polyline polyline, + bool isProjected, + List values + ) + { + this.nativeId = nativeId; + this.gridSurface = gridSurface; + this.loadAxis = loadAxis; + this.direction = direction; + this.polyline = polyline; + this.isProjected = isProjected; + this.values = values; + } + + public Polyline polyline { get; set; } + public bool isProjected { get; set; } + public List values { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadGridPoint.cs b/src/Objects/Structural/GSA/Loading/GSALoadGridPoint.cs new file mode 100644 index 00000000..077322c2 --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadGridPoint.cs @@ -0,0 +1,31 @@ +using Objects.Geometry; +using Objects.Structural.Geometry; +using Objects.Structural.GSA.Geometry; +using Objects.Structural.Loading; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadGridPoint : 
GSALoadGrid +{ + public GSALoadGridPoint() { } + + public GSALoadGridPoint( + int nativeId, + GSAGridSurface gridSurface, + Axis loadAxis, + LoadDirection2D direction, + Point position, + double value + ) + { + this.nativeId = nativeId; + this.gridSurface = gridSurface; + this.loadAxis = loadAxis; + this.direction = direction; + this.position = position; + this.value = value; + } + + public Point position { get; set; } + public double value { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadNode.cs b/src/Objects/Structural/GSA/Loading/GSALoadNode.cs new file mode 100644 index 00000000..cbd483ec --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadNode.cs @@ -0,0 +1,58 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.GSA.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadNode : LoadNode +{ + public GSALoadNode() { } + + [SchemaInfo("GSALoadNode", "Creates a Speckle node load for GSA", "GSA", "Loading")] + public GSALoadNode( + int nativeId, + string name, + LoadCase loadCase, + List nodes, + LoadDirection direction, + double value + ) + { + this.nativeId = nativeId; + this.name = name; + this.loadCase = loadCase; + List baseNodes = nodes.ConvertAll(x => (Node)x); + this.nodes = baseNodes; + this.direction = direction; + this.value = value; + } + + [SchemaInfo( + "GSALoadNode (user-defined axis)", + "Creates a Speckle node load (user-defined axis) for GSA", + "GSA", + "Loading" + )] + public GSALoadNode( + int nativeId, + string name, + LoadCase loadCase, + List nodes, + Axis loadAxis, + LoadDirection direction, + double value + ) + { + this.nativeId = nativeId; + this.name = name; + this.loadCase = loadCase; + this.nodes = nodes; + this.loadAxis = loadAxis; + this.direction = direction; + this.value = value; + } + + public int nativeId { get; set; } +} diff --git a/src/Objects/Structural/GSA/Loading/GSALoadThermal2d.cs b/src/Objects/Structural/GSA/Loading/GSALoadThermal2d.cs new file mode 100644 index 00000000..1b25a028 --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSALoadThermal2d.cs @@ -0,0 +1,35 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Loading; + +public class GSALoadThermal2d : Load +{ + public GSALoadThermal2d() { } + + public GSALoadThermal2d(int nativeId, List elements, Thermal2dLoadType type, List values) + { + this.nativeId = nativeId; + this.elements = elements; + this.type = type; + this.values = values; + } + + public int nativeId { get; set; } + + [DetachProperty, Chunkable(5000)] + public List elements { get; set; } + + public Thermal2dLoadType type { get; set; } + public List values { get; set; } +} + +public enum Thermal2dLoadType +{ + NotSet = 0, + Uniform, + Gradient, + General +} diff --git a/src/Objects/Structural/GSA/Loading/GSAPolyline.cs b/src/Objects/Structural/GSA/Loading/GSAPolyline.cs new file mode 100644 index 00000000..ab8281a5 --- /dev/null +++ b/src/Objects/Structural/GSA/Loading/GSAPolyline.cs @@ -0,0 +1,36 @@ +using System.Collections.Generic; +using System.Linq; +using Objects.Geometry; +using Objects.Structural.GSA.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Loading; + +public class GSAPolyline : Polyline +{ + public GSAPolyline() { } + + [SchemaInfo("GSAPolyline", "Creates a Speckle structural 
polyline for GSA", "GSA", "Geometry")] + public GSAPolyline( + string name, + int nativeId, + IEnumerable coordinatesArray, + string colour, + GSAGridPlane gridPlane + ) + { + this.name = name; + this.nativeId = nativeId; + value = coordinatesArray.ToList(); + this.colour = colour; + this.gridPlane = gridPlane; + } + + public string name { get; set; } + public int nativeId { get; set; } + public string colour { get; set; } + + [DetachProperty] + public GSAGridPlane gridPlane { get; set; } +} diff --git a/src/Objects/Structural/GSA/Materials/GSAConcrete.cs b/src/Objects/Structural/GSA/Materials/GSAConcrete.cs new file mode 100644 index 00000000..4773361d --- /dev/null +++ b/src/Objects/Structural/GSA/Materials/GSAConcrete.cs @@ -0,0 +1,88 @@ +using Objects.Structural.Materials; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Materials; + +public class GSAConcrete : Concrete +{ + public GSAConcrete() { } + + [SchemaInfo("GSAConcrete", "Creates a Speckle structural concrete material for GSA", "GSA", "Materials")] + public GSAConcrete( + int nativeId, + string name, + string? grade = null, + string? designCode = null, + string? codeYear = null, + double elasticModulus = 0, + double compressiveStrength = 0, + double tensileStrength = 0, + double flexuralStrength = 0, + double maxCompressiveStrain = 0, + double maxTensileStrain = 0, + double maxAggregateSize = 0, + bool lightweight = false, + double poissonsRatio = 0, + double shearModulus = 0, + double density = 0, + double alpha = 0, + double dampingRatio = 0, + double cost = 0, + string colour = "NO_RGB" + ) + { + this.nativeId = nativeId; + this.name = name; + this.grade = grade; + materialType = MaterialType.Concrete; + this.designCode = designCode; + this.codeYear = codeYear; + this.elasticModulus = elasticModulus; + this.compressiveStrength = compressiveStrength; + this.tensileStrength = tensileStrength; + this.flexuralStrength = flexuralStrength; + this.maxCompressiveStrain = maxCompressiveStrain; + this.maxTensileStrain = maxTensileStrain; + this.maxAggregateSize = maxAggregateSize; + this.lightweight = lightweight; + this.poissonsRatio = poissonsRatio; + this.shearModulus = shearModulus; + this.density = density; + this.dampingRatio = dampingRatio; + this.cost = cost; + this.colour = colour; + } + + public int nativeId { get; set; } + public string colour { get; set; } + + // FROM GWA + //public string Name { get => name; set { name = value; } } + //public GsaMat Mat; + //public MatConcreteType Type; + //public MatConcreteCement Cement; + //public double? Fc; // + //public double? Fcd; // + //public double? Fcdc; // + //public double? Fcdt; // + //public double? Fcfib; + //public double? EmEs; + //public double? N; + //public double? Emod; + //public double? EpsPeak; + //public double? EpsMax; + //public double? EpsU; // have tens and comp represented separately + //public double? EpsAx; // have tens and comp represented separately + //public double? EpsTran; + //public double? EpsAxs; + //public bool Light; // add this + //public double? Agg; + //public double? XdMin; + //public double? XdMax; + //public double? Beta; + //public double? Shrink; + //public double? Confine; + //public double? Fcc; + //public double? EpsPlasC; + //public double? 
EpsUC; +} diff --git a/src/Objects/Structural/GSA/Materials/GSAMaterial.cs b/src/Objects/Structural/GSA/Materials/GSAMaterial.cs new file mode 100644 index 00000000..66d0a0c2 --- /dev/null +++ b/src/Objects/Structural/GSA/Materials/GSAMaterial.cs @@ -0,0 +1,48 @@ +using Objects.Structural.Materials; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Materials; + +public class GSAMaterial : StructuralMaterial +{ + public GSAMaterial() { } + + [SchemaInfo("GSAMaterial", "Creates a Speckle structural material for GSA", "GSA", "Materials")] + public GSAMaterial( + int nativeId, + string name, + MaterialType type, + string? grade = null, + string? designCode = null, + string? codeYear = null, + double strength = 0, + double elasticModulus = 0, + double poissonsRatio = 0, + double shearModulus = 0, + double rho = 0, + double alpha = 0, + double dampingRatio = 0, + double cost = 0, + string colour = "NO_RGB" + ) + { + this.nativeId = nativeId; + this.name = name; + this.grade = grade; + materialType = type; + this.designCode = designCode; + this.codeYear = codeYear; + this.strength = strength; + this.elasticModulus = elasticModulus; + this.poissonsRatio = poissonsRatio; + this.shearModulus = shearModulus; + density = rho; + thermalExpansivity = alpha; + this.dampingRatio = dampingRatio; + this.cost = cost; + this.colour = colour; + } + + public int nativeId { get; set; } + public string colour { get; set; } +} diff --git a/src/Objects/Structural/GSA/Materials/GSASteel.cs b/src/Objects/Structural/GSA/Materials/GSASteel.cs new file mode 100644 index 00000000..b87a91c7 --- /dev/null +++ b/src/Objects/Structural/GSA/Materials/GSASteel.cs @@ -0,0 +1,63 @@ +using Objects.Structural.Materials; +using Speckle.Core.Kits; + +namespace Objects.Structural.GSA.Materials; + +public class GSASteel : Steel +{ + public GSASteel() { } + + [SchemaInfo( + "Steel", + "Creates a Speckle structural material for steel (to be used in structural analysis models)", + "Structural", + "Materials" + )] + public GSASteel( + int nativeId, + string name, + string? grade = null, + string? designCode = null, + string? codeYear = null, + double elasticModulus = 0, + double yieldStrength = 0, + double ultimateStrength = 0, + double maxStrain = 0, + double poissonsRatio = 0, + double shearModulus = 0, + double density = 0, + double alpha = 0, + double dampingRatio = 0, + double cost = 0, + string colour = "NO_RGB" + ) + { + this.nativeId = nativeId; + this.name = name; + this.grade = grade; + materialType = MaterialType.Steel; + this.designCode = designCode; + this.codeYear = codeYear; + this.elasticModulus = elasticModulus; + this.yieldStrength = yieldStrength; + this.ultimateStrength = ultimateStrength; + this.maxStrain = maxStrain; + this.poissonsRatio = poissonsRatio; + this.shearModulus = shearModulus; + this.density = density; + this.dampingRatio = dampingRatio; + this.cost = cost; + this.colour = colour; + } + + public int nativeId { get; set; } + public string colour { get; set; } + + // FROM GWA + //public string Name { get => name; set { name = value; } } + //public GsaMat Mat; + //public double? Fy; + //public double? Fu; + //public double? EpsP; + //public double?
Eh; // +} diff --git a/src/Objects/Structural/GSA/Properties/GSAProperty1D.cs b/src/Objects/Structural/GSA/Properties/GSAProperty1D.cs new file mode 100644 index 00000000..cd3986f3 --- /dev/null +++ b/src/Objects/Structural/GSA/Properties/GSAProperty1D.cs @@ -0,0 +1,40 @@ +using Objects.Structural.Materials; +using Objects.Structural.Properties; +using Objects.Structural.Properties.Profiles; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Properties; + +public class GSAProperty1D : Property1D +{ + public GSAProperty1D() { } + + [SchemaInfo("GSAProperty1D", "Creates a Speckle structural 1D element property for GSA", "GSA", "Properties")] + public GSAProperty1D( + int nativeId, + string name, + StructuralMaterial material, + SectionProfile profile, + double cost = 0, + double additionalMass = 0 + ) + { + this.nativeId = nativeId; + this.name = name; + this.material = material; + this.profile = profile; + this.cost = cost; + this.additionalMass = additionalMass; + } + + public int nativeId { get; set; } + + [DetachProperty] + public StructuralMaterial designMaterial { get; set; } + + public double additionalMass { get; set; } + public double? cost { get; set; } + public int? poolRef { get; set; } + public string colour { get; set; } +} diff --git a/src/Objects/Structural/GSA/Properties/GSAProperty2D.cs b/src/Objects/Structural/GSA/Properties/GSAProperty2D.cs new file mode 100644 index 00000000..ad0f081a --- /dev/null +++ b/src/Objects/Structural/GSA/Properties/GSAProperty2D.cs @@ -0,0 +1,30 @@ +using Objects.Structural.Materials; +using Objects.Structural.Properties; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.GSA.Properties; + +public class GSAProperty2D : Property2D +{ + public GSAProperty2D() { } + + [SchemaInfo("GSAProperty2D", "Creates a Speckle structural 2D element property for GSA", "GSA", "Properties")] + public GSAProperty2D(int nativeId, string name, StructuralMaterial material, double thickness) + { + this.nativeId = nativeId; + this.name = name; + this.material = material; + this.thickness = thickness; + } + + public int nativeId { get; set; } + + [DetachProperty] + public StructuralMaterial designMaterial { get; set; } + + public double cost { get; set; } + public double additionalMass { get; set; } + public string concreteSlabProp { get; set; } + public string colour { get; set; } +} diff --git a/src/Objects/Structural/Geometry/Axis.cs b/src/Objects/Structural/Geometry/Axis.cs new file mode 100644 index 00000000..aaf7df8b --- /dev/null +++ b/src/Objects/Structural/Geometry/Axis.cs @@ -0,0 +1,22 @@ +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Geometry; + +public class Axis : Base +{ + public Axis() { } + + [SchemaInfo("Axis", "Creates a Speckle structural axis (a user-defined axis)", "Structural", "Geometry")] + public Axis(string name, AxisType axisType = AxisType.Cartesian, Plane? definition = null) + { + this.name = name; + this.axisType = axisType; + this.definition = definition; + } + + public string name { get; set; } + public AxisType axisType { get; set; } + public Plane? 
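// Illustrative usage sketch (not introduced by this patch): wiring the GSA material and 2D property
// classes above together. The values are placeholders and assume SI units; unit handling is governed
// by the host model, not by these classes.
var concrete = new GSAConcrete(
  nativeId: 1,
  name: "C30/37",
  grade: "C30/37",
  elasticModulus: 33e9,
  compressiveStrength: 30e6,
  density: 2500
);
var slabProperty = new GSAProperty2D(nativeId: 10, name: "Slab 200", material: concrete, thickness: 0.2);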
definition { get; set; } +} diff --git a/src/Objects/Structural/Geometry/Element1D.cs b/src/Objects/Structural/Geometry/Element1D.cs new file mode 100644 index 00000000..d1d74eab --- /dev/null +++ b/src/Objects/Structural/Geometry/Element1D.cs @@ -0,0 +1,138 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Geometry; + +public class Element1D : Base, IDisplayValue> +{ + public Element1D() { } + + public Element1D(Line baseLine) + { + this.baseLine = baseLine; + } + + /// + /// SchemaBuilder constructor for structural 1D element (based on local axis) + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + [SchemaInfo( + "Element1D (from local axis)", + "Creates a Speckle structural 1D element (from local axis)", + "Structural", + "Geometry" + )] + public Element1D( + Line baseLine, + Property1D property, + ElementType1D type, + string? name = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end1Releases = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end2Releases = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end1Offset = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end2Offset = null, + Plane? localAxis = null + ) + { + this.baseLine = baseLine; + this.property = property; + this.type = type; + this.name = name; + this.end1Releases = end1Releases ?? new Restraint("FFFFFF"); + this.end2Releases = end2Releases ?? new Restraint("FFFFFF"); + this.end1Offset = end1Offset ?? new Vector(0, 0, 0); + this.end2Offset = end2Offset ?? new Vector(0, 0, 0); + this.localAxis = localAxis; + } + + /// + /// SchemaBuilder constructor for structural 1D element (based on orientation node and angle) + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + [SchemaInfo( + "Element1D (from orientation node and angle)", + "Creates a Speckle structural 1D element (from orientation node and angle)", + "Structural", + "Geometry" + )] + public Element1D( + Line baseLine, + Property1D property, + ElementType1D type, + string? name = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end1Releases = null, + [SchemaParamInfo("If null, restraint condition defaults to unreleased (fully fixed translations and rotations)")] + Restraint? end2Releases = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end1Offset = null, + [SchemaParamInfo("If null, defaults to no offsets")] Vector? end2Offset = null, + Node? orientationNode = null, + double orientationAngle = 0 + ) + { + this.baseLine = baseLine; + this.property = property; + this.type = type; + this.name = name; + this.end1Releases = end1Releases ?? new Restraint("FFFFFF"); + this.end2Releases = end2Releases ?? new Restraint("FFFFFF"); + this.end1Offset = end1Offset ?? new Vector(0, 0, 0); + this.end2Offset = end2Offset ?? new Vector(0, 0, 0); + this.orientationNode = orientationNode; + this.orientationAngle = orientationAngle; + } + + public string? 
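// Illustrative usage sketch (not introduced by this patch): a beam element built with the Element1D
// constructor above. Release and offset arguments are omitted, so they fall back to the "FFFFFF"
// restraints and zero offset vectors assigned in the constructor. The Line constructor shown here
// (start point, end point) is assumed from Objects.Geometry elsewhere in this patch.
var baseLine = new Line(new Point(0, 0), new Point(6, 0));
var beamProperty = new Property1D("400x250 RC beam", new Concrete("C30/37"), new Rectangular("400x250", depth: 0.4, width: 0.25));
var beam = new Element1D(baseLine, beamProperty, ElementType1D.Beam, name: "B1");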
name { get; set; } //add unique id as base identifier, name can change too easily + public Line baseLine { get; set; } + + [DetachProperty] + public Property1D property { get; set; } + + public ElementType1D type { get; set; } + public Restraint end1Releases { get; set; } + public Restraint end2Releases { get; set; } + public Vector end1Offset { get; set; } + public Vector end2Offset { get; set; } + public Node? orientationNode { get; set; } + public double orientationAngle { get; set; } + public Plane? localAxis { get; set; } + + [DetachProperty] + public Base parent { get; set; } //parent element + + [DetachProperty] + public Node end1Node { get; set; } //startNode + + [DetachProperty] + public Node end2Node { get; set; } //endNode + + [DetachProperty] + public List topology { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/Structural/Geometry/Element2D.cs b/src/Objects/Structural/Geometry/Element2D.cs new file mode 100644 index 00000000..abbf973e --- /dev/null +++ b/src/Objects/Structural/Geometry/Element2D.cs @@ -0,0 +1,52 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Geometry; + +public class Element2D : Base, IDisplayValue> +{ + public Element2D() { } + + public Element2D(List nodes) + { + topology = nodes; + } + + [SchemaInfo( + "Element2D", + "Creates a Speckle structural 2D element (based on a list of edge ie. external, geometry defining nodes)", + "Structural", + "Geometry" + )] + public Element2D(List nodes, Property2D property, double offset = 0, double orientationAngle = 0) + { + topology = nodes; + this.property = property; + this.offset = offset; + this.orientationAngle = orientationAngle; + } + + public string name { get; set; } + + [DetachProperty] + public Property2D property { get; set; } + + public ElementType2D type { get; set; } + public double offset { get; set; } //z direction (normal) + public double orientationAngle { get; set; } + + [DetachProperty] + public Base parent { get; set; } //parent element + + [DetachProperty] + public List topology { get; set; } + public List openings { get; set; } + + public string units { get; set; } + + [DetachProperty] + public List displayValue { get; set; } +} diff --git a/src/Objects/Structural/Geometry/Element3D.cs b/src/Objects/Structural/Geometry/Element3D.cs new file mode 100644 index 00000000..9b52f57b --- /dev/null +++ b/src/Objects/Structural/Geometry/Element3D.cs @@ -0,0 +1,50 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Geometry; + +public class Element3D : Base +{ + public Element3D() { } + + public Element3D(Mesh baseMesh) + { + this.baseMesh = baseMesh; + } + + [SchemaInfo("Element3D", "Creates a Speckle structural 3D element", "Structural", "Geometry")] + public Element3D( + Mesh baseMesh, + Property3D property, + ElementType3D type, + string? name = null, + double orientationAngle = 0 + ) + { + this.baseMesh = baseMesh; + this.property = property; + this.type = type; + this.name = name; + this.orientationAngle = orientationAngle; + } + + public string? name { get; set; } + public Mesh baseMesh { get; set; } //rhino - parent mesh? 
elements (including props/materias) explicitly defined in a list + + [DetachProperty] + public Property3D property { get; set; } + + public ElementType3D type { get; set; } + public double orientationAngle { get; set; } + + [DetachProperty] + public Base parent { get; set; } //parent element + + [DetachProperty] + public List topology { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/Structural/Geometry/ElementType.cs b/src/Objects/Structural/Geometry/ElementType.cs new file mode 100644 index 00000000..e8f87ca7 --- /dev/null +++ b/src/Objects/Structural/Geometry/ElementType.cs @@ -0,0 +1,35 @@ +namespace Objects.Structural.Geometry; + +public enum ElementType1D +{ + Beam, + Brace, + Bar, + Column, + Rod, + Spring, + Tie, + Strut, + Link, + Damper, + Cable, + Spacer, + Other, + Null +} + +public enum ElementType2D +{ + Quad4, + Quad8, + Triangle3, + Triangle6 +} + +public enum ElementType3D +{ + Brick8, + Wedge6, + Pyramid5, + Tetra4 +} diff --git a/src/Objects/Structural/Geometry/MemberType.cs b/src/Objects/Structural/Geometry/MemberType.cs new file mode 100644 index 00000000..d7c64ad7 --- /dev/null +++ b/src/Objects/Structural/Geometry/MemberType.cs @@ -0,0 +1,13 @@ +namespace Objects.Structural.Geometry; + +public enum MemberType +{ + Beam, + Column, + Generic1D, + Slab, + Wall, + Generic2D, + VoidCutter1D, + VoidCutter2D +} diff --git a/src/Objects/Structural/Geometry/MemberType1D.cs b/src/Objects/Structural/Geometry/MemberType1D.cs new file mode 100644 index 00000000..fba712d1 --- /dev/null +++ b/src/Objects/Structural/Geometry/MemberType1D.cs @@ -0,0 +1,17 @@ +namespace Objects.Structural.Geometry; + +public enum MemberType1D +{ + Beam, + Column, + Generic1D +} + +public enum MemberType2D +{ + Slab, + Wall, + Generic2D, + VoidCutter1D, + VoidCutter2D +} diff --git a/src/Objects/Structural/Geometry/Node.cs b/src/Objects/Structural/Geometry/Node.cs new file mode 100644 index 00000000..b1f4893f --- /dev/null +++ b/src/Objects/Structural/Geometry/Node.cs @@ -0,0 +1,71 @@ +using Objects.Geometry; +using Objects.Structural.Properties; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Geometry; + +public class Node : Base +{ + public Node() { } + + public Node(Point basePoint) + { + this.basePoint = basePoint; + } + + [SchemaInfo( + "Node with properties", + "Creates a Speckle structural node with spring, mass and/or damper properties", + "Structural", + "Geometry" + )] + public Node( + Point basePoint, + string? name = null, + [SchemaParamInfo("If null, restraint condition defaults to free/fully released")] Restraint? restraint = null, + [SchemaParamInfo( + "If null, axis defaults to world xy (z axis defines the vertical direction, positive direction is up)" + )] + Axis? constraintAxis = null, + PropertySpring? springProperty = null, + PropertyMass? massProperty = null, + PropertyDamper? damperProperty = null + ) + { + this.basePoint = basePoint; + this.name = name; + this.restraint = restraint ?? new Restraint("RRRRRR"); + this.constraintAxis = + constraintAxis + ?? new Axis( + "Global", + AxisType.Cartesian, + new Plane(new Point(0, 0), new Vector(0, 0, 1), new Vector(1, 0, 0), new Vector(0, 1, 0)) + ); + this.springProperty = springProperty; + this.massProperty = massProperty; + this.damperProperty = damperProperty; + } + + //public int nativeId { get; set; } //equivalent to num record in GWA keyword, can be used as a unique identifier for other software + public string? 
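// Illustrative usage sketch (not introduced by this patch): a support node using the constructor
// above; when constraintAxis is null it falls back to the global XY axis that the constructor builds.
var support = new Node(new Point(0, 0), name: "N1", restraint: new Restraint("FFFRRR"));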
name { get; set; } + public Point basePoint { get; set; } + + [DetachProperty] + public Axis? constraintAxis { get; set; } // can be detachable? ex. a user-specified axis + + [DetachProperty] + public Restraint? restraint { get; set; } // can be detachable? ex. reuse pinned support condition + + [DetachProperty] + public PropertySpring? springProperty { get; set; } + + [DetachProperty] + public PropertyMass? massProperty { get; set; } + + [DetachProperty] + public PropertyDamper? damperProperty { get; set; } + + public string units { get; set; } = Units.None; +} diff --git a/src/Objects/Structural/Geometry/Restraint.cs b/src/Objects/Structural/Geometry/Restraint.cs new file mode 100644 index 00000000..a8038324 --- /dev/null +++ b/src/Objects/Structural/Geometry/Restraint.cs @@ -0,0 +1,88 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Geometry; + +public class Restraint : Base +{ + public Restraint() { } + + [SchemaInfo("Restraint (by code)", "Creates a Speckle restraint object", "Structural", "Geometry")] + public Restraint( + [SchemaParamInfo( + "A 6-character string to describe the restraint condition (F = Fixed, R = Released) for each degree of freedom - the first 3 characters represent translational degrees of freedom in the X, Y, and Z axes and the last 3 characters represent rotational degrees of freedom about the X, Y, and Z axes (ex. FFFRRR denotes a pinned condition, FFFFFF denotes a fixed condition)" + )] + string code + ) + { + this.code = code.ToUpper(); + } + + [SchemaInfo( + "Restraint (by code and stiffness)", + "Creates a Speckle restraint object (to describe support conditions with an explicit stiffness)", + "Structural", + "Geometry" + )] + public Restraint( + [SchemaParamInfo( + "A 6-character string to describe the restraint condition (F = Fixed, R = Released, K = Stiffness) for each degree of freedom - the first 3 characters represent translational degrees of freedom in the X, Y, and Z axes and the last 3 characters represent rotational degrees of freedom about the X, Y, and Z axes (ex. FFSRRR denotes fixed translation about the x and y axis, a spring stiffness for translation in the z axis and releases for all rotational degrees of freedom)" + )] + string code, + [SchemaParamInfo("Applies only if the restraint code character for translation in x is 'K'")] double stiffnessX = 0, + [SchemaParamInfo("Applies only if the restraint code character for translation in y is 'K'")] double stiffnessY = 0, + [SchemaParamInfo("Applies only if the restraint code character for translation in z is 'K'")] double stiffnessZ = 0, + [SchemaParamInfo("Applies only if the restraint code character for rotation about x is 'K'")] + double stiffnessXX = 0, + [SchemaParamInfo("Applies only if the restraint code character for rotation about y is 'K'")] + double stiffnessYY = 0, + [SchemaParamInfo("Applies only if the restraint code character for rotation about z is 'K'")] double stiffnessZZ = 0 + ) + { + this.code = code.ToUpper(); + this.stiffnessX = code[0] == 'K' || code[0] == 'k' ? stiffnessX : 0; + this.stiffnessY = code[1] == 'K' || code[1] == 'k' ? stiffnessY : 0; + this.stiffnessZ = code[2] == 'K' || code[2] == 'k' ? stiffnessZ : 0; + this.stiffnessXX = code[3] == 'K' || code[3] == 'k' ? stiffnessXX : 0; + this.stiffnessYY = code[4] == 'K' || code[4] == 'k' ? stiffnessYY : 0; + this.stiffnessZZ = code[5] == 'K' || code[5] == 'k' ? 
stiffnessZZ : 0; + } + + [SchemaInfo( + "Restraint (by enum)", + "Creates a Speckle restraint object (for pinned condition or fixed condition)", + "Structural", + "Geometry" + )] + public Restraint(RestraintType restraintType) + { + if (restraintType == RestraintType.Free) + { + code = "RRRRRR"; + } + + if (restraintType == RestraintType.Pinned) + { + code = "FFFRRR"; + } + + if (restraintType == RestraintType.Fixed) + { + code = "FFFFFF"; + } + + if (restraintType == RestraintType.Roller) + { + code = "RRFRRR"; + } + } + + public string code { get; set; } //a string to describe the restraint type for each degree of freedom - ex. FFFRRR (pin) / FFFFFF (fix) + public double stiffnessX { get; set; } + public double stiffnessY { get; set; } + public double stiffnessZ { get; set; } + public double stiffnessXX { get; set; } + public double stiffnessYY { get; set; } + public double stiffnessZZ { get; set; } + public string units { get; set; } +} diff --git a/src/Objects/Structural/Geometry/RestraintType.cs b/src/Objects/Structural/Geometry/RestraintType.cs new file mode 100644 index 00000000..cf428a6c --- /dev/null +++ b/src/Objects/Structural/Geometry/RestraintType.cs @@ -0,0 +1,24 @@ +namespace Objects.Structural.Geometry; + +public enum RestraintType +{ + Free, //Release + Pinned, + Fixed, + Roller + //Spring //flexible + //rigid, free, flexible, comp only, tens only, flex comp only, flex tens only, non lin <-- SAF + //free, fixed, fixed negative, fixed positive, spring, spring negative, spring positive, spring relative, spring relative neg, spring relative pos, non lin, friction, damped, gap <-- BHoM +} + +public enum RestraintDescription +{ + none, + all, + x, + y, + z, + xy, + xz, + yz +} diff --git a/src/Objects/Structural/Geometry/Storey.cs b/src/Objects/Structural/Geometry/Storey.cs new file mode 100644 index 00000000..62cb377e --- /dev/null +++ b/src/Objects/Structural/Geometry/Storey.cs @@ -0,0 +1,26 @@ +using Objects.BuiltElements; +using Speckle.Core.Kits; + +namespace Objects.Structural.Geometry; + +public class Storey : Level // or inherit from Base? +{ + public Storey() { } + + /// + /// A storey in the structural model + /// + /// The name of the storey + /// The elevation of the storey (along the global z-axis, ie. storey exists in the global XY plane) + [SchemaInfo( + "Storey", + "Creates a Speckle structural storey (to describe floor levels/storeys in the structural model)", + "Structural", + "Geometry" + )] + public Storey(string name, double elevation) + { + this.name = name; + this.elevation = elevation; + } +} diff --git a/src/Objects/Structural/Loading/Load.cs b/src/Objects/Structural/Loading/Load.cs new file mode 100644 index 00000000..9da845cc --- /dev/null +++ b/src/Objects/Structural/Loading/Load.cs @@ -0,0 +1,26 @@ +using Speckle.Core.Models; + +namespace Objects.Structural.Loading; + +public class Load : Base +{ + public Load() { } + + /// + /// A generalised structural load, described by a name and load case + /// + /// Name of the load + /// Load case specification for the load + public Load(string? name, LoadCase loadCase) + { + this.name = name; + this.loadCase = loadCase; + } + + public string? 
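// Quick illustration of the 6-character restraint code documented above (not introduced by this
// patch): the characters map to X, Y, Z translations followed by XX, YY, ZZ rotations.
var fixedSupport = new Restraint("FFFFFF");                 // all six degrees of freedom fixed
var pinned = new Restraint(RestraintType.Pinned);           // expands to "FFFRRR"
var sprungBase = new Restraint("KKFRRR", stiffnessX: 10e3, stiffnessY: 10e3); // 'K' slots read the matching stiffness argument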
name { get; set; } + + [DetachProperty] + public LoadCase loadCase { get; set; } + + public string units { get; set; } +} diff --git a/src/Objects/Structural/Loading/LoadBeam.cs b/src/Objects/Structural/Loading/LoadBeam.cs new file mode 100644 index 00000000..c47462e7 --- /dev/null +++ b/src/Objects/Structural/Loading/LoadBeam.cs @@ -0,0 +1,114 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Loading; + +public class LoadBeam : Load +{ + public LoadBeam() { } + + /// + /// A beam load (for 1D elements) + /// + /// The load case in which the load applies + /// A list of 1D elements to apply the load to + /// The type of loading applied + /// The direction of the load, with respect to the specified axis + /// The axis in which the direction of the load is defined + /// The magnitude of the load, either a force or moment + /// The locations of the load + /// Whether the load is projected (ie. whether the distributed load is specified as the intensity applied to the projection of the element on the surface normal to the direction of the load, like snow in an inclined roof) + /// A name or description to identify the load + [SchemaInfo("Beam Load", "Creates a Speckle structural beam (1D elem/member) load", "Structural", "Loading")] + public LoadBeam( + LoadCase loadCase, + List elements, + BeamLoadType loadType, + LoadDirection direction, + LoadAxisType loadAxis = LoadAxisType.Global, + [SchemaParamInfo( + "A list that represents load magnitude (number of values varies based on load type - Point: 1, Uniform: 1, Linear: 2, Patch: 2, Tri-linear:2)" + )] + List? values = null, + [SchemaParamInfo( + "A list that represents load locations (number of values varies based on load type - Point: 1, Uniform: null, Linear: null, Patch: 2, Tri-linear: 2)" + )] + List? positions = null, + bool isProjected = false, + string? name = null + ) + { + this.loadCase = loadCase; + this.elements = elements; + this.loadType = loadType; + this.direction = direction; + loadAxisType = loadAxis; + this.values = values; + this.positions = positions; + this.isProjected = isProjected; + this.name = name; + } + + /// + /// A beam load (for 1D elements) with a user-defined axis + /// + /// The load case in which the load applies + /// A list of 1D elements to apply the load to + /// The type of loading applied + /// The direction of the load, with respect to the specified axis + /// The axis in which the direction of the load is defined (can be a user-defined axis) + /// The magnitude of the load, either a force or moment + /// The locations of the load + /// Whether the load is projected (ie. whether the distributed load is specified as the intensity applied to the projection of the element on the surface normal to the direction of the load, like snow in an inclined roof) + /// A name or description to identify the load + [SchemaInfo( + "Beam Load (user-defined axis)", + "Creates a Speckle structural beam (1D elem/member) load (specified using a user-defined axis)", + "Structural", + "Loading" + )] + public LoadBeam( + LoadCase loadCase, + List elements, + BeamLoadType loadType, + LoadDirection direction, + Axis loadAxis, + [SchemaParamInfo( + "A list that represents load magnitude (number of values varies based on load type - Point: 1, Uniform: 1, Linear: 2, Patch: 2, Tri-linear:2)" + )] + List? 
values = null, + [SchemaParamInfo( + "A list that represents load locations (number of values varies based on load type - Point: 1, Uniform: null, Linear: null, Patch: 2, Tri-linear: 2)" + )] + List? positions = null, + bool isProjected = false, + string? name = null + ) + { + this.loadCase = loadCase; + this.elements = elements; + this.loadType = loadType; + this.direction = direction; + this.loadAxis = loadAxis; + this.values = values; + this.positions = positions; + this.isProjected = isProjected; + this.name = name; + } + + [DetachProperty, Chunkable(5000)] + public List elements { get; set; } + + public BeamLoadType loadType { get; set; } + public LoadDirection direction { get; set; } + + [DetachProperty] + public Axis loadAxis { get; set; } + + public LoadAxisType loadAxisType { get; set; } + public bool isProjected { get; set; } + public List? values { get; set; } + public List? positions { get; set; } +} diff --git a/src/Objects/Structural/Loading/LoadCase.cs b/src/Objects/Structural/Loading/LoadCase.cs new file mode 100644 index 00000000..8aa17e43 --- /dev/null +++ b/src/Objects/Structural/Loading/LoadCase.cs @@ -0,0 +1,39 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Loading; + +public class LoadCase : Base // or LoadPattern? (per CSI) +{ + public LoadCase() { } + + /// + /// A structural load case, a load case gives a way of grouping load effects together + /// + /// The name of the load case (the names of individual loads that are associated with the load case are defined elsewhere, in the loads themselves) + /// The type of the load case + /// A way of grouping load cases with the similar characteristics (ex. the source/mass source/origin of the loads) + /// The type of action of the load + /// A description of the load case + [SchemaInfo("Load Case", "Creates a Speckle structural load case", "Structural", "Loading")] + public LoadCase( + string name, + LoadType loadType, + string? group = null, + ActionType actionType = ActionType.None, + string? description = null + ) + { + this.name = name; + this.loadType = loadType; + this.group = group; + this.actionType = actionType; + this.description = description ?? ""; + } + + public string name { get; set; } //load case title, ex. "Dead load" + public LoadType loadType { get; set; } //ex. Dead load + public string? group { get; set; } //or load group, "A" + public ActionType actionType { get; set; } //ex. Permanent + public string description { get; set; } = ""; //category as alternative, ex. 
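// Illustrative usage sketch (not introduced by this patch): a uniformly distributed beam load in a
// dead-load case. BeamLoadType.Uniform expects a single magnitude in 'values'; the element list's
// generic type argument is stripped in this patch text and List<Base> is assumed here, with 'beam'
// standing in for an Element1D created elsewhere.
var deadLoad = new LoadCase("Dead load", LoadType.Dead);
var udl = new LoadBeam(
  deadLoad,
  new List<Base> { beam },
  BeamLoadType.Uniform,
  LoadDirection.Z,
  values: new List<double> { -10e3 }
);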
Offices – Cat.B, assembly area +} diff --git a/src/Objects/Structural/Loading/LoadCombination.cs b/src/Objects/Structural/Loading/LoadCombination.cs new file mode 100644 index 00000000..cd3e8108 --- /dev/null +++ b/src/Objects/Structural/Loading/LoadCombination.cs @@ -0,0 +1,45 @@ +using System; +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Loading; + +public class LoadCombination : Base //combination case +{ + public LoadCombination() { } + + /// + /// + /// + /// + /// + /// + /// + [SchemaInfo("Load Combination", "Creates a Speckle load combination", "Structural", "Loading")] + public LoadCombination( + string name, + [SchemaParamInfo("A list of load cases")] List loadCases, + [SchemaParamInfo("A list of load factors (to be mapped to provided load cases)")] List loadFactors, + CombinationType combinationType + ) + { + if (loadCases.Count != loadFactors.Count) + { + throw new ArgumentException("Number of load cases provided does not match number of load factors provided"); + } + + this.name = name; + this.loadCases = loadCases; + this.loadFactors = loadFactors; + this.combinationType = combinationType; + } + + public string name { get; set; } + + [DetachProperty] + public List loadCases { get; set; } + + public List loadFactors { get; set; } + public CombinationType combinationType { get; set; } +} diff --git a/src/Objects/Structural/Loading/LoadFace.cs b/src/Objects/Structural/Loading/LoadFace.cs new file mode 100644 index 00000000..85c5f664 --- /dev/null +++ b/src/Objects/Structural/Loading/LoadFace.cs @@ -0,0 +1,114 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Loading; + +public class LoadFace : Load +{ + public LoadFace() { } + + /// + /// A face load (for 2D elements) + /// + /// The load case in which the load applies + /// A list of 2D elements to apply the load to + /// The type of loading applied + /// The direction of the load, with respect to the specified axis + /// The axis in which the direction of the load is defined + /// The magnitude of the load, either a pressure or a force at the specified point + /// The locations of the load + /// Whether the load is projected (ie. whether the distributed load is specified as the intensity applied to the projection of the element on the surface normal to the direction of the load, like snow in an inclined roof) + /// A name or description to identify the load + [SchemaInfo("Face Load", "Creates a Speckle structural face (2D elem/member) load", "Structural", "Loading")] + public LoadFace( + LoadCase loadCase, + List elements, + FaceLoadType loadType, + LoadDirection2D direction, + LoadAxisType loadAxis = LoadAxisType.Global, + [SchemaParamInfo( + "A list that represents load magnitude (number of values varies based on load type - Uniform: 1, Variable: 4 (corner nodes), Point: 1)" + )] + List? values = null, + [SchemaParamInfo( + "A list that represents load locations (number of values varies based on load type - Uniform: null, Variable: null, Point: 2)" + )] + List? positions = null, + bool isProjected = false, + string? 
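// Illustrative usage sketch (not introduced by this patch): a linear-additive combination of two
// cases; the constructor above throws if the factor count does not match the case count. The list
// generic arguments (stripped in this patch text) are assumed to be LoadCase and double, and
// 'deadLoad' / 'liveLoad' stand in for previously created cases.
var uls = new LoadCombination(
  "1.35G + 1.5Q",
  new List<LoadCase> { deadLoad, liveLoad },
  new List<double> { 1.35, 1.5 },
  CombinationType.LinearAdd
);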
name = null + ) + { + this.loadCase = loadCase; + this.elements = elements; + this.loadType = loadType; + this.direction = direction; + loadAxisType = loadAxis; + this.values = values; + this.positions = positions; + this.isProjected = isProjected; + this.name = name; + } + + /// + /// A face load (for 2D elements) with a user-defined axis + /// + /// The load case in which the load applies + /// A list of 2D elements to apply the load to + /// The type of loading applied + /// The direction of the load, with respect to the specified axis + /// The axis in which the direction of the load is defined (can be a user-defined axis) + /// The magnitude of the load, either a pressure or a force at the specified point + /// The locations of the load + /// Whether the load is projected (ie. whether the distributed load is specified as the intensity applied to the projection of the element on the surface normal to the direction of the load, like snow in an inclined roof) + /// A name or description to identify the load + [SchemaInfo( + "Face Load (user-defined axis)", + "Creates a Speckle structural face (2D elem/member) load (specified using a user-defined axis)", + "Structural", + "Loading" + )] + public LoadFace( + LoadCase loadCase, + List elements, + FaceLoadType loadType, + LoadDirection2D direction, + Axis loadAxis, + [SchemaParamInfo( + "A list that represents load magnitude (number of values varies based on load type - Uniform: 1, Variable: 4 (corner nodes), Point: 1)" + )] + List? values = null, + [SchemaParamInfo( + "A list that represents load locations (number of values varies based on load type - Uniform: null, Variable: null, Point: 2)" + )] + List? positions = null, + bool isProjected = false, + string? name = null + ) + { + this.loadCase = loadCase; + this.elements = elements; + this.loadType = loadType; + this.direction = direction; + this.loadAxis = loadAxis; + this.values = values; + this.positions = positions; + this.isProjected = isProjected; + this.name = name; + } + + [DetachProperty, Chunkable(5000)] + public List elements { get; set; } + + public FaceLoadType loadType { get; set; } + public LoadDirection2D direction { get; set; } + + [DetachProperty] + public Axis loadAxis { get; set; } + + public LoadAxisType loadAxisType { get; set; } + public bool isProjected { get; set; } + public List? values { get; set; } + public List? positions { get; set; } +} diff --git a/src/Objects/Structural/Loading/LoadGravity.cs b/src/Objects/Structural/Loading/LoadGravity.cs new file mode 100644 index 00000000..0fd0c63d --- /dev/null +++ b/src/Objects/Structural/Loading/LoadGravity.cs @@ -0,0 +1,88 @@ +using System.Collections.Generic; +using Objects.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Loading; + +public class LoadGravity : Load +{ + public LoadGravity() { } + + /// + /// A gravity load (applied to all elements) + /// + /// A name or description to identify the load + /// The load case in which the load applies + /// A list of factors that apply on the “magnitude" of gravity (in terms of g, accleration of gravity) in each of the global axis (x, y and z) directions. Ex. For a model with global z-axis vertically upwards, the gravity factors of (0, 0, −1) represent a normal vertical gravity load on the structure + [SchemaInfo( + "Gravity Load (all elements)", + "Creates a Speckle structural gravity load (applied to all nodes and elements)", + "Structural", + "Loading" + )] + public LoadGravity(LoadCase loadCase, Vector? 
gravityFactors = null, string? name = null) + { + this.loadCase = loadCase; + this.gravityFactors = gravityFactors ?? new Vector(0, 0, -1); + this.name = name; + } + + /// + /// A gravity load (applied to the specified elements) + /// + /// A name or description to identify the load + /// The load case in which the load applies + /// A list of elements to apply the load to + /// A list of factors that apply on the “magnitude" of gravity (in terms of g, accleration of gravity) in each of the global axis (x, y and z) directions. Ex. For a model with global z-axis vertically upwards, the gravity factors of (0, 0, −1) represent a normal vertical gravity load on the structure + [SchemaInfo( + "Gravity Load (specified elements)", + "Creates a Speckle structural gravity load (applied to specified elements)", + "Structural", + "Loading" + )] + public LoadGravity(LoadCase loadCase, List elements, Vector? gravityFactors = null, string? name = null) + { + this.elements = elements; + this.loadCase = loadCase; + this.gravityFactors = gravityFactors ?? new Vector(0, 0, -1); + this.name = name; + } + + /// + /// A gravity load (applied to the specified elements and nodes) + /// + /// A name or description to identify the load + /// The load case in which the load applies + /// A list of elements to apply the load to + /// A list of nodes to apply the load to + /// A list of factors that apply on the “magnitude" of gravity (in terms of g, accleration of gravity) in each of the global axis (x, y and z) directions. Ex. For a model with global z-axis vertically upwards, the gravity factors of (0, 0, −1) represent a normal vertical gravity load on the structure + [SchemaInfo( + "Gravity Load (specified elements and nodes)", + "Creates a Speckle structural gravity load (applied to specified nodes and elements)", + "Structural", + "Loading" + )] + public LoadGravity( + LoadCase loadCase, + List elements, + List nodes, + Vector? gravityFactors = null, + string? name = null + ) + { + this.elements = elements; + this.nodes = nodes; + this.loadCase = loadCase; + this.gravityFactors = gravityFactors ?? new Vector(0, 0, -1); + this.name = name; + } + + [DetachProperty, Chunkable(5000)] + public List elements { get; set; } + + [DetachProperty, Chunkable(5000)] + public List nodes { get; set; } + + public Vector gravityFactors { get; set; } // a normal vertical gravity load is Z = -1 +} diff --git a/src/Objects/Structural/Loading/LoadNode.cs b/src/Objects/Structural/Loading/LoadNode.cs new file mode 100644 index 00000000..d0bb9bb9 --- /dev/null +++ b/src/Objects/Structural/Loading/LoadNode.cs @@ -0,0 +1,70 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Loading; + +public class LoadNode : Load +{ + public LoadNode() { } + + /// + /// A node load (applied in the global axis) + /// + /// The load case in which the load applies + /// >A list of nodes to apply the load to + /// The direction of the loading, relative to the specified axis + /// The magnitude of the load, either a force or moment + /// A name or description to identify the load/// + [SchemaInfo("Node Load", "Creates a Speckle node load", "Structural", "Loading")] + public LoadNode(LoadCase loadCase, List nodes, LoadDirection direction, double value, string? 
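// Illustrative usage sketch (not introduced by this patch): because gravityFactors defaults to
// (0, 0, -1), ordinary self-weight for a Z-up model only needs a load case ('deadLoad' stands in
// for a previously created LoadCase).
var selfWeight = new LoadGravity(deadLoad, name: "Self-weight");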
name = null) + { + this.name = name; + this.loadCase = loadCase; + this.nodes = nodes; + this.direction = direction; + this.value = value; + } + + /// + /// A node load (based on a user-defined axis) + /// + /// The load case in which the load applies + /// >A list of nodes to apply the load to + /// The axis in which the load is applied + /// The direction of the loading, relative to the specified axis + /// The magnitude of the load, either a force or moment + /// A name or description to identify the load/// + [SchemaInfo( + "Node Load (user-defined axis)", + "Creates a Speckle node load (specifed using a user-defined axis)", + "Structural", + "Loading" + )] + public LoadNode( + LoadCase loadCase, + List nodes, + Axis loadAxis, + LoadDirection direction, + double value, + string? name = null + ) + { + this.name = name; + this.loadCase = loadCase; + this.nodes = nodes; + this.loadAxis = loadAxis; + this.direction = direction; + this.value = value; + } + + [DetachProperty, Chunkable(5000)] + public List nodes { get; set; } + + [DetachProperty] + public Axis loadAxis { get; set; } + + public LoadDirection direction { get; set; } + public double value { get; set; } //a force or a moment, displacement (translation or rotation) and settlement to be covered in other classes +} diff --git a/src/Objects/Structural/Loading/Loads.cs b/src/Objects/Structural/Loading/Loads.cs new file mode 100644 index 00000000..c376cd47 --- /dev/null +++ b/src/Objects/Structural/Loading/Loads.cs @@ -0,0 +1,74 @@ +namespace Objects.Structural.Loading; + +public enum LoadType +{ + None, + Dead, + SuperDead, + Soil, + Live, + LiveRoof, + ReducibleLive, + Wind, + Snow, + Rain, + Thermal, + Notional, + Prestress, + Equivalent, + Accidental, + SeismicRSA, + SeismicAccTorsion, + SeismicStatic, + Other +} + +public enum ActionType +{ + None, + Permanent, + Variable, + Accidental +} + +public enum BeamLoadType +{ + Point, + Uniform, + Linear, + Patch, + TriLinear +} + +public enum FaceLoadType +{ + Constant, + Variable, + Point +} + +public enum LoadDirection2D +{ + X, + Y, + Z +} + +public enum LoadDirection +{ + X, + Y, + Z, + XX, + YY, + ZZ +} + +public enum CombinationType +{ + LinearAdd, + Envelope, + AbsoluteAdd, + SRSS, + RangeAdd // what's this? +} diff --git a/src/Objects/Structural/MaterialType.cs b/src/Objects/Structural/MaterialType.cs new file mode 100644 index 00000000..74fe1a0f --- /dev/null +++ b/src/Objects/Structural/MaterialType.cs @@ -0,0 +1,17 @@ +namespace Objects.Structural; + +public enum MaterialType +{ + Concrete, + Steel, + Timber, + Aluminium, + Masonry, + FRP, + Glass, + Fabric, + Rebar, + Tendon, + ColdFormed, + Other +} diff --git a/src/Objects/Structural/Materials/Concrete.cs b/src/Objects/Structural/Materials/Concrete.cs new file mode 100644 index 00000000..8266b780 --- /dev/null +++ b/src/Objects/Structural/Materials/Concrete.cs @@ -0,0 +1,62 @@ +using Speckle.Core.Kits; + +namespace Objects.Structural.Materials; + +public class Concrete : StructuralMaterial +{ + public Concrete() { } + + [SchemaInfo( + "Concrete", + "Creates a Speckle structural material for concrete (to be used in structural analysis models)", + "Structural", + "Materials" + )] + public Concrete( + string name, + string? grade = null, + string? designCode = null, + string? 
codeYear = null, + double elasticModulus = 0, + double compressiveStrength = 0, + double tensileStrength = 0, + double flexuralStrength = 0, + double maxCompressiveStrain = 0, + double maxTensileStrain = 0, + double maxAggregateSize = 0, + bool lightweight = false, + double poissonsRatio = 0, + double shearModulus = 0, + double density = 0, + double thermalExpansivity = 0, + double dampingRatio = 0 + ) + { + this.name = name; + this.grade = grade; + materialType = MaterialType.Concrete; + this.designCode = designCode; + this.codeYear = codeYear; + this.elasticModulus = elasticModulus; + this.compressiveStrength = compressiveStrength; + this.tensileStrength = tensileStrength; + this.flexuralStrength = flexuralStrength; + this.maxCompressiveStrain = maxCompressiveStrain; + this.maxTensileStrain = maxTensileStrain; + this.maxAggregateSize = maxAggregateSize; + this.lightweight = lightweight; + this.poissonsRatio = poissonsRatio; + this.shearModulus = shearModulus; + this.density = density; + this.thermalExpansivity = thermalExpansivity; + this.dampingRatio = dampingRatio; + } + + public double compressiveStrength { get; set; } //forgo using "strength" property in Material class + public double tensileStrength { get; set; } //design calc impacts + public double flexuralStrength { get; set; } //design calc impacts + public double maxCompressiveStrain { get; set; } //failure strain + public double maxTensileStrain { get; set; } + public double maxAggregateSize { get; set; } + public bool lightweight { get; set; } //whether or not it's a lightweight concrete +} diff --git a/src/Objects/Structural/Materials/Steel.cs b/src/Objects/Structural/Materials/Steel.cs new file mode 100644 index 00000000..4523da1f --- /dev/null +++ b/src/Objects/Structural/Materials/Steel.cs @@ -0,0 +1,50 @@ +using Speckle.Core.Kits; + +namespace Objects.Structural.Materials; + +public class Steel : StructuralMaterial +{ + public Steel() { } + + [SchemaInfo( + "Steel", + "Creates a Speckle structural material for steel (to be used in structural analysis models)", + "Structural", + "Materials" + )] + public Steel( + string name, + string? grade = null, + string? designCode = null, + string? 
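// Illustrative usage sketch (not introduced by this patch): a concrete material built with the
// constructor above. The magnitudes are indicative only and assume N-m-kg (Pa) units, which are
// dictated by the host model rather than by this class.
var c30 = new Concrete(
  "C30/37",
  grade: "C30/37",
  elasticModulus: 33e9,
  compressiveStrength: 30e6,
  tensileStrength: 2.9e6,
  poissonsRatio: 0.2,
  density: 2500,
  thermalExpansivity: 10e-6
);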
codeYear = null, + double elasticModulus = 0, + double yieldStrength = 0, + double ultimateStrength = 0, + double maxStrain = 0, + double poissonsRatio = 0, + double shearModulus = 0, + double density = 0, + double alpha = 0, + double dampingRatio = 0 + ) + { + this.name = name; + this.grade = grade; + materialType = MaterialType.Steel; + this.designCode = designCode; + this.codeYear = codeYear; + this.elasticModulus = elasticModulus; + this.yieldStrength = yieldStrength; + this.ultimateStrength = ultimateStrength; + this.maxStrain = maxStrain; + this.poissonsRatio = poissonsRatio; + this.shearModulus = shearModulus; + this.density = density; + this.dampingRatio = dampingRatio; + } + + public double yieldStrength { get; set; } //or yieldStress + public double ultimateStrength { get; set; } //ultimateStress + public double maxStrain { get; set; } //failureStrain + public double strainHardeningModulus { get; set; } +} diff --git a/src/Objects/Structural/Materials/StructuralMaterial.cs b/src/Objects/Structural/Materials/StructuralMaterial.cs new file mode 100644 index 00000000..78eb813c --- /dev/null +++ b/src/Objects/Structural/Materials/StructuralMaterial.cs @@ -0,0 +1,81 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Materials; + +public class StructuralMaterial : Base +{ + // add carbon/environmental parameters? + + public StructuralMaterial() { } + + [SchemaInfo("Structural Material", "Creates a Speckle structural material", "Structural", "Materials")] + public StructuralMaterial( + string name, + MaterialType type, + string? grade = null, + string? designCode = null, + string? codeYear = null + ) + { + this.name = name; + materialType = type; + this.grade = grade; + this.designCode = designCode; + this.codeYear = codeYear; + } + + [SchemaInfo( + "Structural Material (with properties)", + "Creates a Speckle structural material with (isotropic) properties", + "Structural", + "Materials" + )] + public StructuralMaterial( + string name, + MaterialType type, + string? grade = null, + string? designCode = null, + string? codeYear = null, + double strength = 0, + double elasticModulus = 0, + double poissonsRatio = 0, + double shearModulus = 0, + double rho = 0, + double alpha = 0, + double dampingRatio = 0, + double materialSafetyFactor = 0, + double cost = 0 + ) + { + this.name = name; + this.grade = grade; + materialType = type; + this.designCode = designCode; + this.codeYear = codeYear; + this.strength = strength; + this.elasticModulus = elasticModulus; + this.poissonsRatio = poissonsRatio; + this.shearModulus = shearModulus; + density = rho; + thermalExpansivity = alpha; + this.dampingRatio = dampingRatio; + this.materialSafetyFactor = materialSafetyFactor; + this.cost = cost; + } + + public string name { get; set; } + public string? grade { get; set; } //ex. 350W(G40.21 Plate), could be set in name too + public MaterialType materialType { get; set; } + public string? designCode { get; set; } + public string? codeYear { get; set; } + public double strength { get; set; } + public double elasticModulus { get; set; } // E + public double poissonsRatio { get; set; } // nu + public double shearModulus { get; set; } // G + public double density { get; set; } // rho + public double thermalExpansivity { get; set; } // alpha, thermal coefficient of expansion + public double dampingRatio { get; set; } // zeta, material damping fraction + public double cost { get; set; } // material rate (ie. 
$/weight) + public double materialSafetyFactor { get; set; } //resistance factor +} diff --git a/src/Objects/Structural/Materials/Timber.cs b/src/Objects/Structural/Materials/Timber.cs new file mode 100644 index 00000000..d608ce03 --- /dev/null +++ b/src/Objects/Structural/Materials/Timber.cs @@ -0,0 +1,47 @@ +using Speckle.Core.Kits; + +namespace Objects.Structural.Materials; + +public class Timber : StructuralMaterial +{ + public Timber() { } + + [SchemaInfo( + "Timber", + "Creates a Speckle structural material for timber (to be used in structural analysis models)", + "Structural", + "Materials" + )] + public Timber( + string name, + string? species = null, + string? grade = null, + string? designCode = null, + string? codeYear = null, + double strength = 0, + double elasticModulus = 0, + double poissonsRatio = 0, + double shearModulus = 0, + double density = 0, + double thermalExpansivity = 0, + double dampingRatio = 0 + ) + { + this.name = name; + this.grade = grade; + this.species = species; + materialType = MaterialType.Timber; + this.designCode = designCode; + this.codeYear = codeYear; + this.strength = strength; + this.elasticModulus = elasticModulus; + this.poissonsRatio = poissonsRatio; + this.shearModulus = shearModulus; + this.density = density; + this.thermalExpansivity = thermalExpansivity; + this.dampingRatio = dampingRatio; + } + + //missing timber-specific properties? parallel to grain, perpendicular to grain + public string? species { get; set; } +} diff --git a/src/Objects/Structural/Properties/Profiles/SectionProfile.cs b/src/Objects/Structural/Properties/Profiles/SectionProfile.cs new file mode 100644 index 00000000..cf933679 --- /dev/null +++ b/src/Objects/Structural/Properties/Profiles/SectionProfile.cs @@ -0,0 +1,251 @@ +using System.Collections.Generic; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Properties.Profiles; + +public class SectionProfile : Base //section profile description +{ + public SectionProfile() { } + + public SectionProfile( + string name, + ShapeType shapeType, + double area, + double Iyy, + double Izz, + double J, + double Ky, + double Kz, + double weight + ) + { + this.name = name; + this.shapeType = shapeType; + this.area = area; + this.Iyy = Iyy; + this.Izz = Izz; + this.J = J; + this.Ky = Ky; + this.Kz = Kz; + this.weight = weight; + } + + public string name { get; set; } + public virtual ShapeType shapeType { get; set; } = ShapeType.Undefined; + + public string shapeName => shapeType.ToString(); + + public double area { get; set; } + + //Moment of inertia about the major axis + public double Iyy { get; set; } + public double Izz { get; set; } + public double J { get; set; } + public double Ky { get; set; } + public double Kz { get; set; } + public double weight { get; set; } //section weight per unit length, ex. kg/m (arguably belongs with the material definition rather than the section profile)
+ public string units { get; set; } +} + +public class Rectangular : SectionProfile +{ + public Rectangular() { } + + [SchemaInfo( + "Rectangular", + "Creates a Speckle structural rectangular section profile", + "Structural", + "Section Profile" + )] + public Rectangular(string name, double depth, double width, double webThickness = 0, double flangeThickness = 0) + { + this.name = name; + this.depth = depth; + this.width = width; + this.webThickness = webThickness; + this.flangeThickness = flangeThickness; + } + + public double depth { get; set; } + public double width { get; set; } + public double webThickness { get; set; } // tw + public double flangeThickness { get; set; } // tf + public override ShapeType shapeType { get; set; } = ShapeType.Rectangular; +} + +public class Circular : SectionProfile +{ + public Circular() { } + + [SchemaInfo("Circular", "Creates a Speckle structural circular section profile", "Structural", "Section Profile")] + public Circular(string name, double radius, double wallThickness = 0) + { + this.name = name; + this.radius = radius; + this.wallThickness = wallThickness; + } + + public double radius { get; set; } + public double wallThickness { get; set; } + public override ShapeType shapeType { get; set; } = ShapeType.Circular; +} + +public class ISection : SectionProfile +{ + public ISection() { } + + [SchemaInfo("ISection", "Creates a Speckle structural I section profile", "Structural", "Section Profile")] + public ISection(string name, double depth, double width, double webThickness, double flangeThickness) + { + this.name = name; + this.depth = depth; + this.width = width; + this.webThickness = webThickness; + this.flangeThickness = flangeThickness; + } + + public double depth { get; set; } + public double width { get; set; } + public double webThickness { get; set; } + public double flangeThickness { get; set; } + public override ShapeType shapeType { get; set; } = ShapeType.I; +} + +public class Tee : SectionProfile +{ + public Tee() { } + + [SchemaInfo("Tee", "Creates a Speckle structural Tee section profile", "Structural", "Section Profile")] + public Tee(string name, double depth, double width, double webThickness, double flangeThickness) + { + this.name = name; + this.depth = depth; + this.width = width; + this.webThickness = webThickness; + this.flangeThickness = flangeThickness; + } + + public double depth { get; set; } + public double width { get; set; } + public double webThickness { get; set; } + public double flangeThickness { get; set; } + public override ShapeType shapeType { get; set; } = ShapeType.Tee; +} + +public class Angle : SectionProfile +{ + public Angle() { } + + [SchemaInfo("Angle", "Creates a Speckle structural angle section profile", "Structural", "Section Profile")] + public Angle(string name, double depth, double width, double webThickness, double flangeThickness) + { + this.name = name; + this.depth = depth; + this.width = width; + this.webThickness = webThickness; + this.flangeThickness = flangeThickness; + } + + public double depth { get; set; } + public double width { get; set; } + public double webThickness { get; set; } + public double flangeThickness { get; set; } + public override ShapeType shapeType { get; set; } = ShapeType.Angle; +} + +public class Channel : SectionProfile +{ + public Channel() { } + + [SchemaInfo("Channel", "Creates a Speckle structural channel section profile", "Structural", "Section Profile")] + public Channel(string name, double depth, double width, double webThickness, double flangeThickness) + 
{ + this.name = name; + this.depth = depth; + this.width = width; + this.webThickness = webThickness; + this.flangeThickness = flangeThickness; + } + + public double depth { get; set; } + public double width { get; set; } + public double webThickness { get; set; } + public double flangeThickness { get; set; } + public override ShapeType shapeType { get; set; } = ShapeType.Channel; +} + +public class Perimeter : SectionProfile +{ + public Perimeter() { } + + [SchemaInfo( + "Perimeter", + "Creates a Speckle structural section profile defined by a perimeter curve and, if applicable, a list of void curves", + "Structural", + "Section Profile" + )] + public Perimeter(string name, ICurve outline, List? voids = null) + { + this.name = name; + this.outline = outline; + this.voids = voids ?? new(); + } + + public ICurve outline { get; set; } + public List voids { get; set; } = new(); +} + +public class Catalogue : SectionProfile +{ + public Catalogue() { } + + [SchemaInfo( + "Catalogue (by description)", + "Creates a Speckle structural section profile based on a catalogue section description", + "Structural", + "Section Profile" + )] + public Catalogue(string description) + { + this.description = description; + } + + [SchemaInfo("Catalogue", "Creates a Speckle structural section profile", "Structural", "Section Profile")] + public Catalogue(string name, string catalogueName, string sectionType, string sectionName) + { + this.name = name; + this.catalogueName = catalogueName; + this.sectionType = sectionType; + this.sectionName = sectionName; + } + + public string description { get; set; } // a description string for a catalogue section, per a to be defined convention for industry-typical, commonly manufactured sections - SAF Formcodes, Oasys profiles? + public string catalogueName { get; set; } // ex. AISC, could be enum value + public string sectionType { get; set; } // ex. W shapes, could be enum value + public string sectionName { get; set; } // ex. 
W44x335, could be enum value +} + +public class Explicit : SectionProfile +{ + public Explicit() { } + + [SchemaInfo( + "Explicit", + "Creates a Speckle structural section profile based on explicitly defining geometric properties", + "Structural", + "Section Profile" + )] + public Explicit(string name, double area, double Iyy, double Izz, double J, double Ky, double Kz) + { + this.name = name; + this.area = area; + this.Iyy = Iyy; + this.Izz = Izz; + this.J = J; + this.Ky = Ky; + this.Kz = Kz; + } + + public override ShapeType shapeType { get; set; } = ShapeType.Explicit; +} diff --git a/src/Objects/Structural/Properties/Property.cs b/src/Objects/Structural/Properties/Property.cs new file mode 100644 index 00000000..1a82a512 --- /dev/null +++ b/src/Objects/Structural/Properties/Property.cs @@ -0,0 +1,17 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Properties; + +public class Property : Base +{ + public Property() { } + + [SchemaInfo("Property", "Creates a Speckle structural property", "Structural", "Properties")] + public Property(string name) + { + this.name = name; + } + + public string name { get; set; } +} diff --git a/src/Objects/Structural/Properties/Property1D.cs b/src/Objects/Structural/Properties/Property1D.cs new file mode 100644 index 00000000..e289fce1 --- /dev/null +++ b/src/Objects/Structural/Properties/Property1D.cs @@ -0,0 +1,38 @@ +using Objects.Structural.Geometry; +using Objects.Structural.Materials; +using Objects.Structural.Properties.Profiles; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Properties; + +public class Property1D : Property //SectionProperty as alt class name +{ + public Property1D() { } + + [SchemaInfo("Property1D (by name)", "Creates a Speckle structural 1D element property", "Structural", "Properties")] + public Property1D(string name) + { + this.name = name; + } + + [SchemaInfo("Property1D", "Creates a Speckle structural 1D element property", "Structural", "Properties")] + public Property1D(string name, StructuralMaterial? material, SectionProfile profile) + { + this.name = name; + this.material = material; + this.profile = profile; + } + + public MemberType memberType { get; set; } + + [DetachProperty] + public StructuralMaterial? material { get; set; } + + [DetachProperty] + public SectionProfile profile { get; set; } //section description + + public BaseReferencePoint referencePoint { get; set; } + public double offsetY { get; set; } //offset from reference point + public double offsetZ { get; set; } //offset from reference point +} diff --git a/src/Objects/Structural/Properties/Property2D.cs b/src/Objects/Structural/Properties/Property2D.cs new file mode 100644 index 00000000..157f692a --- /dev/null +++ b/src/Objects/Structural/Properties/Property2D.cs @@ -0,0 +1,42 @@ +using Objects.Structural.Geometry; +using Objects.Structural.Materials; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Properties; + +public class Property2D : Property +{ + public Property2D() { } + + [SchemaInfo("Property2D (by name)", "Creates a Speckle structural 2D element property", "Structural", "Properties")] + public Property2D(string name) + { + this.name = name; + } + + [SchemaInfo("Property2D", "Creates a Speckle structural 2D element property", "Structural", "Properties")] + public Property2D(string name, StructuralMaterial? 
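// Illustrative usage sketch (not introduced by this patch): the two ways of describing a catalogue
// section defined above, and a 1D property that consumes one of them.
var byDescription = new Catalogue("HE300B");
var byNames = new Catalogue("W44x335", "AISC", "W shapes", "W44x335");
var memberProperty = new Property1D("W44x335", new Steel("S355"), byNames);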
material, PropertyType2D type, double thickness) + { + this.name = name; + this.material = material; + this.type = type; + this.thickness = thickness; + } + + public PropertyType2D type { get; set; } + public double thickness { get; set; } //also thickness type? ex. waffle vs constant + + [DetachProperty] + public StructuralMaterial? material { get; set; } + + [DetachProperty] + public Axis orientationAxis { get; set; } + + public ReferenceSurface refSurface { get; set; } //system plane + public double zOffset { get; set; } //relative to reference surface + public double modifierInPlane { get; set; } + public double modifierBending { get; set; } + public double modifierShear { get; set; } + public double modifierVolume { get; set; } +} diff --git a/src/Objects/Structural/Properties/Property3D.cs b/src/Objects/Structural/Properties/Property3D.cs new file mode 100644 index 00000000..638d5fa0 --- /dev/null +++ b/src/Objects/Structural/Properties/Property3D.cs @@ -0,0 +1,33 @@ +using Objects.Structural.Geometry; +using Objects.Structural.Materials; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Properties; + +public class Property3D : Property +{ + public Property3D() { } + + [SchemaInfo("Property3D (by name)", "Creates a Speckle structural 3D element property", "Structural", "Properties")] + public Property3D(string name) + { + this.name = name; + } + + [SchemaInfo("Property3D", "Creates a Speckle structural 3D element property", "Structural", "Properties")] + public Property3D(string name, PropertyType3D type, StructuralMaterial material) + { + this.name = name; + this.type = type; + this.material = material; + } + + public PropertyType3D type { get; set; } + + [DetachProperty] + public StructuralMaterial material { get; set; } + + [DetachProperty] + public Axis orientationAxis { get; set; } +} diff --git a/src/Objects/Structural/Properties/PropertyDamper.cs b/src/Objects/Structural/Properties/PropertyDamper.cs new file mode 100644 index 00000000..5232b279 --- /dev/null +++ b/src/Objects/Structural/Properties/PropertyDamper.cs @@ -0,0 +1,49 @@ +using Speckle.Core.Kits; + +namespace Objects.Structural.Properties; + +public class PropertyDamper : Property +{ + public PropertyDamper() { } + + [SchemaInfo("PropertyDamper", "Creates a Speckle structural damper property", "Structural", "Properties")] + public PropertyDamper(string name) + { + this.name = name; + } + + [SchemaInfo( + "PropertyDamper (general)", + "Creates a Speckle structural damper property (for 6 degrees of freedom)", + "Structural", + "Properties" + )] + public PropertyDamper( + string name, + PropertyTypeDamper damperType, + double dampingX = 0, + double dampingY = 0, + double dampingZ = 0, + double dampingXX = 0, + double dampingYY = 0, + double dampingZZ = 0 + ) + { + this.name = name; + this.damperType = damperType; + this.dampingX = dampingX; + this.dampingY = dampingY; + this.dampingZ = dampingZ; + this.dampingXX = dampingXX; + this.dampingYY = dampingYY; + this.dampingZZ = dampingZZ; + } + + public PropertyTypeDamper damperType { get; set; } + public double dampingX { get; set; } + public double dampingY { get; set; } + public double dampingZ { get; set; } + public double dampingXX { get; set; } + public double dampingYY { get; set; } + public double dampingZZ { get; set; } +} diff --git a/src/Objects/Structural/Properties/PropertyMass.cs b/src/Objects/Structural/Properties/PropertyMass.cs new file mode 100644 index 00000000..5b85f0b7 --- /dev/null +++ 
b/src/Objects/Structural/Properties/PropertyMass.cs @@ -0,0 +1,56 @@ +using Speckle.Core.Kits; + +namespace Objects.Structural.Properties; + +public class PropertyMass : Property // nodal constraint axis of the node assumed to be mass property axis +{ + public PropertyMass() { } + + [SchemaInfo("PropertyMass", "Creates a Speckle structural mass property", "Structural", "Properties")] + public PropertyMass(string name) + { + this.name = name; + } + + [SchemaInfo("PropertyMass (general)", "Creates a Speckle structural mass property", "Structural", "Properties")] + public PropertyMass( + string name, + double mass, + double inertiaXX = 0, + double inertiaYY = 0, + double inertiaZZ = 0, + double inertiaXY = 0, + double inertiaYZ = 0, + double inertiaZX = 0, + bool massModified = false, + double massModifierX = 0, + double massModifierY = 0, + double massModifierZ = 0 + ) + { + this.name = name; + this.mass = mass; + this.inertiaXX = inertiaXX; + this.inertiaYY = inertiaYY; + this.inertiaZZ = inertiaZZ; + this.inertiaXY = inertiaXY; + this.inertiaYZ = inertiaYZ; + this.inertiaZX = inertiaZX; + this.massModified = massModified; + this.massModifierX = massModifierX; + this.massModifierY = massModifierY; + this.massModifierZ = massModifierZ; + } + + public double mass { get; set; } + public double inertiaXX { get; set; } + public double inertiaYY { get; set; } + public double inertiaZZ { get; set; } + public double inertiaXY { get; set; } + public double inertiaYZ { get; set; } + public double inertiaZX { get; set; } + public bool massModified { get; set; } + public double massModifierX { get; set; } + public double massModifierY { get; set; } + public double massModifierZ { get; set; } +} diff --git a/src/Objects/Structural/Properties/PropertySpring.cs b/src/Objects/Structural/Properties/PropertySpring.cs new file mode 100644 index 00000000..8ec35a12 --- /dev/null +++ b/src/Objects/Structural/Properties/PropertySpring.cs @@ -0,0 +1,107 @@ +using Speckle.Core.Kits; + +namespace Objects.Structural.Properties; + +public class PropertySpring : Property +{ + public PropertySpring() { } + + [SchemaInfo("PropertySpring", "Creates a Speckle structural spring property", "Structural", "Properties")] + public PropertySpring(string name) + { + this.name = name; + } + + [SchemaInfo( + "PropertySpring (linear/elastic)", + "Creates a Speckle structural spring property (linear/elastic spring)", + "Structural", + "Properties" + )] + public PropertySpring( + string name, + double stiffnessX = 0, + double stiffnessY = 0, + double stiffnessZ = 0, + double stiffnessXX = 0, + double stiffnessYY = 0, + double stiffnessZZ = 0, + double dampingRatio = 0 + ) + { + this.name = name; + springType = PropertyTypeSpring.General; + this.stiffnessX = stiffnessX; + this.stiffnessY = stiffnessY; + this.stiffnessZ = stiffnessZ; + this.stiffnessXX = stiffnessXX; + this.stiffnessYY = stiffnessYY; + this.stiffnessZZ = stiffnessZZ; + this.dampingRatio = dampingRatio; + } + + [SchemaInfo( + "PropertySpring (non-linear)", + "Creates a Speckle structural spring property (non-linear spring)", + "Structural", + "Properties" + )] + public PropertySpring( + string name, + double springCurveX = 0, + double stiffnessX = 0, + double springCurveY = 0, + double stiffnessY = 0, + double springCurveZ = 0, + double stiffnessZ = 0, + double springCurveXX = 0, + double stiffnessXX = 0, + double springCurveYY = 0, + double stiffnessYY = 0, + double springCurveZZ = 0, + double stiffnessZZ = 0, + double dampingRatio = 0 + ) + { + this.name = name; + 
springType = PropertyTypeSpring.General; + this.springCurveX = springCurveX; + this.springCurveY = springCurveY; + this.springCurveZ = springCurveZ; + this.springCurveXX = springCurveXX; + this.springCurveYY = springCurveYY; + this.springCurveZZ = springCurveZZ; + this.stiffnessX = springCurveX == 0 ? stiffnessX : 0; + this.stiffnessY = springCurveY == 0 ? stiffnessY : 0; + this.stiffnessZ = springCurveZ == 0 ? stiffnessZ : 0; + this.stiffnessXX = springCurveXX == 0 ? stiffnessXX : 0; + this.stiffnessYY = springCurveYY == 0 ? stiffnessYY : 0; + this.stiffnessZZ = springCurveZZ == 0 ? stiffnessZZ : 0; + this.dampingRatio = dampingRatio; + } + + public PropertyTypeSpring springType { get; set; } + public double springCurveX { get; set; } //if 0 spring is elastic, otherwise refers to a material curve by number + public double stiffnessX { get; set; } + public double springCurveY { get; set; } //if 0 spring is elastic, otherwise refers to a material curve by number + public double stiffnessY { get; set; } + public double springCurveZ { get; set; } //if 0 spring is elastic, otherwise refers to a material curve by number + public double stiffnessZ { get; set; } + public double springCurveXX { get; set; } //if 0 spring is elastic, otherwise refers to a material curve by number + public double stiffnessXX { get; set; } + public double springCurveYY { get; set; } //if 0 spring is elastic, otherwise refers to a material curve by number + public double stiffnessYY { get; set; } + public double springCurveZZ { get; set; } //if 0 spring is elastic, otherwise refers to a material curve by number + public double stiffnessZZ { get; set; } + public double dampingRatio { get; set; } + public double dampingX { get; set; } //is this needed? springType can't be set to DAMPER + public double dampingY { get; set; } //is this needed? springType can't be set to DAMPER + public double dampingZ { get; set; } //is this needed? springType can't be set to DAMPER + public double dampingXX { get; set; } //is this needed? springType can't be set to DAMPER + public double dampingYY { get; set; } //is this needed? springType can't be set to DAMPER + public double dampingZZ { get; set; } //is this needed? springType can't be set to DAMPER + public double matrix { get; set; } //refers to spring matrix record. 
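As a usage sketch of the section profile and property constructors defined above (only types introduced in this patch are used; the catalogue designation, names and stiffness values are illustrative sample data, and the optional material is left null for brevity):

using Objects.Structural;
using Objects.Structural.Properties;
using Objects.Structural.Properties.Profiles;

// A catalogue section profile identified by catalogue, section type and designation (sample values).
var profile = new Catalogue("W44x335", "AISC", "W shapes", "W44x335");

// A 1D element property wrapping that profile; the nullable StructuralMaterial is omitted here.
var beamProperty = new Property1D("W44x335 beam", null, profile);

// A spring property built with the name-only constructor, then configured via property initializers
// (this sidesteps choosing between the two all-optional-parameter constructors above).
var support = new PropertySpring("pad spring")
{
  springType = PropertyTypeSpring.General,
  stiffnessX = 2.0e5, // sample stiffness values
  stiffnessZ = 5.0e4
};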
+ public double positiveLockup { get; set; } + public double negativeLockup { get; set; } + public double frictionCoefficient { get; set; } +} diff --git a/src/Objects/Structural/PropertyType.cs b/src/Objects/Structural/PropertyType.cs new file mode 100644 index 00000000..b17f28a3 --- /dev/null +++ b/src/Objects/Structural/PropertyType.cs @@ -0,0 +1,79 @@ +namespace Objects.Structural; + +public enum PropertyType2D +{ + Stress, + Fabric, + Plate, + Shell, + Curved, + Wall, + Strain, + Axi, + Load +} + +public enum ReferenceSurface +{ + Top, + Middle, + Bottom + //TOP_CENTRE, CENTROID,BOT_CENTRE +} + +public enum PropertyType3D +{ + Solid, + Infinite +} + +public enum PropertyTypeSpring +{ + Axial, + Torsional, + General, + Matrix, + TensionOnly, + CompressionOnly, + Connector, + LockUp, + Gap, + Friction + //Translational, //old + //Rotational //old +} + +public enum PropertyTypeDamper +{ + Axial, //translational + Torsional, //rotational + General +} + +public enum BaseReferencePoint +{ + Centroid, + TopLeft, + TopCentre, + TopRight, + MidLeft, + MidRight, + BotLeft, + BotCentre, + BotRight +} + +public enum ShapeType +{ + Rectangular, + Circular, + I, + Tee, + Angle, + Channel, + Perimeter, + Box, + Catalogue, + Explicit, + Undefined +} diff --git a/src/Objects/Structural/Results/AnalyticalResults.cs b/src/Objects/Structural/Results/AnalyticalResults.cs new file mode 100644 index 00000000..9c39b6fb --- /dev/null +++ b/src/Objects/Structural/Results/AnalyticalResults.cs @@ -0,0 +1,13 @@ +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Objects.Structural.Results; + +public class AnalyticalResults : Base +{ + public string? lengthUnits { get; set; } + public string? forceUnits { get; set; } + + [DetachProperty] + public List<Result> resultsByLoadCombination { get; set; } +} diff --git a/src/Objects/Structural/Results/Result.cs b/src/Objects/Structural/Results/Result.cs new file mode 100644 index 00000000..a117db54 --- /dev/null +++ b/src/Objects/Structural/Results/Result.cs @@ -0,0 +1,27 @@ +using Objects.Structural.Loading; +using Speckle.Core.Models; + +namespace Objects.Structural.Results; + +public class Result : Base +{ + public Result() { } + + public Result(LoadCase resultCase, string? description = null) + { + this.resultCase = resultCase; + this.description = description ?? ""; + } + + public Result(LoadCombination resultCase, string? description = null) + { + this.resultCase = resultCase; + this.description = description ?? ""; + } + + [DetachProperty] + public Base resultCase { get; set; } //loadCase or loadCombination + + public string permutation { get; set; } //for enveloped cases? + public string description { get; set; } = ""; +} diff --git a/src/Objects/Structural/Results/Result1D.cs b/src/Objects/Structural/Results/Result1D.cs new file mode 100644 index 00000000..f6f75b92 --- /dev/null +++ b/src/Objects/Structural/Results/Result1D.cs @@ -0,0 +1,170 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Results; + +public class ResultSet1D : Result +{ + public ResultSet1D() { } + + [SchemaInfo("ResultSet1D", "Creates a Speckle 1D element result set object", "Structural", "Results")] + public ResultSet1D(List<Result1D> results1D) + { + this.results1D = results1D; + } + + [DetachProperty] + public List<Result1D> results1D { get; set; } +} + +public class Result1D : Result //result at a single position along a 1D element, i.e.
1D element contains multiple Result1D objects to describe result at end 1, mid-span, end 2 +{ + public Result1D() { } + + [SchemaInfo( + "Result1D (load case)", + "Creates a Speckle 1D element result object (for load case)", + "Structural", + "Results" + )] + public Result1D( + Element1D element, + LoadCase resultCase, + float position, + float dispX, + float dispY, + float dispZ, + float rotXX, + float rotYY, + float rotZZ, + float forceX, + float forceY, + float forceZ, + float momentXX, + float momentYY, + float momentZZ, + float axialStress, + float shearStressY, + float shearStressZ, + float bendingStressYPos, + float bendingStressYNeg, + float bendingStressZPos, + float bendingStressZNeg, + float combinedStressMax, + float combinedStressMin + ) + { + this.element = element; + this.resultCase = resultCase; + this.position = position; + this.dispX = dispX; + this.dispY = dispY; + this.dispZ = dispZ; + this.rotXX = rotXX; + this.rotYY = rotYY; + this.rotZZ = rotZZ; + this.forceX = forceX; + this.forceY = forceY; + this.forceZ = forceZ; + this.momentXX = momentXX; + this.momentYY = momentYY; + this.momentZZ = momentZZ; + this.axialStress = axialStress; + this.shearStressY = shearStressY; + this.shearStressZ = shearStressZ; + this.bendingStressYPos = bendingStressYPos; + this.bendingStressYNeg = bendingStressYNeg; + this.bendingStressZPos = bendingStressZPos; + this.bendingStressZNeg = bendingStressZNeg; + this.combinedStressMax = combinedStressMax; + this.combinedStressMin = combinedStressMin; + } + + [SchemaInfo( + "Result1D (load combination)", + "Creates a Speckle 1D element result object (for load combination)", + "Structural", + "Results" + )] + public Result1D( + Element1D element, + LoadCombination resultCase, + float position, + float dispX, + float dispY, + float dispZ, + float rotXX, + float rotYY, + float rotZZ, + float forceX, + float forceY, + float forceZ, + float momentXX, + float momentYY, + float momentZZ, + float axialStress, + float shearStressY, + float shearStressZ, + float bendingStressYPos, + float bendingStressYNeg, + float bendingStressZPos, + float bendingStressZNeg, + float combinedStressMax, + float combinedStressMin + ) + { + this.element = element; + this.resultCase = resultCase; + this.position = position; + this.dispX = dispX; + this.dispY = dispY; + this.dispZ = dispZ; + this.rotXX = rotXX; + this.rotYY = rotYY; + this.rotZZ = rotZZ; + this.forceX = forceX; + this.forceY = forceY; + this.forceZ = forceZ; + this.momentXX = momentXX; + this.momentYY = momentYY; + this.momentZZ = momentZZ; + this.axialStress = axialStress; + this.shearStressY = shearStressY; + this.shearStressZ = shearStressZ; + this.bendingStressYPos = bendingStressYPos; + this.bendingStressYNeg = bendingStressYNeg; + this.bendingStressZPos = bendingStressZPos; + this.bendingStressZNeg = bendingStressZNeg; + this.combinedStressMax = combinedStressMax; + this.combinedStressMin = combinedStressMin; + } + + [DetachProperty] + public Element1D element { get; set; } + + public float? position { get; set; } //location along 1D element, normalised position (from 0 for end 1 to 1 for end 2) + public float? dispX { get; set; } + public float? dispY { get; set; } + public float? dispZ { get; set; } + public float? rotXX { get; set; } + public float? rotYY { get; set; } + public float? rotZZ { get; set; } + public float? forceX { get; set; } + public float? forceY { get; set; } + public float? forceZ { get; set; } + public float? momentXX { get; set; } + public float? 
momentYY { get; set; } + public float? momentZZ { get; set; } + public float? axialStress { get; set; } //axial stress, ie. Fx/Area + public float? shearStressY { get; set; } //shear stress, in minor axis dir, ie. Fy/Area + public float? shearStressZ { get; set; } //shear stress, in major axis dir, ie. Fz/Area + public float? bendingStressYPos { get; set; } //bending stress, about minor axis, ie. Myy/Iyy x Dz (Dz as distance from the centroid to the edge of the section in the +ve z direction) + public float? bendingStressYNeg { get; set; } //bending stress, about minor axis, ie. Myy/Iyy x Dz (Dz as distance from the centroid to the edge of the section in the -ve z direction) + public float? bendingStressZPos { get; set; } //bending stress, about major axis, ie. -Mzz/Izz x Dy (Dy as distance from the centroid to the edge of the section in the +ve y direction) + public float? bendingStressZNeg { get; set; } //bending stress, about major axis, ie. -Mzz/Izz x Dy (Dy as distance from the centroid to the edge of the section in the -ve y direction) + public float? combinedStressMax { get; set; } //maximum extreme fibre longitudinal stress due to axial forces and transverse bending + public float? combinedStressMin { get; set; } //minimum extreme fibre longitudinal stress due to axial forces and transverse bending +} diff --git a/src/Objects/Structural/Results/Result2D.cs b/src/Objects/Structural/Results/Result2D.cs new file mode 100644 index 00000000..89487759 --- /dev/null +++ b/src/Objects/Structural/Results/Result2D.cs @@ -0,0 +1,210 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Results; + +public class ResultSet2D : Result +{ + public ResultSet2D() { } + + [SchemaInfo("ResultSet2D", "Creates a Speckle 2D element result set object", "Structural", "Results")] + public ResultSet2D(List results2D) + { + this.results2D = results2D; + } + + [DetachProperty] + public List results2D { get; set; } +} + +public class Result2D : Result //result at a single position within a 2D element, ie. 
2D element contains multiple Result2D objects to describe result at node 1, node 2, node 3, node4 and centre of 4-node quad element +{ + public Result2D() { } + + [SchemaInfo( + "Result2D (load case)", + "Creates a Speckle 2D element result object (for load case)", + "Structural", + "Results" + )] + public Result2D( + Element2D element, + LoadCase resultCase, + List position, + float dispX, + float dispY, + float dispZ, + float forceXX, + float forceYY, + float forceXY, + float momentXX, + float momentYY, + float momentXY, + float shearX, + float shearY, + float stressTopXX, + float stressTopYY, + float stressTopZZ, + float stressTopXY, + float stressTopYZ, + float stressTopZX, + float stressMidXX, + float stressMidYY, + float stressMidZZ, + float stressMidXY, + float stressMidYZ, + float stressMidZX, + float stressBotXX, + float stressBotYY, + float stressBotZZ, + float stressBotXY, + float stressBotYZ, + float stressBotZX + ) + { + this.element = element; + this.resultCase = resultCase; + this.position = position; + this.dispX = dispX; + this.dispY = dispY; + this.dispZ = dispZ; + this.forceXX = forceXX; + this.forceYY = forceYY; + this.forceXY = forceXY; + this.momentXX = momentXX; + this.momentYY = momentYY; + this.momentXY = momentXY; + this.shearX = shearX; + this.shearY = shearY; + this.stressTopXX = stressTopXX; + this.stressTopYY = stressTopYY; + this.stressTopZZ = stressTopZZ; + this.stressTopXY = stressTopXY; + this.stressTopYZ = stressTopYZ; + this.stressTopZX = stressTopZX; + this.stressMidXX = stressMidXX; + this.stressMidYY = stressMidYY; + this.stressMidZZ = stressMidZZ; + this.stressMidXY = stressMidXY; + this.stressMidYZ = stressMidYZ; + this.stressMidZX = stressMidZX; + this.stressBotXX = stressBotXX; + this.stressBotYY = stressBotYY; + this.stressBotZZ = stressBotZZ; + this.stressBotXY = stressBotXY; + this.stressBotYZ = stressBotYZ; + this.stressBotZX = stressBotZX; + } + + [SchemaInfo( + "Result2D (load combination)", + "Creates a Speckle 2D element result object (for load combination)", + "Structural", + "Results" + )] + public Result2D( + Element2D element, + LoadCombination resultCase, + List position, + float dispX, + float dispY, + float dispZ, + float forceXX, + float forceYY, + float forceXY, + float momentXX, + float momentYY, + float momentXY, + float shearX, + float shearY, + float stressTopXX, + float stressTopYY, + float stressTopZZ, + float stressTopXY, + float stressTopYZ, + float stressTopZX, + float stressMidXX, + float stressMidYY, + float stressMidZZ, + float stressMidXY, + float stressMidYZ, + float stressMidZX, + float stressBotXX, + float stressBotYY, + float stressBotZZ, + float stressBotXY, + float stressBotYZ, + float stressBotZX + ) + { + this.element = element; + this.resultCase = resultCase; + this.position = position; + this.dispX = dispX; + this.dispY = dispY; + this.dispZ = dispZ; + this.forceXX = forceXX; + this.forceYY = forceYY; + this.forceXY = forceXY; + this.momentXX = momentXX; + this.momentYY = momentYY; + this.momentXY = momentXY; + this.shearX = shearX; + this.shearY = shearY; + this.stressTopXX = stressTopXX; + this.stressTopYY = stressTopYY; + this.stressTopZZ = stressTopZZ; + this.stressTopXY = stressTopXY; + this.stressTopYZ = stressTopYZ; + this.stressTopZX = stressTopZX; + this.stressMidXX = stressMidXX; + this.stressMidYY = stressMidYY; + this.stressMidZZ = stressMidZZ; + this.stressMidXY = stressMidXY; + this.stressMidYZ = stressMidYZ; + this.stressMidZX = stressMidZX; + this.stressBotXX = stressBotXX; + this.stressBotYY 
= stressBotYY; + this.stressBotZZ = stressBotZZ; + this.stressBotXY = stressBotXY; + this.stressBotYZ = stressBotYZ; + this.stressBotZX = stressBotZX; + } + + [DetachProperty] + public Element2D element { get; set; } + + public List position { get; set; } //relative position within element (x,y in range [0:1], { 0.5, 0.5 } corresponds to centre of element, { 0, 0 } correponds to corner/at a node of a element + public float? dispX { get; set; } + public float? dispY { get; set; } + public float? dispZ { get; set; } + public float? forceXX { get; set; } //in-plane force per unit length in x direction + public float? forceYY { get; set; } //in-plane force per unit length in y direction + public float? forceXY { get; set; } //in-plane force per unit length in xy direction (at interface) + public float? momentXX { get; set; } //moment per unit length in x direction + public float? momentYY { get; set; } //moment per unit length in y direction + public float? momentXY { get; set; } //moment per unit length in xy direction + public float? shearX { get; set; } //through thickness shear force per unit length in x direction + public float? shearY { get; set; } //through thickness shear force per unit length in y direction + public float? stressTopXX { get; set; } //in-plane stress in x direction at top layer of element + public float? stressTopYY { get; set; } //in-plane stress in y direction at top layer of element + public float? stressTopZZ { get; set; } //in-plane stress in z direction (through thickness) at top layer of element + public float? stressTopXY { get; set; } //shear stress in xy direction at top layer of element + public float? stressTopYZ { get; set; } //shear stress in yz direction at top layer of element + public float? stressTopZX { get; set; } //shear stress in zx direction at top layer of element + public float? stressMidXX { get; set; } //in-plane stress in x direction at mid layer of element + public float? stressMidYY { get; set; } //in-plane stress in y direction at mid layer of element + public float? stressMidZZ { get; set; } //in-plane stress in z direction (through thickness) at mid layer of element + public float? stressMidXY { get; set; } //shear stress in xy direction at mid layer of element + public float? stressMidYZ { get; set; } //shear stress in yz direction at mid layer of element + public float? stressMidZX { get; set; } //shear stress in zx direction at mid layer of element + public float? stressBotXX { get; set; } //in-plane stress in x direction at bot layer of element + public float? stressBotYY { get; set; } //in-plane stress in y direction at bot layer of element + public float? stressBotZZ { get; set; } //in-plane stress in z direction (through thickness) at bot layer of element + public float? stressBotXY { get; set; } //shear stress in xy direction at bot layer of element + public float? stressBotYZ { get; set; } //shear stress in yz direction at bot layer of element + public float? 
stressBotZX { get; set; } //shear stress in zx direction at bot layer of element +} diff --git a/src/Objects/Structural/Results/Result3D.cs b/src/Objects/Structural/Results/Result3D.cs new file mode 100644 index 00000000..0fac7109 --- /dev/null +++ b/src/Objects/Structural/Results/Result3D.cs @@ -0,0 +1,110 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Results; + +public class ResultSet3D : Result +{ + public ResultSet3D() { } + + [SchemaInfo("ResultSet3D", "Creates a Speckle 3D element result set object", "Structural", "Results")] + public ResultSet3D(List results3D) + { + this.results3D = results3D; + } + + [DetachProperty] + public List results3D { get; set; } +} + +public class Result3D : Result +{ + public Result3D() { } + + [SchemaInfo( + "Result3D (load case)", + "Creates a Speckle 3D element result object (for load case)", + "Structural", + "Results" + )] + public Result3D( + Element3D element, + LoadCase resultCase, + List position, + float dispX, + float dispY, + float dispZ, + float stressXX, + float stressYY, + float stressZZ, + float stressXY, + float stressYZ, + float stressZX + ) + { + this.element = element; + this.resultCase = resultCase; + this.position = position; + this.dispX = dispX; + this.dispY = dispY; + this.dispZ = dispZ; + this.stressXX = stressXX; + this.stressYY = stressYY; + this.stressZZ = stressZZ; + this.stressXY = stressXY; + this.stressYZ = stressYZ; + this.stressZX = stressZX; + } + + [SchemaInfo( + "Result3D (load combination)", + "Creates a Speckle 3D element result object (for load combination)", + "Structural", + "Results" + )] + public Result3D( + Element3D element, + LoadCombination resultCase, + List position, + float dispX, + float dispY, + float dispZ, + float stressXX, + float stressYY, + float stressZZ, + float stressXY, + float stressYZ, + float stressZX + ) + { + this.element = element; + this.resultCase = resultCase; + this.position = position; + this.dispX = dispX; + this.dispY = dispY; + this.dispZ = dispZ; + this.stressXX = stressXX; + this.stressYY = stressYY; + this.stressZZ = stressZZ; + this.stressXY = stressXY; + this.stressYZ = stressYZ; + this.stressZX = stressZX; + } + + [DetachProperty] + public Element3D element { get; set; } + + public List position { get; set; } //relative position within element (x,y,z in range [0:1] to describe position) + public float? dispX { get; set; } + public float? dispY { get; set; } + public float? dispZ { get; set; } + public float? stressXX { get; set; } + public float? stressYY { get; set; } + public float? stressZZ { get; set; } + public float? stressXY { get; set; } + public float? stressYZ { get; set; } + public float? stressZX { get; set; } +} diff --git a/src/Objects/Structural/Results/ResultAll.cs b/src/Objects/Structural/Results/ResultAll.cs new file mode 100644 index 00000000..a44eb577 --- /dev/null +++ b/src/Objects/Structural/Results/ResultAll.cs @@ -0,0 +1,45 @@ +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Results; + +public class ResultSetAll : Base +{ + public ResultSetAll() { } + + [SchemaInfo( + "ResultSetAll", + "Creates a Speckle result set object for 1d element, 2d element, 3d element global and nodal results", + "Structural", + "Results" + )] + public ResultSetAll( + ResultSet1D? results1D, + ResultSet2D? results2D, + ResultSet3D? results3D, + ResultGlobal? resultsGlobal, + ResultSetNode? 
resultsNode + ) + { + this.results1D = results1D; + this.results2D = results2D; + this.results3D = results3D; + this.resultsGlobal = resultsGlobal; + this.resultsNode = resultsNode; + } + + [DetachProperty] + public ResultSet1D? results1D { get; set; } //1d element results + + [DetachProperty] + public ResultSet2D? results2D { get; set; } //2d elements results + + [DetachProperty] + public ResultSet3D? results3D { get; set; } //3d elements results + + [DetachProperty] + public ResultGlobal? resultsGlobal { get; set; } //global results + + [DetachProperty] + public ResultSetNode? resultsNode { get; set; } //nodal results +} diff --git a/src/Objects/Structural/Results/ResultGlobal.cs b/src/Objects/Structural/Results/ResultGlobal.cs new file mode 100644 index 00000000..787a31cd --- /dev/null +++ b/src/Objects/Structural/Results/ResultGlobal.cs @@ -0,0 +1,156 @@ +using Objects.Structural.Analysis; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Results; + +public class ResultGlobal : Result +{ + public ResultGlobal() { } + + [SchemaInfo( + "ResultGlobal (load case)", + "Creates a Speckle global result object (for load case)", + "Structural", + "Results" + )] + public ResultGlobal( + LoadCase resultCase, + float loadX, + float loadY, + float loadZ, + float loadXX, + float loadYY, + float loadZZ, + float reactionX, + float reactionY, + float reactionZ, + float reactionXX, + float reactionYY, + float reactionZZ, + float mode, + float frequency, + float loadFactor, + float modalStiffness, + float modalGeoStiffness, + float effMassX, + float effMassY, + float effMassZ, + float effMassXX, + float effMassYY, + float effMassZZ + ) + { + this.resultCase = resultCase; + this.loadX = loadX; + this.loadY = loadY; + this.loadZ = loadZ; + this.loadXX = loadXX; + this.loadYY = loadYY; + this.loadZZ = loadZZ; + this.reactionX = reactionX; + this.reactionY = reactionY; + this.reactionZ = reactionZ; + this.reactionXX = reactionXX; + this.reactionYY = reactionYY; + this.reactionZZ = reactionZZ; + this.mode = mode; + this.frequency = frequency; + this.loadFactor = loadFactor; + this.modalStiffness = modalStiffness; + this.modalGeoStiffness = modalGeoStiffness; + this.effMassX = effMassX; + this.effMassY = effMassY; + this.effMassZ = effMassZ; + this.effMassXX = effMassXX; + this.effMassYY = effMassYY; + this.effMassZZ = effMassZZ; + } + + [SchemaInfo( + "ResultGlobal (load combination)", + "Creates a Speckle global result object (for load combination)", + "Structural", + "Results" + )] + public ResultGlobal( + LoadCombination resultCase, + float loadX, + float loadY, + float loadZ, + float loadXX, + float loadYY, + float loadZZ, + float reactionX, + float reactionY, + float reactionZ, + float reactionXX, + float reactionYY, + float reactionZZ, + float mode, + float frequency, + float loadFactor, + float modalStiffness, + float modalGeoStiffness, + float effMassX, + float effMassY, + float effMassZ, + float effMassXX, + float effMassYY, + float effMassZZ + ) + { + this.resultCase = resultCase; + this.loadX = loadX; + this.loadY = loadY; + this.loadZ = loadZ; + this.loadXX = loadXX; + this.loadYY = loadYY; + this.loadZZ = loadZZ; + this.reactionX = reactionX; + this.reactionY = reactionY; + this.reactionZ = reactionZ; + this.reactionXX = reactionXX; + this.reactionYY = reactionYY; + this.reactionZZ = reactionZZ; + this.mode = mode; + this.frequency = frequency; + this.loadFactor = loadFactor; + this.modalStiffness = modalStiffness; + 
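The result containers above compose as in this minimal sketch (assuming resultsByLoadCombination on AnalyticalResults is a List<Result>; the unit strings are sample data):

using System.Collections.Generic;
using Objects.Structural.Results;

// An (empty) 1D result set; in practice it would hold one Result1D per element and sampling position.
var set1d = new ResultSet1D(new List<Result1D>());

// Every slot of ResultSetAll is nullable, so disciplines without results simply stay null.
var everything = new ResultSetAll(set1d, results2D: null, results3D: null, resultsGlobal: null, resultsNode: null);

// A model-level container with free-form unit strings; ResultSet1D derives from Result, so it can be listed here.
var analytical = new AnalyticalResults
{
  lengthUnits = "m",
  forceUnits = "kN",
  resultsByLoadCombination = new List<Result> { set1d }
};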
this.modalGeoStiffness = modalGeoStiffness; + this.effMassX = effMassX; + this.effMassY = effMassY; + this.effMassZ = effMassZ; + this.effMassXX = effMassXX; + this.effMassYY = effMassYY; + this.effMassZZ = effMassZZ; + } + + [DetachProperty] + public Model model { get; set; } // this should be a model identifier instead + + public float? loadX { get; set; } + public float? loadY { get; set; } + public float? loadZ { get; set; } + public float? loadXX { get; set; } + public float? loadYY { get; set; } + public float? loadZZ { get; set; } + public float? reactionX { get; set; } + public float? reactionY { get; set; } + public float? reactionZ { get; set; } + public float? reactionXX { get; set; } + public float? reactionYY { get; set; } + public float? reactionZZ { get; set; } + public float? mode { get; set; } + public float? frequency { get; set; } + public float? loadFactor { get; set; } + public float? modalStiffness { get; set; } + public float? modalGeoStiffness { get; set; } + public float? effMassX { get; set; } + public float? effMassY { get; set; } + public float? effMassZ { get; set; } + public float? effMassXX { get; set; } + public float? effMassYY { get; set; } + public float? effMassZZ { get; set; } +} diff --git a/src/Objects/Structural/Results/ResultNode.cs b/src/Objects/Structural/Results/ResultNode.cs new file mode 100644 index 00000000..305cc923 --- /dev/null +++ b/src/Objects/Structural/Results/ResultNode.cs @@ -0,0 +1,211 @@ +using System.Collections.Generic; +using Objects.Structural.Geometry; +using Objects.Structural.Loading; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Structural.Results; + +public enum CaseType +{ + Analysis, + Combination +} + +public class ResultSetNode : Result +{ + public ResultSetNode() { } + + [SchemaInfo("ResultSetNode", "Creates a Speckle node result set object", "Structural", "Results")] + public ResultSetNode(List resultsNode) + { + this.resultsNode = resultsNode; + } + + [DetachProperty] + public List resultsNode { get; set; } +} + +public class ResultNode : Result +{ + public ResultNode() { } + + [SchemaInfo("ResultNode (load case)", "Creates a Speckle structural nodal result object", "Structural", "Results")] + public ResultNode( + LoadCase resultCase, + Node node, + float dispX, + float dispY, + float dispZ, + float rotXX, + float rotYY, + float rotZZ, + float reactionX, + float reactionY, + float reactionZ, + float reactionXX, + float reactionYY, + float reactionZZ, + float constraintX, + float constraintY, + float constraintZ, + float constraintXX, + float constraintYY, + float constraintZZ, + float velX, + float velY, + float velZ, + float velXX, + float velYY, + float velZZ, + float accX, + float accY, + float accZ, + float accXX, + float accYY, + float accZZ + ) + { + this.resultCase = resultCase; + this.node = node; + this.dispX = dispX; + this.dispY = dispY; + this.dispZ = dispZ; + this.rotXX = rotXX; + this.rotYY = rotYY; + this.rotZZ = rotZZ; + this.reactionX = reactionX; + this.reactionY = reactionY; + this.reactionZ = reactionZ; + this.reactionXX = reactionXX; + this.reactionYY = reactionYY; + this.reactionZZ = reactionZZ; + this.constraintX = constraintX; + this.constraintY = constraintY; + this.constraintZ = constraintZ; + this.constraintXX = constraintXX; + this.constraintYY = constraintYY; + this.constraintZZ = constraintZZ; + this.velX = velX; + this.velY = velY; + this.velZ = velZ; + this.velXX = velXX; + this.velYY = velYY; + this.velZZ = velZZ; + this.accX = accX; + this.accY = accY; + 
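Across these result classes the value fields are nullable floats, which presumably lets a partially populated result distinguish unreported components from genuine zeros. A minimal sketch using ResultGlobal with sample values:

using Objects.Structural.Results;

// Only the components that were actually computed are set; everything else stays null rather than 0.
var globalResult = new ResultGlobal
{
  reactionZ = 1830f,
  frequency = 2.2f
};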
this.accZ = accZ; + this.accXX = accXX; + this.accYY = accYY; + this.accZZ = accZZ; + } + + [SchemaInfo( + "ResultNode (load combination)", + "Creates a Speckle structural nodal result object", + "Structural", + "Results" + )] + public ResultNode( + LoadCombination resultCase, + Node node, + float dispX, + float dispY, + float dispZ, + float rotXX, + float rotYY, + float rotZZ, + float reactionX, + float reactionY, + float reactionZ, + float reactionXX, + float reactionYY, + float reactionZZ, + float constraintX, + float constraintY, + float constraintZ, + float constraintXX, + float constraintYY, + float constraintZZ, + float velX, + float velY, + float velZ, + float velXX, + float velYY, + float velZZ, + float accX, + float accY, + float accZ, + float accXX, + float accYY, + float accZZ + ) + { + this.resultCase = resultCase; + this.node = node; + this.dispX = dispX; + this.dispY = dispY; + this.dispZ = dispZ; + this.rotXX = rotXX; + this.rotYY = rotYY; + this.rotZZ = rotZZ; + this.reactionX = reactionX; + this.reactionY = reactionY; + this.reactionZ = reactionZ; + this.reactionXX = reactionXX; + this.reactionYY = reactionYY; + this.reactionZZ = reactionZZ; + this.constraintX = constraintX; + this.constraintY = constraintY; + this.constraintZ = constraintZ; + this.constraintXX = constraintXX; + this.constraintYY = constraintYY; + this.constraintZZ = constraintZZ; + this.velX = velX; + this.velY = velY; + this.velZ = velZ; + this.velXX = velXX; + this.velYY = velYY; + this.velZZ = velZZ; + this.accX = accX; + this.accY = accY; + this.accZ = accZ; + this.accXX = accXX; + this.accYY = accYY; + this.accZZ = accZZ; + } + + [DetachProperty] + public Node node { get; set; } + + public float? dispX { get; set; } + public float? dispY { get; set; } + public float? dispZ { get; set; } + public float? rotXX { get; set; } + public float? rotYY { get; set; } + public float? rotZZ { get; set; } + public float? reactionX { get; set; } + public float? reactionY { get; set; } + public float? reactionZ { get; set; } + public float? reactionXX { get; set; } + public float? reactionYY { get; set; } + public float? reactionZZ { get; set; } + public float? constraintX { get; set; } + public float? constraintY { get; set; } + public float? constraintZ { get; set; } + public float? constraintXX { get; set; } + public float? constraintYY { get; set; } + public float? constraintZZ { get; set; } + public float? velX { get; set; } + public float? velY { get; set; } + public float? velZ { get; set; } + public float? velXX { get; set; } + public float? velYY { get; set; } + public float? velZZ { get; set; } + public float? accX { get; set; } + public float? accY { get; set; } + public float? accZ { get; set; } + public float? accXX { get; set; } + public float? accYY { get; set; } + public float? accZZ { get; set; } +} diff --git a/src/Objects/Utils/MeshTriangulationHelper.cs b/src/Objects/Utils/MeshTriangulationHelper.cs new file mode 100644 index 00000000..b553dc88 --- /dev/null +++ b/src/Objects/Utils/MeshTriangulationHelper.cs @@ -0,0 +1,273 @@ +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using Objects.Geometry; + +namespace Objects.Utils; + +/// +/// Set of functions to triangulate n-gon faces (i.e. polygon faces with an arbitrary (n) number of vertices) in es. +/// +public static class MeshTriangulationHelper +{ + /// + /// Triangulates all faces in . + /// + /// The mesh to triangulate. + /// If , will not triangulate quad faces. 
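For context on the face encoding this helper consumes: faces is a flat list in which each face is stored as a leading vertex count n followed by n vertex indices, and a legacy encoding used 0 for triangles and 1 for quads, which is why the code below bumps n by 3 whenever it is less than 3. A small sketch of both encodings (the vertex indices are arbitrary sample values):

using System.Collections.Generic;

// Current n-gon encoding: one quad (4 vertex indices) followed by one triangle (3 vertex indices).
var faces = new List<int> { 4, 0, 1, 2, 3, /* quad */ 3, 0, 2, 4 /* triangle */ };

// Legacy encoding of the same two faces: a leading 1 marked a quad, a leading 0 marked a triangle.
var legacyFaces = new List<int> { 1, 0, 1, 2, 3, /* quad */ 0, 0, 2, 4 /* triangle */ };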
+ public static void TriangulateMesh(this Mesh mesh, bool preserveQuads = false) + { + List triangles = new(mesh.faces.Count); //Our new list is going to be at least as big as our old one + int i = 0; + while (i < mesh.faces.Count) + { + int n = mesh.faces[i]; + if (n < 3) + { + n += 3; // 0 -> 3, 1 -> 4 + } + + if (n == 3) + { + triangles.Add(3); + triangles.Add(mesh.faces[i + 1]); + triangles.Add(mesh.faces[i + 2]); + triangles.Add(mesh.faces[i + 3]); + } + else if (preserveQuads && n == 4) + { + triangles.Add(4); + triangles.Add(mesh.faces[i + 1]); + triangles.Add(mesh.faces[i + 2]); + triangles.Add(mesh.faces[i + 3]); + triangles.Add(mesh.faces[i + 4]); + } + else + { + var triangle = TriangulateFace(i, mesh); + triangles.AddRange(triangle); + } + + i += n + 1; + } + + mesh.faces = triangles; + } + + /// Overload using a , does not mutate + /// + public static List TriangulateFace(int faceIndex, Mesh mesh, bool includeIndicators = true) + { + return TriangulateFace(faceIndex, mesh.faces, mesh.vertices, includeIndicators); + } + + /// + /// Calculates the triangulation of the face at in list. + /// + /// + /// This implementation is based the ear clipping method + /// Proposed by "Christer Ericson (2005) Real-Time Collision Detection". + /// + /// The index of the face's cardinality indicator n in list. + /// + /// + /// if , the returned list will include cardinality indicators for each triangle + /// (i.e 4 ints for each tri), otherwise will simply be 3 ints for each tri. + /// List of triangle faces in the specified format. + public static List TriangulateFace( + int faceIndex, + IReadOnlyList faces, + IReadOnlyList vertices, + bool includeIndicators = true + ) + { + int n = faces[faceIndex]; + if (n < 3) + { + n += 3; // 0 -> 3, 1 -> 4 + } + #region Local Funcitions + //Converts from relative to absolute index (returns index in mesh.vertices list) + int AsIndex(int v) => faceIndex + v + 1; + + //Gets vertex from a relative vert index + Vector3 V(int v) + { + int index = faces[AsIndex(v)] * 3; + return new Vector3(vertices[index], vertices[index + 1], vertices[index + 2]); + } + #endregion + + int intsPerTri = includeIndicators ? 
4 : 3; + List triangleFaces = new((n - 2) * intsPerTri); + + //Calculate face normal using the Newell Method + Vector3 faceNormal = Vector3.Zero; + for (int ii = n - 1, jj = 0; jj < n; ii = jj, jj++) + { + Vector3 iPos = V(ii); + Vector3 jPos = V(jj); + faceNormal.x += (jPos.y - iPos.y) * (iPos.z + jPos.z); // projection on yz + faceNormal.y += (jPos.z - iPos.z) * (iPos.x + jPos.x); // projection on xz + faceNormal.z += (jPos.x - iPos.x) * (iPos.y + jPos.y); // projection on xy + } + faceNormal.Normalize(); + + //Set up previous and next links to effectively form a double-linked vertex list + int[] prev = new int[n], + next = new int[n]; + for (int j = 0; j < n; j++) + { + prev[j] = j - 1; + next[j] = j + 1; + } + prev[0] = n - 1; + next[n - 1] = 0; + + //Start clipping ears until we are left with a triangle + int i = 0; + int counter = 0; + while (n >= 3) + { + bool isEar = true; + + //If we are the last triangle or we have exhausted our vertices, the below statement will be false + if (n > 3 && counter < n) + { + Vector3 prevVertex = V(prev[i]); + Vector3 earVertex = V(i); + Vector3 nextVertex = V(next[i]); + + if (TriangleIsCCW(faceNormal, prevVertex, earVertex, nextVertex)) + { + int k = next[next[i]]; + + do + { + if (TestPointTriangle(V(k), prevVertex, earVertex, nextVertex)) + { + isEar = false; + break; + } + + k = next[k]; + } while (k != prev[i]); + } + else + { + isEar = false; + } + } + + if (isEar) + { + int a = faces[AsIndex(i)]; + int b = faces[AsIndex(next[i])]; + int c = faces[AsIndex(prev[i])]; + + if (includeIndicators) + { + triangleFaces.Add(3); + } + + triangleFaces.Add(a); + triangleFaces.Add(b); + triangleFaces.Add(c); + + next[prev[i]] = next[i]; + prev[next[i]] = prev[i]; + n--; + i = prev[i]; + counter = 0; + } + else + { + i = next[i]; + counter++; + } + } + + return triangleFaces; + } + + /// + /// Tests if point is within triangle + /// + /// true if is within triangle + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool TestPointTriangle(Vector3 v, Vector3 a, Vector3 b, Vector3 c) + { + static bool Test(Vector3 v, Vector3 a, Vector3 b) + { + Vector3 crossA = v.Cross(a); + Vector3 crossB = a.Cross(b); + double dotWithEpsilon = double.Epsilon + crossA.Dot(crossB); + return Math.Sign(dotWithEpsilon) != -1; + } + + return Test(b - a, v - a, c - a) && Test(c - b, v - b, a - b) && Test(a - c, v - c, b - c); + } + + /// + /// Checks that triangle is clockwise with reference to + /// + /// The normal direction of the face + /// + /// + /// + /// true if triangle is ccw + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static bool TriangleIsCCW(Vector3 referenceNormal, Vector3 a, Vector3 b, Vector3 c) + { + Vector3 triangleNormal = (c - a).Cross(b - a); + triangleNormal.Normalize(); + return referenceNormal.Dot(triangleNormal) > 0.0f; + } + + /// + /// 3-dimension x, Y, Z Vector of s encapsulating necessary vector mathematics + /// + private struct Vector3 + { + public double x, + y, + z; + + public Vector3(double x, double y, double z) + { + this.x = x; + this.y = y; + this.z = z; + } + + public static readonly Vector3 Zero = new(0, 0, 0); + + public static Vector3 operator +(Vector3 a, Vector3 b) => new(a.x + b.x, a.y + b.y, a.z + b.z); + + public static Vector3 operator -(Vector3 a, Vector3 b) => new(a.x - b.x, a.y - b.y, a.z - b.z); + + public readonly double Dot(Vector3 v) + { + return x * v.x + y * v.y + z * v.z; + } + + public readonly Vector3 Cross(Vector3 v) + { + var x = this.y * v.z - this.z * v.y; + var y = this.z * v.x - 
this.x * v.z; + var z = this.x * v.y - this.y * v.x; + + return new Vector3(x, y, z); + } + + public readonly double SquareSum => x * x + y * y + z * z; + + public void Normalize() + { + double scale = 1d / Math.Sqrt(SquareSum); + x *= scale; + y *= scale; + z *= scale; + } + } +} diff --git a/src/Objects/Utils/Parameters.cs b/src/Objects/Utils/Parameters.cs new file mode 100644 index 00000000..153bbbea --- /dev/null +++ b/src/Objects/Utils/Parameters.cs @@ -0,0 +1,37 @@ +using System.Collections.Generic; +using Objects.BuiltElements.Revit; +using Speckle.Core.Models; + +namespace Objects.Utils; + +public static class Parameters +{ + /// + /// Turns a List of Parameters into a Base so that it can be used with the Speckle parameters prop + /// + /// + /// + public static Base? ToBase(this List parameters) + { + if (parameters == null) + { + return null; + } + + var @base = new Base(); + + foreach (Parameter p in parameters) + { + //if an applicationId is defined (BuiltInName) use that as key, otherwise use the display name + var key = string.IsNullOrEmpty(p.applicationInternalName) ? p.name : p.applicationInternalName; + if (string.IsNullOrEmpty(key) || @base[key] != null) + { + continue; + } + + @base[key] = p; + } + + return @base; + } +} diff --git a/src/Speckle.Core.Transports/DiskTransport.cs b/src/Speckle.Core.Transports/DiskTransport.cs new file mode 100644 index 00000000..a780eef9 --- /dev/null +++ b/src/Speckle.Core.Transports/DiskTransport.cs @@ -0,0 +1,159 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Speckle.Core.Helpers; + +namespace Speckle.Core.Transports; + +/// +/// Writes speckle objects to disk. +/// +public class DiskTransport : ICloneable, ITransport +{ + public DiskTransport(string? basePath = null) + { + basePath ??= Path.Combine(SpecklePathProvider.UserSpeckleFolderPath, "DiskTransportFiles"); + + RootPath = Path.Combine(basePath); + + Directory.CreateDirectory(RootPath); + } + + public string RootPath { get; set; } + + public object Clone() + { + return new DiskTransport + { + RootPath = RootPath, + CancellationToken = CancellationToken, + OnErrorAction = OnErrorAction, + OnProgressAction = OnProgressAction, + TransportName = TransportName + }; + } + + public string TransportName { get; set; } = "Disk"; + + public Dictionary TransportContext => + new() + { + { "name", TransportName }, + { "type", GetType().Name }, + { "basePath", RootPath } + }; + + public CancellationToken CancellationToken { get; set; } + + public Action? OnProgressAction { get; set; } + + public Action? OnErrorAction { get; set; } + + public int SavedObjectCount { get; private set; } + + public TimeSpan Elapsed { get; set; } = TimeSpan.Zero; + + public void BeginWrite() + { + SavedObjectCount = 0; + } + + public void EndWrite() { } + + public string? 
GetObject(string id) + { + CancellationToken.ThrowIfCancellationRequested(); + + var filePath = Path.Combine(RootPath, id); + if (File.Exists(filePath)) + { + return File.ReadAllText(filePath, Encoding.UTF8); + } + + return null; + } + + public void SaveObject(string id, string serializedObject) + { + var stopwatch = Stopwatch.StartNew(); + CancellationToken.ThrowIfCancellationRequested(); + + var filePath = Path.Combine(RootPath, id); + if (File.Exists(filePath)) + { + return; + } + + try + { + File.WriteAllText(filePath, serializedObject, Encoding.UTF8); + } + catch (Exception ex) + { + throw new TransportException(this, $"Failed to write object {id} to disk", ex); + } + + SavedObjectCount++; + OnProgressAction?.Invoke(TransportName, SavedObjectCount); + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + } + + public void SaveObject(string id, ITransport sourceTransport) + { + CancellationToken.ThrowIfCancellationRequested(); + + var serializedObject = sourceTransport.GetObject(id); + + if (serializedObject is null) + { + throw new TransportException( + this, + $"Cannot copy {id} from {sourceTransport.TransportName} to {TransportName} as source returned null" + ); + } + + SaveObject(id, serializedObject); + } + + public Task WriteComplete() + { + return Task.CompletedTask; + } + + public Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action? onTotalChildrenCountKnown = null + ) + { + string res = TransportHelpers.CopyObjectAndChildrenSync( + id, + this, + targetTransport, + onTotalChildrenCountKnown, + CancellationToken + ); + return Task.FromResult(res); + } + + public Task> HasObjects(IReadOnlyList objectIds) + { + Dictionary ret = new(); + foreach (string objectId in objectIds) + { + var filePath = Path.Combine(RootPath, objectId); + ret[objectId] = File.Exists(filePath); + } + return Task.FromResult(ret); + } + + public override string ToString() + { + return $"Disk Transport @{RootPath}"; + } +} diff --git a/src/Speckle.Core.Transports/Speckle.Core.Transports.csproj b/src/Speckle.Core.Transports/Speckle.Core.Transports.csproj new file mode 100644 index 00000000..52ef1ac2 --- /dev/null +++ b/src/Speckle.Core.Transports/Speckle.Core.Transports.csproj @@ -0,0 +1,18 @@ + + + + netstandard2.0 + Speckle.Transports.Disk + DiskTransport + A Disk transport for Speckle + $(PackageTags) disk transport + Debug;Release + true + enable + Speckle.Core.Transports + + + + + + diff --git a/src/Speckle.Core.Transports/packages.lock.json b/src/Speckle.Core.Transports/packages.lock.json new file mode 100644 index 00000000..c9670d19 --- /dev/null +++ b/src/Speckle.Core.Transports/packages.lock.json @@ -0,0 +1,460 @@ +{ + "version": 2, + "dependencies": { + ".NETStandard,Version=v2.0": { + "NETStandard.Library": { + "type": "Direct", + "requested": "[2.0.3, )", + "resolved": "2.0.3", + "contentHash": "st47PosZSHrjECdjeIzZQbzivYBJFv6P2nv4cj2ypdI204DO+vZ7l5raGMiX4eXMJ53RfOIg+/s4DHVZ54Nu2A==", + "dependencies": { + "Microsoft.NETCore.Platforms": "1.1.0" + } + }, + "PolySharp": { + "type": "Direct", + "requested": "[1.14.1, )", + "resolved": "1.14.1", + "contentHash": "mOOmFYwad3MIOL14VCjj02LljyF1GNw1wP0YVlxtcPvqdxjGGMNdNJJxHptlry3MOd8b40Flm8RPOM8JOlN2sQ==" + }, + "GraphQL.Client.Abstractions": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "h7uzWFORHZ+CCjwr/ThAyXMr0DPpzEANDa4Uo54wqCQ+j7qUKwqYTgOrb1W40sqbvNaZm9v/X7It31SUw0maHA==", + "dependencies": { + "GraphQL.Primitives": "6.0.0" + } + }, + "GraphQL.Client.Abstractions.Websocket": { + "type": "Transitive", + 
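A round-trip sketch for the DiskTransport above (the object id and JSON payload are placeholder sample values; in normal use the id is the object hash produced during serialization):

using Speckle.Core.Transports;

// Defaults to a "DiskTransportFiles" folder under the user's Speckle directory.
var transport = new DiskTransport();

transport.BeginWrite();
transport.SaveObject("0123abcd", "{\"speckle_type\":\"Base\"}"); // placeholder id and serialized payload
transport.EndWrite();

// Returns the stored text, or null when no file with that id exists.
string? roundTripped = transport.GetObject("0123abcd");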
"resolved": "6.0.0", + "contentHash": "Nr9bPf8gIOvLuXpqEpqr9z9jslYFJOvd0feHth3/kPqeR3uMbjF5pjiwh4jxyMcxHdr8Pb6QiXkV3hsSyt0v7A==", + "dependencies": { + "GraphQL.Client.Abstractions": "6.0.0" + } + }, + "GraphQL.Primitives": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "yg72rrYDapfsIUrul7aF6wwNnTJBOFvuA9VdDTQpPa8AlAriHbufeXYLBcodKjfUdkCnaiggX1U/nEP08Zb5GA==" + }, + "Microsoft.AspNetCore.Http": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "pPDcCW8spnyibK3krpxrOpaFHf5fjV6k1Hsl6gfh77N/8gRYlLU7MOQDUnjpEwdlHmtxwJKQJNxZqVQOmJGRUw==", + "dependencies": { + "Microsoft.AspNetCore.Http.Abstractions": "2.1.1", + "Microsoft.AspNetCore.WebUtilities": "2.1.1", + "Microsoft.Extensions.ObjectPool": "2.1.1", + "Microsoft.Extensions.Options": "2.1.1", + "Microsoft.Net.Http.Headers": "2.1.1" + } + }, + "Microsoft.AspNetCore.Http.Abstractions": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "kQUEVOU4loc8CPSb2WoHFTESqwIa8Ik7ysCBfTwzHAd0moWovc9JQLmhDIHlYLjHbyexqZAlkq/FPRUZqokebw==", + "dependencies": { + "Microsoft.AspNetCore.Http.Features": "2.1.1", + "System.Text.Encodings.Web": "4.5.0" + } + }, + "Microsoft.AspNetCore.Http.Features": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "VklZ7hWgSvHBcDtwYYkdMdI/adlf7ebxTZ9kdzAhX+gUs5jSHE9mZlTamdgf9miSsxc1QjNazHXTDJdVPZKKTw==", + "dependencies": { + "Microsoft.Extensions.Primitives": "2.1.1" + } + }, + "Microsoft.AspNetCore.WebUtilities": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "PGKIZt4+412Z/XPoSjvYu/QIbTxcAQuEFNoA1Pw8a9mgmO0ZhNBmfaNyhgXFf7Rq62kP0tT/2WXpxdcQhkFUPA==", + "dependencies": { + "Microsoft.Net.Http.Headers": "2.1.1", + "System.Text.Encodings.Web": "4.5.0" + } + }, + "Microsoft.Bcl.AsyncInterfaces": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "W8DPQjkMScOMTtJbPwmPyj9c3zYSFGawDW3jwlBOOsnY+EzZFLgNQ/UMkK35JmkNOVPdCyPr2Tw7Vv9N+KA3ZQ==", + "dependencies": { + "System.Threading.Tasks.Extensions": "4.5.4" + } + }, + "Microsoft.CSharp": { + "type": "Transitive", + "resolved": "4.7.0", + "contentHash": "pTj+D3uJWyN3My70i2Hqo+OXixq3Os2D1nJ2x92FFo6sk8fYS1m1WLNTs0Dc1uPaViH0YvEEwvzddQ7y4rhXmA==" + }, + "Microsoft.Data.Sqlite.Core": { + "type": "Transitive", + "resolved": "7.0.5", + "contentHash": "FTerRmQPqHrCrnoUzhBu+E+1DNGwyrAMLqHkAqOOOu5pGfyMOj8qQUBxI/gDtWtG11p49UxSfWmBzRNlwZqfUg==", + "dependencies": { + "SQLitePCLRaw.core": "2.1.4" + } + }, + "Microsoft.Extensions.DependencyInjection.Abstractions": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "MgYpU5cwZohUMKKg3sbPhvGG+eAZ/59E9UwPwlrUkyXU+PGzqwZg9yyQNjhxuAWmoNoFReoemeCku50prYSGzA==" + }, + "Microsoft.Extensions.ObjectPool": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "SErON45qh4ogDp6lr6UvVmFYW0FERihW+IQ+2JyFv1PUyWktcJytFaWH5zarufJvZwhci7Rf1IyGXr9pVEadTw==" + }, + "Microsoft.Extensions.Options": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "V7lXCU78lAbzaulCGFKojcCyG8RTJicEbiBkPJjFqiqXwndEBBIehdXRMWEVU3UtzQ1yDvphiWUL9th6/4gJ7w==", + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "2.1.1", + "Microsoft.Extensions.Primitives": "2.1.1" + } + }, + "Microsoft.Extensions.Primitives": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "scJ1GZNIxMmjpENh0UZ8XCQ6vzr/LzeF9WvEA51Ix2OQGAs9WPgPu8ABVUdvpKPLuor/t05gm6menJK3PwqOXg==", + "dependencies": { + "System.Memory": "4.5.1", + "System.Runtime.CompilerServices.Unsafe": "4.5.1" + } + }, + "Microsoft.Net.Http.Headers": { + "type": "Transitive", + 
"resolved": "2.1.1", + "contentHash": "lPNIphl8b2EuhOE9dMH6EZDmu7pS882O+HMi5BJNsigxHaWlBrYxZHFZgE18cyaPp6SSZcTkKkuzfjV/RRQKlA==", + "dependencies": { + "Microsoft.Extensions.Primitives": "2.1.1", + "System.Buffers": "4.5.0" + } + }, + "Microsoft.NETCore.Platforms": { + "type": "Transitive", + "resolved": "1.1.0", + "contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A==" + }, + "Microsoft.NETCore.Targets": { + "type": "Transitive", + "resolved": "1.1.0", + "contentHash": "aOZA3BWfz9RXjpzt0sRJJMjAscAUm3Hoa4UWAfceV9UTYxgwZ1lZt5nO2myFf+/jetYQo4uTP7zS8sJY67BBxg==" + }, + "Serilog.Formatting.Compact": { + "type": "Transitive", + "resolved": "1.1.0", + "contentHash": "pNroKVjo+rDqlxNG5PXkRLpfSCuDOBY0ri6jp9PLe505ljqwhwZz8ospy2vWhQlFu5GkIesh3FcDs4n7sWZODA==", + "dependencies": { + "Serilog": "2.8.0" + } + }, + "Serilog.Sinks.File": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "uwV5hdhWPwUH1szhO8PJpFiahqXmzPzJT/sOijH/kFgUx+cyoDTMM8MHD0adw9+Iem6itoibbUXHYslzXsLEAg==", + "dependencies": { + "Serilog": "2.10.0" + } + }, + "Serilog.Sinks.PeriodicBatching": { + "type": "Transitive", + "resolved": "3.1.0", + "contentHash": "NDWR7m3PalVlGEq3rzoktrXikjFMLmpwF0HI4sowo8YDdU+gqPlTHlDQiOGxHfB0sTfjPA9JjA7ctKG9zqjGkw==", + "dependencies": { + "Serilog": "2.0.0" + } + }, + "SQLitePCLRaw.bundle_e_sqlite3": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "EWI1olKDjFEBMJu0+3wuxwziIAdWDVMYLhuZ3Qs84rrz+DHwD00RzWPZCa+bLnHCf3oJwuFZIRsHT5p236QXww==", + "dependencies": { + "SQLitePCLRaw.lib.e_sqlite3": "2.1.4", + "SQLitePCLRaw.provider.e_sqlite3": "2.1.4" + } + }, + "SQLitePCLRaw.core": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "inBjvSHo9UDKneGNzfUfDjK08JzlcIhn1+SP5Y3m6cgXpCxXKCJDy6Mka7LpgSV+UZmKSnC8rTwB0SQ0xKu5pA==", + "dependencies": { + "System.Memory": "4.5.3" + } + }, + "SQLitePCLRaw.lib.e_sqlite3": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "2C9Q9eX7CPLveJA0rIhf9RXAvu+7nWZu1A2MdG6SD/NOu26TakGgL1nsbc0JAspGijFOo3HoN79xrx8a368fBg==" + }, + "SQLitePCLRaw.provider.e_sqlite3": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "CSlb5dUp1FMIkez9Iv5EXzpeq7rHryVNqwJMWnpq87j9zWZexaEMdisDktMsnnrzKM6ahNrsTkjqNodTBPBxtQ==", + "dependencies": { + "SQLitePCLRaw.core": "2.1.4" + } + }, + "System.Buffers": { + "type": "Transitive", + "resolved": "4.5.1", + "contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg==" + }, + "System.Collections.Immutable": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "FXkLXiK0sVVewcso0imKQoOxjoPAj42R8HtjjbSjVPAzwDfzoyoznWxgA3c38LDbN9SJux1xXoXYAhz98j7r2g==", + "dependencies": { + "System.Memory": "4.5.4" + } + }, + "System.Memory": { + "type": "Transitive", + "resolved": "4.5.4", + "contentHash": "1MbJTHS1lZ4bS4FmsJjnuGJOu88ZzTT2rLvrhW7Ygic+pC0NWA+3hgAen0HRdsocuQXCkUTdFn9yHJJhsijDXw==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Numerics.Vectors": "4.4.0", + "System.Runtime.CompilerServices.Unsafe": "4.5.3" + } + }, + "System.Numerics.Vectors": { + "type": "Transitive", + "resolved": "4.5.0", + "contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ==" + }, + "System.Reactive": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "erBZjkQHWL9jpasCE/0qKAryzVBJFxGHVBAvgRN1bzM0q2s1S4oYREEEL0Vb+1kA/6BKb5FjUZMp5VXmy+gzkQ==", + "dependencies": { + "System.Runtime.InteropServices.WindowsRuntime": "4.3.0", + 
"System.Threading.Tasks.Extensions": "4.5.4" + } + }, + "System.Reflection.Metadata": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "5NecZgXktdGg34rh1OenY1rFNDCI8xSjFr+Z4OU4cU06AQHUdRnIIEeWENu3Wl4YowbzkymAIMvi3WyK9U53pQ==", + "dependencies": { + "System.Collections.Immutable": "5.0.0" + } + }, + "System.Reflection.TypeExtensions": { + "type": "Transitive", + "resolved": "4.7.0", + "contentHash": "VybpaOQQhqE6siHppMktjfGBw1GCwvCqiufqmP8F1nj7fTUNtW35LOEt3UZTEsECfo+ELAl/9o9nJx3U91i7vA==" + }, + "System.Runtime": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "JufQi0vPQ0xGnAczR13AUFglDyVYt4Kqnz1AZaiKZ5+GICq0/1MH/mO/eAJHt/mHW1zjKBJd7kV26SrxddAhiw==", + "dependencies": { + "Microsoft.NETCore.Platforms": "1.1.0", + "Microsoft.NETCore.Targets": "1.1.0" + } + }, + "System.Runtime.CompilerServices.Unsafe": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "ZD9TMpsmYJLrxbbmdvhwt9YEgG5WntEnZ/d1eH8JBX9LBp+Ju8BSBhUGbZMNVHHomWo2KVImJhTDl2hIgw/6MA==" + }, + "System.Runtime.InteropServices.WindowsRuntime": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "J4GUi3xZQLUBasNwZnjrffN8i5wpHrBtZoLG+OhRyGo/+YunMRWWtwoMDlUAIdmX0uRfpHIBDSV6zyr3yf00TA==", + "dependencies": { + "System.Runtime": "4.3.0" + } + }, + "System.Text.Encodings.Web": { + "type": "Transitive", + "resolved": "5.0.1", + "contentHash": "KmJ+CJXizDofbq6mpqDoRRLcxgOd2z9X3XoFNULSbvbqVRZkFX3istvr+MUjL6Zw1RT+RNdoI4GYidIINtgvqQ==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4" + } + }, + "System.Text.Json": { + "type": "Transitive", + "resolved": "5.0.2", + "contentHash": "I47dVIGiV6SfAyppphxqupertT/5oZkYLDCX6vC3HpOI4ZLjyoKAreUoem2ie6G0RbRuFrlqz/PcTQjfb2DOfQ==", + "dependencies": { + "Microsoft.Bcl.AsyncInterfaces": "5.0.0", + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4", + "System.Numerics.Vectors": "4.5.0", + "System.Runtime.CompilerServices.Unsafe": "5.0.0", + "System.Text.Encodings.Web": "5.0.1", + "System.Threading.Tasks.Extensions": "4.5.4" + } + }, + "System.Threading.Tasks.Extensions": { + "type": "Transitive", + "resolved": "4.5.4", + "contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==", + "dependencies": { + "System.Runtime.CompilerServices.Unsafe": "4.5.3" + } + }, + "speckle.core": { + "type": "Project", + "dependencies": { + "GraphQL.Client": "[6.0.0, )", + "Microsoft.CSharp": "[4.7.0, )", + "Microsoft.Data.Sqlite": "[7.0.5, )", + "Polly": "[7.2.3, )", + "Polly.Contrib.WaitAndRetry": "[1.1.1, )", + "Polly.Extensions.Http": "[3.0.0, )", + "Sentry": "[3.33.0, )", + "Sentry.Serilog": "[3.33.0, )", + "Serilog": "[2.12.0, )", + "Serilog.Enrichers.ClientInfo": "[1.3.0, )", + "Serilog.Exceptions": "[8.4.0, )", + "Serilog.Sinks.Console": "[4.1.0, )", + "Serilog.Sinks.Seq": "[5.2.2, )", + "SerilogTimings": "[3.0.1, )", + "Speckle.Newtonsoft.Json": "[13.0.2, )", + "System.DoubleNumerics": "[3.1.3, )" + } + }, + "GraphQL.Client": { + "type": "CentralTransitive", + "requested": "[6.0.0, )", + "resolved": "6.0.0", + "contentHash": "8yPNBbuVBpTptivyAlak4GZvbwbUcjeQTL4vN1HKHRuOykZ4r7l5fcLS6vpyPyLn0x8FsL31xbOIKyxbmR9rbA==", + "dependencies": { + "GraphQL.Client.Abstractions": "6.0.0", + "GraphQL.Client.Abstractions.Websocket": "6.0.0", + "System.Reactive": "5.0.0" + } + }, + "Microsoft.Data.Sqlite": { + "type": "CentralTransitive", + "requested": "[7.0.5, )", + "resolved": "7.0.5", + "contentHash": "KGxbPeWsQMnmQy43DSBxAFtHz3l2JX8EWBSGUCvT3CuZ8KsuzbkqMIJMDOxWtG8eZSoCDI04aiVQjWuuV8HmSw==", + 
"dependencies": { + "Microsoft.Data.Sqlite.Core": "7.0.5", + "SQLitePCLRaw.bundle_e_sqlite3": "2.1.4" + } + }, + "Polly": { + "type": "CentralTransitive", + "requested": "[7.2.3, )", + "resolved": "7.2.3", + "contentHash": "DeCY0OFbNdNxsjntr1gTXHJ5pKUwYzp04Er2LLeN3g6pWhffsGuKVfMBLe1lw7x76HrPkLxKEFxBlpRxS2nDEQ==" + }, + "Polly.Contrib.WaitAndRetry": { + "type": "CentralTransitive", + "requested": "[1.1.1, )", + "resolved": "1.1.1", + "contentHash": "1MUQLiSo4KDkQe6nzQRhIU05lm9jlexX5BVsbuw0SL82ynZ+GzAHQxJVDPVBboxV37Po3SG077aX8DuSy8TkaA==" + }, + "Polly.Extensions.Http": { + "type": "CentralTransitive", + "requested": "[3.0.0, )", + "resolved": "3.0.0", + "contentHash": "drrG+hB3pYFY7w1c3BD+lSGYvH2oIclH8GRSehgfyP5kjnFnHKQuuBhuHLv+PWyFuaTDyk/vfRpnxOzd11+J8g==", + "dependencies": { + "Polly": "7.1.0" + } + }, + "Sentry": { + "type": "CentralTransitive", + "requested": "[3.33.0, )", + "resolved": "3.33.0", + "contentHash": "8vbD2o6IR2wrRrkSiRbnodWGWUOqIlwYtzpjvPNOb5raJdOf+zxMwfS8f6nx9bmrTTfDj7KrCB8C/5OuicAc8A==", + "dependencies": { + "System.Reflection.Metadata": "5.0.0", + "System.Text.Json": "5.0.2" + } + }, + "Sentry.Serilog": { + "type": "CentralTransitive", + "requested": "[3.33.0, )", + "resolved": "3.33.0", + "contentHash": "V8BU7QGWg2qLYfNPqtuTBhC1opysny5l+Ifp6J6PhOeAxU0FssR7nYfbJVetrnLIoh2rd3DlJ6hHYYQosQYcUQ==", + "dependencies": { + "Sentry": "3.33.0", + "Serilog": "2.7.1" + } + }, + "Serilog": { + "type": "CentralTransitive", + "requested": "[2.12.0, )", + "resolved": "2.12.0", + "contentHash": "xaiJLIdu6rYMKfQMYUZgTy8YK7SMZjB4Yk50C/u//Z4OsvxkUfSPJy4nknfvwAC34yr13q7kcyh4grbwhSxyZg==" + }, + "Serilog.Enrichers.ClientInfo": { + "type": "CentralTransitive", + "requested": "[1.3.0, )", + "resolved": "1.3.0", + "contentHash": "mTc7PM+wC9Hr7LWSwqt5mmnlAr7RJs+eTb3PGPRhwdOackk95MkhUZognuxXEdlW19HAFNmEBTSBY5DfLwM8jQ==", + "dependencies": { + "Microsoft.AspNetCore.Http": "2.1.1", + "Serilog": "2.7.1" + } + }, + "Serilog.Exceptions": { + "type": "CentralTransitive", + "requested": "[8.4.0, )", + "resolved": "8.4.0", + "contentHash": "nc/+hUw3lsdo0zCj0KMIybAu7perMx79vu72w0za9Nsi6mWyNkGXxYxakAjWB7nEmYL6zdmhEQRB4oJ2ALUeug==", + "dependencies": { + "Serilog": "2.8.0", + "System.Reflection.TypeExtensions": "4.7.0" + } + }, + "Serilog.Sinks.Console": { + "type": "CentralTransitive", + "requested": "[4.1.0, )", + "resolved": "4.1.0", + "contentHash": "K6N5q+5fetjnJPvCmkWOpJ/V8IEIoMIB1s86OzBrbxwTyHxdx3pmz4H+8+O/Dc/ftUX12DM1aynx/dDowkwzqg==", + "dependencies": { + "Serilog": "2.10.0" + } + }, + "Serilog.Sinks.Seq": { + "type": "CentralTransitive", + "requested": "[5.2.2, )", + "resolved": "5.2.2", + "contentHash": "1Csmo5ua7NKUe0yXUx+zsRefjAniPWcXFhUXxXG8pwo0iMiw2gjn9SOkgYnnxbgWqmlGv236w0N/dHc2v5XwMg==", + "dependencies": { + "Serilog": "2.12.0", + "Serilog.Formatting.Compact": "1.1.0", + "Serilog.Sinks.File": "5.0.0", + "Serilog.Sinks.PeriodicBatching": "3.1.0" + } + }, + "SerilogTimings": { + "type": "CentralTransitive", + "requested": "[3.0.1, )", + "resolved": "3.0.1", + "contentHash": "Zs28eTgszAMwpIrbBnWHBI50yuxL50p/dmAUWmy75+axdZYK/Sjm5/5m1N/CisR8acJUhTVcjPZrsB1P5iv0Uw==", + "dependencies": { + "Serilog": "2.10.0" + } + }, + "Speckle.Newtonsoft.Json": { + "type": "CentralTransitive", + "requested": "[13.0.2, )", + "resolved": "13.0.2", + "contentHash": "g1BejUZwax5PRfL6xHgLEK23sqHWOgOj9hE7RvfRRlN00AGt8GnPYt8HedSK7UB3HiRW8zCA9Pn0iiYxCK24BA==" + }, + "System.DoubleNumerics": { + "type": "CentralTransitive", + "requested": "[3.1.3, )", + "resolved": "3.1.3", + "contentHash": 
"KRKEM/L3KBodjA9VOg3EifFVWUY6EOqaMB05UvPEDm7Zeby/kZW+4kdWUEPzW6xtkwf46p661L9NrbeeQhtLzw==", + "dependencies": { + "NETStandard.Library": "1.6.1" + } + } + } + } +} \ No newline at end of file diff --git a/src/Speckle.Core/Api/Exceptions.cs b/src/Speckle.Core/Api/Exceptions.cs new file mode 100644 index 00000000..7d10bd99 --- /dev/null +++ b/src/Speckle.Core/Api/Exceptions.cs @@ -0,0 +1,104 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using GraphQL; +using Speckle.Core.Logging; + +namespace Speckle.Core.Api; + +/// +/// Base class for GraphQL API exceptions +/// +public class SpeckleGraphQLException : SpeckleException +{ + private readonly GraphQLRequest _request; + public GraphQLResponse? Response { get; } + + public SpeckleGraphQLException(string message, GraphQLRequest request, GraphQLResponse? response) + : base(message) + { + _request = request; + Response = response; + } + + public SpeckleGraphQLException(string message, Exception inner, GraphQLRequest request, GraphQLResponse? response) + : this(message, inner) + { + _request = request; + Response = response; + } + + public SpeckleGraphQLException() { } + + public SpeckleGraphQLException(string message) + : base(message) { } + + public SpeckleGraphQLException(string message, Exception innerException) + : base(message, innerException) { } + + public IEnumerable ErrorMessages => + Response?.Errors != null ? Response.Errors.Select(e => e.Message) : Enumerable.Empty(); + + public IDictionary? Extensions => Response?.Extensions; +} + +public class SpeckleGraphQLException : SpeckleGraphQLException +{ + public SpeckleGraphQLException(string message, GraphQLRequest request, GraphQLResponse? response) + : base(message, request, response) { } + + public SpeckleGraphQLException() { } + + public SpeckleGraphQLException(string message) + : base(message) { } + + public SpeckleGraphQLException(string message, Exception innerException) + : base(message, innerException) { } +} + +/// +/// Represents a "FORBIDDEN" on "UNAUTHORIZED" GraphQL error as an exception. 
+/// https://www.apollographql.com/docs/apollo-server/v2/data/errors/#unauthenticated +/// https://www.apollographql.com/docs/apollo-server/v2/data/errors/#forbidden +/// +public class SpeckleGraphQLForbiddenException : SpeckleGraphQLException +{ + public SpeckleGraphQLForbiddenException(GraphQLRequest request, GraphQLResponse response) + : base("Your request was forbidden", request, response) { } + + public SpeckleGraphQLForbiddenException() { } + + public SpeckleGraphQLForbiddenException(string message) + : base(message) { } + + public SpeckleGraphQLForbiddenException(string message, Exception innerException) + : base(message, innerException) { } +} + +public class SpeckleGraphQLInternalErrorException : SpeckleGraphQLException +{ + public SpeckleGraphQLInternalErrorException(GraphQLRequest request, GraphQLResponse response) + : base("Your request failed on the server side", request, response) { } + + public SpeckleGraphQLInternalErrorException() { } + + public SpeckleGraphQLInternalErrorException(string message) + : base(message) { } + + public SpeckleGraphQLInternalErrorException(string message, Exception innerException) + : base(message, innerException) { } +} + +public class SpeckleGraphQLStreamNotFoundException : SpeckleGraphQLException +{ + public SpeckleGraphQLStreamNotFoundException(GraphQLRequest request, GraphQLResponse response) + : base("Stream not found", request, response) { } + + public SpeckleGraphQLStreamNotFoundException() { } + + public SpeckleGraphQLStreamNotFoundException(string message) + : base(message) { } + + public SpeckleGraphQLStreamNotFoundException(string message, Exception innerException) + : base(message, innerException) { } +} diff --git a/src/Speckle.Core/Api/GraphQL/.editorconfig b/src/Speckle.Core/Api/GraphQL/.editorconfig new file mode 100644 index 00000000..4a82f788 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/.editorconfig @@ -0,0 +1,10 @@ + +[*.{cs,vb}] + +# Name properties with camelCase +dotnet_naming_rule.properties_should_be_camel_case.severity = none +dotnet_naming_rule.properties_should_be_camel_case.symbols = properties +dotnet_naming_rule.properties_should_be_camel_case.style = property_style + +dotnet_naming_symbols.properties.applicable_kinds = property +dotnet_naming_style.property_style.capitalization = pascal_case \ No newline at end of file diff --git a/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ActivityOperations.cs b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ActivityOperations.cs new file mode 100644 index 00000000..34250ecb --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ActivityOperations.cs @@ -0,0 +1,67 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; + +namespace Speckle.Core.Api; + +public partial class Client +{ + /// + /// Gets the activity of a stream + /// + /// Id of the stream to get the activity from + /// Only show activity after this DateTime + /// Only show activity before this DateTime + /// Time to filter the activity with + /// Time to filter the activity with + /// Max number of activity items to get + /// + /// + public async Task> StreamGetActivity( + string id, + DateTime? after = null, + DateTime? before = null, + DateTime? 
cursor = null, + string actionType = "", + int limit = 25, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query Stream($id: String!, $before: DateTime,$after: DateTime, $cursor: DateTime, $activity: String, $limit: Int!) { + stream(id: $id) { + activity (actionType: $activity, after: $after, before: $before, cursor: $cursor, limit: $limit) { + totalCount + cursor + items { + actionType + userId + streamId + resourceId + resourceType + time + info + message + } + } + } + }", + Variables = new + { + id, + limit, + actionType, + after, + before, + cursor + } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.stream.activity.items; + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.BranchOperations.cs b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.BranchOperations.cs new file mode 100644 index 00000000..1c3a67b1 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.BranchOperations.cs @@ -0,0 +1,222 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; + +namespace Speckle.Core.Api; + +public partial class Client +{ + /// + /// Get branches from a given stream, first with a max of 500 and then with a max of 100. + /// This ensures that if the server API is limiting to 100 branches, that any failure will try again at the lower value. + /// + /// Id of the stream to get the branches from + /// Max number of commits to retrieve + /// + public async Task> StreamGetBranchesWithLimitRetry(string streamId, int commitsLimit = 10) + { + List branches; + try + { + branches = await StreamGetBranches(streamId, ServerLimits.BRANCH_GET_LIMIT, commitsLimit).ConfigureAwait(true); + } + catch (SpeckleGraphQLException) + { + branches = await StreamGetBranches(streamId, ServerLimits.OLD_BRANCH_GET_LIMIT, commitsLimit) + .ConfigureAwait(true); + } + + return branches; + } + + /// + /// Get branches from a given stream + /// + /// Id of the stream to get the branches from + /// Max number of branches to retrieve + /// Max number of commits to retrieve + /// + /// + public async Task> StreamGetBranches( + string streamId, + int branchesLimit = 10, + int commitsLimit = 10, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + $@"query Stream ($streamId: String!) {{ + stream(id: $streamId) {{ + branches(limit: {branchesLimit}) {{ + items {{ + id + name + description + commits (limit: {commitsLimit}) {{ + totalCount + cursor + items {{ + id + referencedObject + sourceApplication + message + authorName + authorId + branchName + parents + createdAt + }} + }} + }} + }} + }} + }}", + Variables = new { streamId } + }; + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.stream.branches.items; + } + + /// + /// Creates a branch on a stream. + /// + /// + /// + /// The branch id. 
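+  /// <remarks>
+  /// Illustrative usage sketch; assumes an authenticated <see cref="Client"/> and guesses at the
+  /// <c>BranchCreateInput</c> field names, which are defined elsewhere in the SDK:
+  /// <code>
+  /// var branchId = await client.BranchCreate(
+  ///   new BranchCreateInput { streamId = "myStreamId", name = "dev", description = "work in progress" });
+  /// </code>
+  /// </remarks>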
+ public async Task BranchCreate(BranchCreateInput branchInput, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = @"mutation branchCreate($myBranch: BranchCreateInput!){ branchCreate(branch: $myBranch)}", + Variables = new { myBranch = branchInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (string)res["branchCreate"]; + } + + /// + /// Gets a given branch from a stream. + /// + /// Id of the stream to get the branch from + /// Name of the branch to get + /// + /// The requested branch + public async Task BranchGet( + string streamId, + string branchName, + int commitsLimit = 10, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + $@"query Stream($streamId: String!, $branchName: String!) {{ + stream(id: $streamId) {{ + branch(name: $branchName){{ + id, + name, + description, + commits (limit: {commitsLimit}) {{ + totalCount, + cursor, + items {{ + id, + referencedObject, + sourceApplication, + totalChildrenCount, + message, + authorName, + authorId, + branchName, + parents, + createdAt + }} + }} + }} + }} + }}", + Variables = new { streamId, branchName } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.stream.branch; + } + + /// + /// Gets a given model from a project. + /// + /// + /// Id of the project to get the model from + /// Id of the model + /// + public async Task ModelGet(string projectId, string modelId, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = + $@"query ProjectModel($projectId: String!, $modelId: String!) {{ + project(id: $projectId) {{ + model(id: $modelId){{ + id, + name, + description + }} + }} + }}", + Variables = new { projectId, modelId } + }; + + var res = await ExecuteGraphQLRequest>>>( + request, + cancellationToken + ) + .ConfigureAwait(false); + var branch = new Branch + { + description = res["project"]["model"]["description"], + id = res["project"]["model"]["id"], + name = res["project"]["model"]["name"] + }; + return branch; + } + + /// + /// Updates a branch. + /// + /// + /// The stream's id. + public async Task BranchUpdate(BranchUpdateInput branchInput, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = @"mutation branchUpdate($myBranch: BranchUpdateInput!){ branchUpdate(branch: $myBranch)}", + Variables = new { myBranch = branchInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["branchUpdate"]; + } + + /// + /// Deletes a stream. 
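+  /// (More precisely: removes a single branch from a stream via the branchDelete mutation; the stream itself is not deleted.)
+  /// <remarks>
+  /// Illustrative usage sketch; the <c>BranchDeleteInput</c> field names are assumptions, as that type is defined elsewhere in the SDK:
+  /// <code>
+  /// bool deleted = await client.BranchDelete(new BranchDeleteInput { streamId = "myStreamId", id = "branchIdToDelete" });
+  /// </code>
+  /// </remarks>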
+ /// + /// + /// + /// + public async Task BranchDelete(BranchDeleteInput branchInput, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = @"mutation branchDelete($myBranch: BranchDeleteInput!){ branchDelete(branch: $myBranch)}", + Variables = new { myBranch = branchInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["branchDelete"]; + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommentOperations.cs b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommentOperations.cs new file mode 100644 index 00000000..20feaaf3 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommentOperations.cs @@ -0,0 +1,103 @@ +using System.Threading; +using System.Threading.Tasks; +using GraphQL; + +namespace Speckle.Core.Api; + +public partial class Client +{ + /// + /// Gets the comments on a Stream + /// + /// Id of the stream to get the comments from + /// The number of comments to get + /// Time to filter the comments with + /// + /// + public async Task StreamGetComments( + string streamId, + int limit = 25, + string? cursor = null, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query Comments($streamId: String!, $cursor: String, $limit: Int!) { + comments(streamId: $streamId, cursor: $cursor, limit: $limit) { + totalCount + cursor + items { + id + authorId + archived + rawText + data + createdAt + updatedAt + viewedAt + reactions + resources { + resourceId + resourceType + } + replies { + totalCount + cursor + items { + id + authorId + archived + rawText + data + createdAt + updatedAt + viewedAt + } + } + } + } + }", + Variables = new + { + streamId, + cursor, + limit + } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.comments; + } + + /// + /// Gets the screenshot of a Comment + /// + /// Id of the comment + /// Id of the stream to get the comment from + /// + /// + public async Task StreamGetCommentScreenshot( + string id, + string streamId, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query Comment($id: String!, $streamId: String!) { + comment(id: $id, streamId: $streamId) { + id + screenshot + } + } + ", + Variables = new { id, streamId } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.comment.screenshot; + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommitOperations.cs b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommitOperations.cs new file mode 100644 index 00000000..d76bc430 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.CommitOperations.cs @@ -0,0 +1,161 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; + +namespace Speckle.Core.Api; + +public partial class Client +{ + /// + /// Gets a given commit from a stream. + /// + /// Id of the stream to get the commit from + /// Id of the commit to get + /// + /// + public async Task CommitGet(string streamId, string commitId, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = + @"query Stream($streamId: String!, $commitId: String!) 
{ + stream(id: $streamId) { + commit(id: $commitId){ + id, + message, + sourceApplication, + totalChildrenCount, + referencedObject, + branchName, + createdAt, + parents, + authorName + } + } + }", + Variables = new { streamId, commitId } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.stream.commit; + } + + /// + /// Gets the latest commits from a stream + /// + /// Id of the stream to get the commits from + /// Max number of commits to get + /// + /// The requested commits + public async Task> StreamGetCommits( + string streamId, + int limit = 10, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query Stream($streamId: String!, $limit: Int!) { + stream(id: $streamId) { + commits(limit: $limit) { + items { + id, + message, + branchName, + sourceApplication, + totalChildrenCount, + referencedObject, + createdAt, + parents, + authorName, + authorId, + authorAvatar + } + } + } + }", + Variables = new { streamId, limit } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.stream.commits.items; + } + + /// + /// Creates a commit on a branch. + /// + /// + /// The commit id. + public async Task CommitCreate(CommitCreateInput commitInput, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = @"mutation commitCreate($myCommit: CommitCreateInput!){ commitCreate(commit: $myCommit)}", + Variables = new { myCommit = commitInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (string)res["commitCreate"]; + } + + /// + /// Updates a commit. + /// + /// + /// + /// The stream's id. + public async Task CommitUpdate(CommitUpdateInput commitInput, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = @"mutation commitUpdate($myCommit: CommitUpdateInput!){ commitUpdate(commit: $myCommit)}", + Variables = new { myCommit = commitInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["commitUpdate"]; + } + + /// + /// Deletes a commit. + /// + /// + /// + /// + public async Task CommitDelete(CommitDeleteInput commitInput, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = @"mutation commitDelete($myCommit: CommitDeleteInput!){ commitDelete(commit: $myCommit)}", + Variables = new { myCommit = commitInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["commitDelete"]; + } + + /// + /// Sends a commitReceived mutation, affirming a commit has been received. 
+ /// + /// Used for read receipts + /// + /// + /// + public async Task CommitReceived( + CommitReceivedInput commitReceivedInput, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = @"mutation($myInput:CommitReceivedInput!){ commitReceive(input:$myInput) }", + Variables = new { myInput = commitReceivedInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + + return (bool)res["commitReceive"]; + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObjectOperations.cs b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObjectOperations.cs new file mode 100644 index 00000000..79e9f8b9 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObjectOperations.cs @@ -0,0 +1,71 @@ +using System.Threading; +using System.Threading.Tasks; +using GraphQL; + +namespace Speckle.Core.Api; + +public partial class Client +{ + /// + /// Gets data about the requested Speckle object from a stream. + /// + /// Id of the stream to get the object from + /// Id of the object to get + /// + /// + public async Task ObjectGet( + string streamId, + string objectId, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query Stream($streamId: String!, $objectId: String!) { + stream(id: $streamId) { + object(id: $objectId){ + id + applicationId + createdAt + totalChildrenCount + } + } + }", + Variables = new { streamId, objectId } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.stream.@object; + } + + /// + /// Gets a given object from a stream. + /// + /// + /// + /// + /// + public async Task ObjectCountGet( + string streamId, + string objectId, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query Stream($streamId: String!, $objectId: String!) { + stream(id: $streamId) { + object(id: $objectId){ + totalChildrenCount + } + } + }", + Variables = new { streamId, objectId } + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.stream.@object; + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObsoleteOperations.cs b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObsoleteOperations.cs new file mode 100644 index 00000000..15d0054f --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ObsoleteOperations.cs @@ -0,0 +1,250 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; + +namespace Speckle.Core.Api; + +[SuppressMessage("Design", "CA1068:CancellationToken parameters must come last")] +public partial class Client +{ + #region Stream Grant Permission + + /// + /// Grants permissions to a user on a given stream. + /// + /// + /// + [Obsolete("Please use the `StreamUpdatePermission` method", true)] + public Task StreamGrantPermission(StreamPermissionInput permissionInput) + { + return StreamGrantPermission(CancellationToken.None, permissionInput); + } + + /// + /// Grants permissions to a user on a given stream. 
+ /// + /// + /// + /// + [Obsolete("Please use the `StreamUpdatePermission` method", true)] + public async Task StreamGrantPermission( + CancellationToken cancellationToken, + StreamPermissionInput permissionInput + ) + { + var request = new GraphQLRequest + { + Query = + @" + mutation streamGrantPermission($permissionParams: StreamGrantPermissionInput!) { + streamGrantPermission(permissionParams:$permissionParams) + }", + Variables = new { permissionParams = permissionInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["streamGrantPermission"]; + } + + #endregion + + #region Cancellation token as last param + + [Obsolete("Use overload with cancellation token parameter last")] + public Task> StreamGetActivity( + CancellationToken cancellationToken, + string id, + DateTime? after = null, + DateTime? before = null, + DateTime? cursor = null, + string actionType = "", + int limit = 25 + ) + { + return StreamGetActivity(id, after, before, cursor, actionType, limit, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task> StreamGetBranches( + CancellationToken cancellationToken, + string streamId, + int branchesLimit = 10, + int commitsLimit = 10 + ) + { + return StreamGetBranches(streamId, branchesLimit, commitsLimit, CancellationToken.None); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task BranchCreate(CancellationToken cancellationToken, BranchCreateInput branchInput) + { + return BranchCreate(branchInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task BranchGet( + CancellationToken cancellationToken, + string streamId, + string branchName, + int commitsLimit = 10 + ) + { + return BranchGet(streamId, branchName, commitsLimit, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task BranchUpdate(CancellationToken cancellationToken, BranchUpdateInput branchInput) + { + return BranchUpdate(branchInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task BranchDelete(CancellationToken cancellationToken, BranchDeleteInput branchInput) + { + return BranchDelete(branchInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task StreamGetComments( + CancellationToken cancellationToken, + string streamId, + int limit = 25, + string? 
cursor = null + ) + { + return StreamGetComments(streamId, limit, cursor, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task StreamGetCommentScreenshot(CancellationToken cancellationToken, string id, string streamId) + { + return StreamGetCommentScreenshot(id, streamId, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task CommitGet(CancellationToken cancellationToken, string streamId, string commitId) + { + return CommitGet(streamId, commitId, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task> StreamGetCommits(CancellationToken cancellationToken, string streamId, int limit = 10) + { + return StreamGetCommits(streamId, limit, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task CommitCreate(CancellationToken cancellationToken, CommitCreateInput commitInput) + { + return CommitCreate(commitInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task CommitUpdate(CancellationToken cancellationToken, CommitUpdateInput commitInput) + { + return CommitUpdate(commitInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task CommitDelete(CancellationToken cancellationToken, CommitDeleteInput commitInput) + { + return CommitDelete(commitInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task CommitReceived(CancellationToken cancellationToken, CommitReceivedInput commitReceivedInput) + { + return CommitReceived(commitReceivedInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task ObjectGet(CancellationToken cancellationToken, string streamId, string objectId) + { + return ObjectGet(streamId, objectId, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task ObjectCountGet(CancellationToken cancellationToken, string streamId, string objectId) + { + return ObjectCountGet(streamId, objectId, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task StreamGet(CancellationToken cancellationToken, string id, int branchesLimit = 10) + { + return StreamGet(id, branchesLimit, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task> StreamsGet(CancellationToken cancellationToken, int limit = 10) + { + return StreamsGet(limit, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task> FavoriteStreamsGet(CancellationToken cancellationToken, int limit = 10) + { + return FavoriteStreamsGet(limit, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task> StreamSearch(CancellationToken cancellationToken, string query, int limit = 10) + { + return StreamSearch(query, limit, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task StreamCreate(CancellationToken cancellationToken, StreamCreateInput streamInput) + { + return StreamCreate(streamInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task StreamUpdate(CancellationToken cancellationToken, StreamUpdateInput streamInput) + { + return StreamUpdate(streamInput, 
cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task StreamDelete(CancellationToken cancellationToken, string id) + { + return StreamDelete(id, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task StreamRevokePermission( + CancellationToken cancellationToken, + StreamRevokePermissionInput permissionInput + ) + { + return StreamRevokePermission(permissionInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task StreamGetPendingCollaborators(CancellationToken cancellationToken, string id) + { + return StreamGetPendingCollaborators(id, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task StreamInviteCreate(CancellationToken cancellationToken, StreamInviteCreateInput inviteCreateInput) + { + return StreamInviteCreate(inviteCreateInput, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task OtherUserGet(CancellationToken cancellationToken, string id) + { + return OtherUserGet(id, cancellationToken); + } + + [Obsolete("Use overload with cancellation token parameter last")] + public Task> UserSearch(CancellationToken cancellationToken, string query, int limit = 10) + { + return UserSearch(query, limit, cancellationToken); + } + #endregion +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ServerOperations.cs b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ServerOperations.cs new file mode 100644 index 00000000..2244822d --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.ServerOperations.cs @@ -0,0 +1,41 @@ +using System; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; +using Speckle.Core.Logging; + +namespace Speckle.Core.Api; + +public partial class Client +{ + /// + /// Gets the version of the current server. Useful for guarding against unsupported api calls on newer or older servers. + /// + /// [Optional] defaults to an empty cancellation token + /// object excluding any strings (eg "2.7.2-alpha.6995" becomes "2.7.2.6995") + /// + [Obsolete("Use GraphQLHttpClient.GetServerVersion instead")] + public async Task GetServerVersion(CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = + @"query Server { + serverInfo { + version + } + }" + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + + if (res.serverInfo.version.Contains("dev")) + { + return new System.Version(999, 999, 999); + } + + ServerVersion = new System.Version(Regex.Replace(res.serverInfo.version, "[-a-zA-Z]+", "")); + return ServerVersion; + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.StreamOperations.cs b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.StreamOperations.cs new file mode 100644 index 00000000..f90a6849 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.StreamOperations.cs @@ -0,0 +1,513 @@ +#nullable disable +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; +using Speckle.Core.Logging; + +namespace Speckle.Core.Api; + +public partial class Client +{ + /// + /// Checks if a stream exists by id. 
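+  /// Returns true when the stream can be read with the current account, and false when the server
+  /// reports a forbidden or stream-not-found error (see the catch blocks below).
+  /// <remarks>
+  /// Illustrative usage sketch with a placeholder stream id:
+  /// <code>
+  /// bool canAccess = await client.IsStreamAccessible("myStreamId");
+  /// </code>
+  /// </remarks>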
+ /// + /// Id of the stream to get + /// + /// + public async Task IsStreamAccessible(string id, CancellationToken cancellationToken = default) + { + try + { + var request = new GraphQLRequest + { + Query = + $@"query Stream($id: String!) {{ + stream(id: $id) {{ + id + }} + }}", + Variables = new { id } + }; + var stream = (await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false)).stream; + + return stream.id == id; + } + catch (SpeckleGraphQLForbiddenException) + { + return false; + } + catch (SpeckleGraphQLStreamNotFoundException) + { + return false; + } + } + + /// + /// Gets a stream by id including basic branch info (id, name, description, and total commit count). + /// For detailed commit and branch info, use and respectively. + /// + /// Id of the stream to get + /// Max number of branches to retrieve + /// + /// + public async Task StreamGet(string id, int branchesLimit = 10, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = + $@"query Stream($id: String!) {{ + stream(id: $id) {{ + id + name + description + isPublic + role + createdAt + updatedAt + commentCount + favoritedDate + favoritesCount + collaborators {{ + id + name + role + avatar + }}, + branches (limit: {branchesLimit}){{ + totalCount, + cursor, + items {{ + id, + name, + description, + commits {{ + totalCount + }} + }} + }} + }} + }}", + Variables = new { id } + }; + return (await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false)).stream; + } + + /// + /// Gets all streams for the current user + /// + /// Max number of streams to return + /// + /// + public async Task> StreamsGet(int limit = 10, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = + $@"query User {{ + activeUser{{ + id, + email, + name, + bio, + company, + avatar, + verified, + profiles, + role, + streams(limit:{limit}) {{ + totalCount, + cursor, + items {{ + id, + name, + description, + isPublic, + role, + createdAt, + updatedAt, + favoritedDate, + commentCount + favoritesCount + collaborators {{ + id, + name, + role, + avatar + }} + }} + }} + }} + }}" + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + + if (res?.activeUser == null) + { + throw new SpeckleException( + "User is not authenticated, or the credentials were not valid. Check the provided account is still valid, remove it from manager and add it again." 
+ ); + } + + return res.activeUser.streams.items; + } + + /// + /// Gets all favorite streams for the current user + /// + /// Max number of streams to return + /// + /// + public async Task> FavoriteStreamsGet(int limit = 10, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = + $@"query User {{ + activeUser{{ + id, + email, + name, + bio, + company, + avatar, + verified, + profiles, + role, + favoriteStreams(limit:{limit}) {{ + totalCount, + cursor, + items {{ + id, + name, + description, + isPublic, + role, + createdAt, + updatedAt, + favoritedDate, + commentCount + favoritesCount + collaborators {{ + id, + name, + role, + avatar + }} + }} + }} + }} + }}" + }; + return (await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false)) + .activeUser + .favoriteStreams + .items; + } + + /// + /// Searches the user's streams by name, description, and ID + /// + /// String query to search for + /// Max number of streams to return + /// + /// + public async Task> StreamSearch( + string query, + int limit = 10, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query Streams ($query: String!, $limit: Int!) { + streams(query: $query, limit: $limit) { + totalCount, + cursor, + items { + id, + name, + description, + isPublic, + role, + createdAt, + updatedAt, + commentCount + favoritesCount + collaborators { + id, + name, + role + } + } + } + }", + Variables = new { query, limit } + }; + + var res = await GQLClient.SendMutationAsync(request, cancellationToken).ConfigureAwait(false); //WARN: Why do we do this? + return (await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false)).streams.items; + } + + /// + /// Creates a stream. + /// + /// + /// + /// The stream's id. + public async Task StreamCreate(StreamCreateInput streamInput, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = @"mutation streamCreate($myStream: StreamCreateInput!) { streamCreate(stream: $myStream) }", + Variables = new { myStream = streamInput } + }; + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (string)res["streamCreate"]; + } + + /// + /// Updates a stream. + /// + /// Note: the id field needs to be a valid stream id. + /// + /// The stream's id. + public async Task StreamUpdate(StreamUpdateInput streamInput, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = @"mutation streamUpdate($myStream: StreamUpdateInput!) { streamUpdate(stream:$myStream) }", + Variables = new { myStream = streamInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + + return (bool)res["streamUpdate"]; + } + + /// + /// Deletes a stream. + /// + /// Id of the stream to be deleted + /// + /// + public async Task StreamDelete(string id, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = @"mutation streamDelete($id: String!) { streamDelete(id:$id) }", + Variables = new { id } + }; + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["streamDelete"]; + } + + /// + /// Revokes permissions of a user on a given stream. 
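+  /// <remarks>
+  /// Illustrative usage sketch; the <c>StreamRevokePermissionInput</c> field names are assumptions, as that type is defined elsewhere in the SDK:
+  /// <code>
+  /// bool revoked = await client.StreamRevokePermission(
+  ///   new StreamRevokePermissionInput { streamId = "myStreamId", userId = "collaboratorUserId" });
+  /// </code>
+  /// </remarks>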
+ /// + /// + /// + /// + public async Task StreamRevokePermission( + StreamRevokePermissionInput permissionInput, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"mutation streamRevokePermission($permissionParams: StreamRevokePermissionInput!) { + streamRevokePermission(permissionParams: $permissionParams) + }", + Variables = new { permissionParams = permissionInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["streamRevokePermission"]; + } + + /// + /// Updates permissions for a user on a given stream. + /// + /// includes the streamId, the userId of the user to update, and the user's new role + /// + /// + /// + public async Task StreamUpdatePermission( + StreamPermissionInput updatePermissionInput, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @" + mutation streamUpdatePermission($permissionParams: StreamUpdatePermissionInput!) { + streamUpdatePermission(permissionParams:$permissionParams) + }", + Variables = new { permissionParams = updatePermissionInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["streamUpdatePermission"]; + } + + /// + /// Gets the pending collaborators of a stream by id. + /// Requires the user to be an owner of the stream. + /// + /// + /// + /// + public async Task StreamGetPendingCollaborators( + string streamId, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query Stream($id: String!) { + stream(id: $id) { + id + pendingCollaborators { + id + inviteId + title + role + user { + avatar + } + } + } + }", + Variables = new { id = streamId } + }; + var res = await GQLClient.SendMutationAsync(request, cancellationToken).ConfigureAwait(false); //WARN: Why do we do this? + return (await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false)).stream; + } + + /// + /// Sends an email invite to join a stream and assigns them a collaborator role. + /// + /// + /// + /// + public async Task StreamInviteCreate( + StreamInviteCreateInput inviteCreateInput, + CancellationToken cancellationToken = default + ) + { + if ((inviteCreateInput.email == null) & (inviteCreateInput.userId == null)) + { + throw new ArgumentException("You must provide either an email or a user id to create a stream invite"); + } + + var request = new GraphQLRequest + { + Query = + @" + mutation streamInviteCreate($input: StreamInviteCreateInput!) { + streamInviteCreate(input: $input) + }", + Variables = new { input = inviteCreateInput } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["streamInviteCreate"]; + } + + /// + /// Cancels an invite to join a stream. + /// + /// Id of the stream + /// Id of the invite to cancel + /// + /// + public async Task StreamInviteCancel( + string streamId, + string inviteId, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @" + mutation streamInviteCancel( $streamId: String!, $inviteId: String! ) { + streamInviteCancel(streamId: $streamId, inviteId: $inviteId) + }", + Variables = new { streamId, inviteId } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["streamInviteCancel"]; + } + + /// + /// Accept or decline a stream invite. 
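+  /// <remarks>
+  /// Illustrative usage sketch with placeholder values; the token is the one carried by the invite (see GetAllPendingInvites below):
+  /// <code>
+  /// bool accepted = await client.StreamInviteUse("myStreamId", "inviteToken", accept: true);
+  /// </code>
+  /// </remarks>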
+ /// + /// + /// + /// + /// + /// + /// + public async Task StreamInviteUse( + string streamId, + string token, + bool accept = true, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @" + mutation streamInviteUse( $accept: Boolean!, $streamId: String!, $token: String! ) { + streamInviteUse(accept: $accept, streamId: $streamId, token: $token) + }", + Variables = new + { + streamId, + token, + accept + } + }; + + var res = await ExecuteGraphQLRequest>(request, cancellationToken).ConfigureAwait(false); + return (bool)res["streamInviteUse"]; + } + + public async Task> GetAllPendingInvites(CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = + @" + query StreamInvites { + streamInvites{ + id + token + inviteId + streamId + streamName + title + role + invitedBy { + id + name + company + avatar + } + } + }" + }; + + var res = await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false); + return res.streamInvites; + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.UserOperations.cs b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.UserOperations.cs new file mode 100644 index 00000000..a79764e5 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.GraphqlCleintOperations/Client.UserOperations.cs @@ -0,0 +1,99 @@ +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; + +namespace Speckle.Core.Api; + +public partial class Client +{ + /// + /// Gets the currently active user profile. + /// + /// + /// + public async Task ActiveUserGet(CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = + @"query User { + activeUser { + id, + email, + name, + bio, + company, + avatar, + verified, + profiles, + role, + } + }" + }; + return (await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false)).activeUser; + } + + /// + /// Get another user's profile by its user id. + /// + /// Id of the user you are looking for + /// + /// + public async Task OtherUserGet(string id, CancellationToken cancellationToken = default) + { + var request = new GraphQLRequest + { + Query = + @"query LimitedUser($id: String!) { + otherUser(id: $id){ + id, + name, + bio, + company, + avatar, + verified, + role, + } + }", + Variables = new { id } + }; + return (await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false)).otherUser; + } + + /// + /// Searches for a user on the server. + /// + /// String to search for. Must be at least 3 characters + /// Max number of users to return + /// + public async Task> UserSearch( + string query, + int limit = 10, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query UserSearch($query: String!, $limit: Int!) 
{ + userSearch(query: $query, limit: $limit) { + cursor, + items { + id + name + bio + company + avatar + verified + role + } + } + }", + Variables = new { query, limit } + }; + return (await ExecuteGraphQLRequest(request, cancellationToken).ConfigureAwait(false)) + .userSearch + .items; + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Branch.cs b/src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Branch.cs new file mode 100644 index 00000000..a27910a0 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Branch.cs @@ -0,0 +1,87 @@ +#nullable disable +using System; +using GraphQL; +using Speckle.Core.Api.SubscriptionModels; + +namespace Speckle.Core.Api; + +public partial class Client +{ + #region BranchCreated + + public delegate void BranchCreatedHandler(object sender, BranchInfo e); + + public event BranchCreatedHandler OnBranchCreated; + public IDisposable BranchCreatedSubscription { get; private set; } + + /// + /// Subscribe to events of branch created for a stream + /// + /// + public void SubscribeBranchCreated(string streamId) + { + var request = new GraphQLRequest { Query = $@"subscription {{ branchCreated (streamId: ""{streamId}"") }}" }; + + BranchCreatedSubscription = SubscribeTo( + request, + (sender, result) => OnBranchCreated?.Invoke(sender, result.branchCreated) + ); + } + + public bool HasSubscribedBranchCreated => BranchCreatedSubscription != null; + + #endregion + + + #region BranchUpdated + + public delegate void BranchUpdatedHandler(object sender, BranchInfo e); + + public event BranchUpdatedHandler OnBranchUpdated; + public IDisposable BranchUpdatedSubscription { get; private set; } + + /// + /// Subscribe to events of branch updated for a stream + /// + /// + public void SubscribeBranchUpdated(string streamId, string branchId = null) + { + var request = new GraphQLRequest + { + Query = $@"subscription {{ branchUpdated (streamId: ""{streamId}"", branchId: ""{branchId}"") }}" + }; + BranchUpdatedSubscription = SubscribeTo( + request, + (sender, result) => OnBranchUpdated?.Invoke(sender, result.branchUpdated) + ); + } + + public bool HasSubscribedBranchUpdated => BranchUpdatedSubscription != null; + + #endregion + + #region BranchDeleted + + public delegate void BranchDeletedHandler(object sender, BranchInfo e); + + public event BranchDeletedHandler OnBranchDeleted; + public IDisposable BranchDeletedSubscription { get; private set; } + + /// + /// Subscribe to events of branch deleted for a stream + /// + /// + public void SubscribeBranchDeleted(string streamId) + { + var request = new GraphQLRequest { Query = $@"subscription {{ branchDeleted (streamId: ""{streamId}"") }}" }; + + BranchDeletedSubscription = SubscribeTo( + request, + (sender, result) => OnBranchDeleted?.Invoke(sender, result.branchDeleted) + ); + } + + public bool HasSubscribedBranchDeleted => BranchDeletedSubscription != null; + + #endregion +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Commit.cs b/src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Commit.cs new file mode 100644 index 00000000..a3614ae6 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Commit.cs @@ -0,0 +1,87 @@ +#nullable disable +using System; +using GraphQL; +using Speckle.Core.Api.SubscriptionModels; + +namespace Speckle.Core.Api; + +public partial class Client +{ + #region CommitCreated + + public delegate void 
CommitCreatedHandler(object sender, CommitInfo e); + + public event CommitCreatedHandler OnCommitCreated; + public IDisposable CommitCreatedSubscription; + + /// + /// Subscribe to events of commit created for a stream + /// + /// + public void SubscribeCommitCreated(string streamId) + { + var request = new GraphQLRequest { Query = $@"subscription {{ commitCreated (streamId: ""{streamId}"") }}" }; + + CommitCreatedSubscription = SubscribeTo( + request, + (sender, result) => OnCommitCreated?.Invoke(sender, result.commitCreated) + ); + } + + public bool HasSubscribedCommitCreated => CommitCreatedSubscription != null; + + #endregion + + #region CommitUpdated + + public delegate void CommitUpdatedHandler(object sender, CommitInfo e); + + public event CommitUpdatedHandler OnCommitUpdated; + public IDisposable CommitUpdatedSubscription; + + /// + /// Subscribe to events of commit updated for a stream + /// + /// + public void SubscribeCommitUpdated(string streamId, string commitId = null) + { + var request = new GraphQLRequest + { + Query = $@"subscription {{ commitUpdated (streamId: ""{streamId}"", commitId: ""{commitId}"") }}" + }; + + var res = GQLClient.CreateSubscriptionStream(request); + CommitUpdatedSubscription = SubscribeTo( + request, + (sender, result) => OnCommitUpdated?.Invoke(sender, result.commitUpdated) + ); + } + + public bool HasSubscribedCommitUpdated => CommitUpdatedSubscription != null; + + #endregion + + #region CommitDeleted + + public delegate void CommitDeletedHandler(object sender, CommitInfo e); + + public event CommitDeletedHandler OnCommitDeleted; + public IDisposable CommitDeletedSubscription; + + /// + /// Subscribe to events of commit updated for a stream + /// + /// + public void SubscribeCommitDeleted(string streamId) + { + var request = new GraphQLRequest { Query = $@"subscription {{ commitDeleted (streamId: ""{streamId}"") }}" }; + CommitDeletedSubscription = SubscribeTo( + request, + (sender, result) => OnCommitDeleted?.Invoke(sender, result.commitDeleted) + ); + } + + public bool HasSubscribedCommitDeleted => CommitDeletedSubscription != null; + + #endregion +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Stream.cs b/src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Stream.cs new file mode 100644 index 00000000..a62e757f --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.Subscriptions/Client.Subscriptions.Stream.cs @@ -0,0 +1,110 @@ +#nullable disable +using System; +using GraphQL; +using Speckle.Core.Api.SubscriptionModels; + +namespace Speckle.Core.Api; + +public partial class Client +{ + #region UserStreamAdded + + public delegate void UserStreamAddedHandler(object sender, StreamInfo e); + + public event UserStreamAddedHandler OnUserStreamAdded; + public IDisposable UserStreamAddedSubscription; + + /// + /// Subscribe to events of streams added for the current user + /// + /// + public void SubscribeUserStreamAdded() + { + var request = new GraphQLRequest { Query = @"subscription { userStreamAdded }" }; + + UserStreamAddedSubscription = SubscribeTo( + request, + (sender, result) => OnUserStreamAdded?.Invoke(sender, result.userStreamAdded) + ); + } + + public bool HasSubscribedUserStreamAdded => UserStreamAddedSubscription != null; + + #endregion + + #region StreamUpdated + + public delegate void StreamUpdatedHandler(object sender, StreamInfo e); + + public event StreamUpdatedHandler OnStreamUpdated; + public IDisposable StreamUpdatedSubscription; + + /// + /// Subscribe to events of streams 
updated for a specific streamId + /// + /// streamId + public void SubscribeStreamUpdated(string id) + { + var request = new GraphQLRequest { Query = $@"subscription {{ streamUpdated( streamId: ""{id}"") }}" }; + StreamUpdatedSubscription = SubscribeTo( + request, + (sender, result) => OnStreamUpdated?.Invoke(sender, result.streamUpdated) + ); + } + + public bool HasSubscribedStreamUpdated => StreamUpdatedSubscription != null; + + #endregion + + #region StreamRemoved + + public delegate void UserStreamRemovedHandler(object sender, StreamInfo e); + + public event UserStreamRemovedHandler OnUserStreamRemoved; + public IDisposable UserStreamRemovedSubscription; + + /// + /// Subscribe to events of streams removed for the current user + /// + public void SubscribeUserStreamRemoved() + { + var request = new GraphQLRequest { Query = @"subscription { userStreamRemoved }" }; + + UserStreamRemovedSubscription = SubscribeTo( + request, + (sender, result) => OnUserStreamRemoved?.Invoke(sender, result.userStreamRemoved) + ); + } + + public bool HasSubscribedUserStreamRemoved => UserStreamRemovedSubscription != null; + + #endregion + + #region CommentActivity + + public delegate void CommentActivityHandler(object sender, CommentItem e); + + public event CommentActivityHandler OnCommentActivity; + public IDisposable CommentActivitySubscription; + + /// + /// Subscribe to new comment events + /// + /// + public void SubscribeCommentActivity(string streamId) + { + var request = new GraphQLRequest + { + Query = + $@"subscription {{ commentActivity( streamId: ""{streamId}"") {{ type comment {{ id authorId archived screenshot rawText }} }} }}" + }; + CommentActivitySubscription = SubscribeTo( + request, + (sender, result) => OnCommentActivity?.Invoke(sender, result.commentActivity.comment) + ); + } + + public bool HasSubscribedCommentActivity => CommentActivitySubscription != null; + + #endregion +} diff --git a/src/Speckle.Core/Api/GraphQL/Client.cs b/src/Speckle.Core/Api/GraphQL/Client.cs new file mode 100644 index 00000000..bacfa63b --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Client.cs @@ -0,0 +1,385 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Dynamic; +using System.Linq; +using System.Net.Http; +using System.Net.WebSockets; +using System.Reflection; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; +using GraphQL.Client.Http; +using Polly; +using Polly.Contrib.WaitAndRetry; +using Serilog.Context; +using Serilog.Core; +using Serilog.Core.Enrichers; +using Speckle.Core.Api.GraphQL.Serializer; +using Speckle.Core.Credentials; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Newtonsoft.Json; + +namespace Speckle.Core.Api; + +public sealed partial class Client : IDisposable +{ + [Obsolete] + internal Client() { } + + public Client(Account account) + { + Account = account ?? 
throw new SpeckleException("Provided account is null."); + + HttpClient = Http.GetHttpProxyClient(null, TimeSpan.FromSeconds(30)); + Http.AddAuthHeader(HttpClient, account.token); + + HttpClient.DefaultRequestHeaders.Add("apollographql-client-name", Setup.HostApplication); + HttpClient.DefaultRequestHeaders.Add( + "apollographql-client-version", + Assembly.GetExecutingAssembly().GetName().Version.ToString() + ); + + GQLClient = new GraphQLHttpClient( + new GraphQLHttpClientOptions + { + EndPoint = new Uri(new Uri(account.serverInfo.url), "/graphql"), + UseWebSocketForQueriesAndMutations = false, + WebSocketProtocol = "graphql-ws", + ConfigureWebSocketConnectionInitPayload = _ => + { + return Http.CanAddAuth(account.token, out string? authValue) ? new { Authorization = authValue } : null; + }, + }, + new NewtonsoftJsonSerializer(), + HttpClient + ); + + GQLClient.WebSocketReceiveErrors.Subscribe(e => + { + if (e is WebSocketException we) + { + Console.WriteLine( + $"WebSocketException: {we.Message} (WebSocketError {we.WebSocketErrorCode}, ErrorCode {we.ErrorCode}, NativeErrorCode {we.NativeErrorCode}" + ); + } + else + { + Console.WriteLine($"Exception in websocket receive stream: {e}"); + } + }); + } + + public string ServerUrl => Account.serverInfo.url; + + public string ApiToken => Account.token; + + public System.Version? ServerVersion { get; set; } + + [JsonIgnore] + public Account Account { get; set; } + + private HttpClient HttpClient { get; set; } + + public GraphQLHttpClient GQLClient { get; set; } + + public void Dispose() + { + try + { + UserStreamAddedSubscription?.Dispose(); + UserStreamRemovedSubscription?.Dispose(); + StreamUpdatedSubscription?.Dispose(); + BranchCreatedSubscription?.Dispose(); + BranchUpdatedSubscription?.Dispose(); + BranchDeletedSubscription?.Dispose(); + CommitCreatedSubscription?.Dispose(); + CommitUpdatedSubscription?.Dispose(); + CommitDeletedSubscription?.Dispose(); + CommentActivitySubscription?.Dispose(); + GQLClient?.Dispose(); + } + catch (Exception ex) when (!ex.IsFatal()) { } + } + + internal async Task ExecuteWithResiliencePolicies(Func> func) + { + // TODO: handle these in the HttpClient factory with a custom RequestHandler class + // 408 Request Timeout + // 425 Too Early + // 429 Too Many Requests + // 500 Internal Server Error + // 502 Bad Gateway + // 503 Service Unavailable + // 504 Gateway Timeout + + var delay = Backoff.DecorrelatedJitterBackoffV2(TimeSpan.FromSeconds(1), 5); + var graphqlRetry = Policy + .Handle>() + .WaitAndRetryAsync( + delay, + (ex, timeout, context) => + { + var graphqlEx = (SpeckleGraphQLException)ex; + SpeckleLog.Logger + .ForContext("graphqlExtensions", graphqlEx.Extensions) + .ForContext("graphqlErrorMessages", graphqlEx.ErrorMessages) + .Warning( + ex, + "The previous attempt at executing function to get {resultType} failed with {exceptionMessage}. 
Retrying after {timeout}.", + typeof(T).Name, + ex.Message, + timeout + ); + } + ); + + return await graphqlRetry.ExecuteAsync(func).ConfigureAwait(false); + } + + /// "FORBIDDEN" on "UNAUTHORIZED" response from server + /// All other request errors + /// The requested a cancel + public async Task ExecuteGraphQLRequest(GraphQLRequest request, CancellationToken cancellationToken = default) + { + using IDisposable context0 = LogContext.Push(CreateEnrichers(request)); + + SpeckleLog.Logger.Debug("Starting execution of graphql request to get {resultType}", typeof(T).Name); + var timer = new Stopwatch(); + var success = false; + timer.Start(); + try + { + var result = await ExecuteWithResiliencePolicies(async () => + { + GraphQLResponse result = await GQLClient + .SendMutationAsync(request, cancellationToken) + .ConfigureAwait(false); + MaybeThrowFromGraphQLErrors(request, result); + return result.Data; + }) + .ConfigureAwait(false); + success = true; + return result; + } + // cancellations are bubbling up with no logging + catch (OperationCanceledException) + { + throw; + } + // we catch forbidden to rethrow, making sure its not logged. + catch (SpeckleGraphQLForbiddenException) + { + throw; + } + // anything else related to graphql gets logged + catch (SpeckleGraphQLException gqlException) + { + SpeckleLog.Logger + .ForContext("graphqlResponse", gqlException.Response) + .ForContext("graphqlExtensions", gqlException.Extensions) + .ForContext("graphqlErrorMessages", gqlException.ErrorMessages.ToList()) + .Warning( + gqlException, + "Execution of the graphql request to get {resultType} failed with {graphqlExceptionType} {exceptionMessage}.", + typeof(T).Name, + gqlException.GetType().Name, + gqlException.Message + ); + throw; + } + // we log and wrap anything that is not a graphql exception. + // this makes sure, that any graphql operation only throws SpeckleGraphQLExceptions + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.Warning( + ex, + "Execution of the graphql request to get {resultType} failed without a graphql response. Cause {exceptionMessage}", + typeof(T).Name, + ex.Message + ); + throw new SpeckleGraphQLException("The graphql request failed without a graphql response", ex, request, null); + } + finally + { + // this is a performance metric log operation + // this makes sure that both success and failed operations report + // the same performance log + timer.Stop(); + var status = success ? "succeeded" : "failed"; + SpeckleLog.Logger.Information( + "Execution of graphql request to get {resultType} {resultStatus} after {elapsed} seconds", + typeof(T).Name, + status, + timer.Elapsed.TotalSeconds + ); + } + } + + internal void MaybeThrowFromGraphQLErrors(GraphQLRequest request, GraphQLResponse response) + { + // The errors reflect the Apollo server v2 API, which is deprecated. It is bound to change, + // once we migrate to a newer version. 
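+ // Error extension codes are mapped to exception types below: FORBIDDEN / UNAUTHENTICATED -> SpeckleGraphQLForbiddenException,
+ // STREAM_NOT_FOUND -> SpeckleGraphQLStreamNotFoundException, INTERNAL_SERVER_ERROR -> SpeckleGraphQLInternalErrorException;
+ // anything else is surfaced as a plain SpeckleGraphQLException.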
+ var errors = response.Errors; + if (errors != null && errors.Length != 0) + { + var errorMessages = errors.Select(e => e.Message); + if ( + errors.Any( + e => + e.Extensions != null + && ( + e.Extensions.Contains(new KeyValuePair("code", "FORBIDDEN")) + || e.Extensions.Contains(new KeyValuePair("code", "UNAUTHENTICATED")) + ) + ) + ) + { + throw new SpeckleGraphQLForbiddenException(request, response); + } + + if ( + errors.Any( + e => + e.Extensions != null && e.Extensions.Contains(new KeyValuePair("code", "STREAM_NOT_FOUND")) + ) + ) + { + throw new SpeckleGraphQLStreamNotFoundException(request, response); + } + + if ( + errors.Any( + e => + e.Extensions != null + && e.Extensions.Contains(new KeyValuePair("code", "INTERNAL_SERVER_ERROR")) + ) + ) + { + throw new SpeckleGraphQLInternalErrorException(request, response); + } + + throw new SpeckleGraphQLException("Request failed with errors", request, response); + } + } + + private Dictionary ConvertExpandoToDict(ExpandoObject expando) + { + var variables = new Dictionary(); + foreach (KeyValuePair kvp in expando) + { + object value; + if (kvp.Value is ExpandoObject ex) + { + value = ConvertExpandoToDict(ex); + } + else + { + value = kvp.Value; + } + + variables[kvp.Key] = value; + } + return variables; + } + + private ILogEventEnricher[] CreateEnrichers(GraphQLRequest request) + { + // i know this is double (de)serializing, but we need a recursive convert to + // dict here + var expando = JsonConvert.DeserializeObject(JsonConvert.SerializeObject(request.Variables)); + var variables = request.Variables != null && expando != null ? ConvertExpandoToDict(expando) : null; + return new ILogEventEnricher[] + { + new PropertyEnricher("serverUrl", ServerUrl), + new PropertyEnricher("graphqlQuery", request.Query), + new PropertyEnricher("graphqlVariables", variables), + new PropertyEnricher("resultType", typeof(T).Name) + }; + } + + internal IDisposable SubscribeTo(GraphQLRequest request, Action callback) + { + using (LogContext.Push(CreateEnrichers(request))) + { + try + { + var res = GQLClient.CreateSubscriptionStream(request); + return res.Subscribe( + response => + { + try + { + MaybeThrowFromGraphQLErrors(request, response); + + if (response.Data != null) + { + callback(this, response.Data); + } + else + { + SpeckleLog.Logger + .ForContext("graphqlResponse", response) + .Error("Cannot execute graphql callback for {resultType}, the response has no data.", typeof(T).Name); + } + } + // we catch forbidden to rethrow, making sure its not logged. 
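+ // The catch blocks below mirror the error handling in ExecuteGraphQLRequest: forbidden errors are rethrown without logging,
+ // while any other SpeckleGraphQLException is logged and rethrown.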
+ catch (SpeckleGraphQLForbiddenException) + { + throw; + } + // anything else related to graphql gets logged + catch (SpeckleGraphQLException gqlException) + { + SpeckleLog.Logger + .ForContext("graphqlResponse", gqlException.Response) + .ForContext("graphqlExtensions", gqlException.Extensions) + .ForContext("graphqlErrorMessages", gqlException.ErrorMessages.ToList()) + .Warning( + gqlException, + "Execution of the graphql request to get {resultType} failed with {graphqlExceptionType} {exceptionMessage}.", + typeof(T).Name, + gqlException.GetType().Name, + gqlException.Message + ); + throw; + } + // we're not handling the bare Exception type here, + // since we have a response object on the callback, we know the Exceptions + // can only be thrown from the MaybeThrowFromGraphQLErrors which wraps + // every exception into SpeckleGraphQLException + }, + ex => + { + // we're logging this as an error for now, to keep track of failures + // so far we've swallowed these errors + SpeckleLog.Logger.Error( + ex, + "Subscription request for {resultType} failed with {exceptionMessage}", + typeof(T).Name, + ex.Message + ); + // we could be throwing like this: + // throw ex; + } + ); + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.Warning( + ex, + "Subscribing to graphql {resultType} failed without a graphql response. Cause {exceptionMessage}", + typeof(T).Name, + ex.Message + ); + throw new SpeckleGraphQLException( + "The graphql request failed without a graphql response", + ex, + request, + null + ); + } + } + } +} diff --git a/src/Speckle.Core/Api/GraphQL/GraphQLHttpClientExtensions.cs b/src/Speckle.Core/Api/GraphQL/GraphQLHttpClientExtensions.cs new file mode 100644 index 00000000..64b7a766 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/GraphQLHttpClientExtensions.cs @@ -0,0 +1,56 @@ +using GraphQL; +using System.Threading.Tasks; +using System.Threading; +using GraphQL.Client.Http; +using System.Linq; + +namespace Speckle.Core.Api.GraphQL; + +public static class GraphQLHttpClientExtensions +{ + /// + /// Gets the version of the current server. Useful for guarding against unsupported api calls on newer or older servers. + /// + /// [Optional] defaults to an empty cancellation token + /// object excluding any strings (eg "2.7.2-alpha.6995" becomes "2.7.2.6995") + /// + public static async Task GetServerVersion( + this GraphQLHttpClient client, + CancellationToken cancellationToken = default + ) + { + var request = new GraphQLRequest + { + Query = + @"query Server { + serverInfo { + version + } + }" + }; + + var response = await client.SendQueryAsync(request, cancellationToken).ConfigureAwait(false); + + if (response.Errors != null) + { + throw new SpeckleGraphQLException( + $"Query {nameof(GetServerVersion)} failed", + request, + response + ); + } + + if (string.IsNullOrWhiteSpace(response.Data.serverInfo.version)) + { + throw new SpeckleGraphQLException( + $"Query {nameof(GetServerVersion)} did not provide a valid server version", + request, + response + ); + } + + return response.Data.serverInfo.version == "dev" + ? 
new System.Version(999, 999, 999) + : new System.Version(response.Data.serverInfo.version.Split('-').First()); + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Models.cs b/src/Speckle.Core/Api/GraphQL/Models.cs new file mode 100644 index 00000000..f2a31f20 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Models.cs @@ -0,0 +1,583 @@ +#nullable disable +using System; +using System.Collections.Generic; +using System.Runtime.InteropServices; +using System.Text.Json.Serialization; + +namespace Speckle.Core.Api; + +#region inputs + +public class StreamCreateInput +{ + public string name { get; set; } + public string description { get; set; } + public bool isPublic { get; set; } = true; +} + +public class StreamUpdateInput +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } + public bool isPublic { get; set; } = true; +} + +public class StreamPermissionInput +{ + public string streamId { get; set; } + public string userId { get; set; } + public string role { get; set; } +} + +public class StreamRevokePermissionInput +{ + public string streamId { get; set; } + public string userId { get; set; } +} + +public class StreamInviteCreateInput +{ + public string streamId { get; set; } + public string userId { get; set; } + public string email { get; set; } + public string message { get; set; } + public string role { get; set; } +} + +public class BranchCreateInput +{ + public string streamId { get; set; } + public string name { get; set; } + public string description { get; set; } +} + +public class BranchUpdateInput +{ + public string streamId { get; set; } + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } +} + +public class BranchDeleteInput +{ + public string streamId { get; set; } + public string id { get; set; } +} + +public class CommitCreateInput +{ + public string streamId { get; set; } + public string branchName { get; set; } + public string objectId { get; set; } + public string message { get; set; } + public string sourceApplication { get; set; } = ".net"; + public int totalChildrenCount { get; set; } + public List parents { get; set; } + + [Obsolete("Please use the parents property. This property will be removed in later versions")] + public List previousCommitIds { get; set; } +} + +public class CommitUpdateInput +{ + public string streamId { get; set; } + public string id { get; set; } + public string message { get; set; } +} + +public class CommitDeleteInput +{ + public string streamId { get; set; } + public string id { get; set; } +} + +public class CommitReceivedInput +{ + public string streamId { get; set; } + public string commitId { get; set; } + public string sourceApplication { get; set; } + public string message { get; set; } +} + +#endregion + +public class Stream +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } + + public bool isPublic { get; set; } + public string role { get; set; } + public DateTime createdAt { get; set; } + public DateTime updatedAt { get; set; } + public string favoritedDate { get; set; } + + public int commentCount { get; set; } + public int favoritesCount { get; set; } + + public List collaborators { get; set; } + public List pendingCollaborators { get; set; } = new(); + public Branches branches { get; set; } + + /// + /// Set only in the case that you've requested this through . 
+ /// + public Branch branch { get; set; } + + /// + /// Set only in the case that you've requested this through . + /// + public Commit commit { get; set; } + + /// + /// Set only in the case that you've requested this through + /// + public Commits commits { get; set; } + + public Activity activity { get; set; } + + public SpeckleObject @object { get; set; } + + public override string ToString() + { + return $"Stream ({name} | {id})"; + } +} + +public class Collaborator +{ + public string id { get; set; } + public string name { get; set; } + public string role { get; set; } + public string avatar { get; set; } + + public override string ToString() + { + return $"Collaborator ({name} | {role} | {id})"; + } +} + +public class StreamInvitesResponse +{ + public List streamInvites { get; set; } +} + +public class PendingStreamCollaborator +{ + public string id { get; set; } + public string inviteId { get; set; } + public string streamId { get; set; } + public string streamName { get; set; } + public string title { get; set; } + public string role { get; set; } + public User invitedBy { get; set; } + public User user { get; set; } + public string token { get; set; } +} + +public class Branches +{ + public int totalCount { get; set; } + public string cursor { get; set; } + public List items { get; set; } +} + +public class Commits +{ + public int totalCount { get; set; } + public string cursor { get; set; } + public List items { get; set; } +} + +public class Commit +{ + public string id { get; set; } + public string message { get; set; } + public string branchName { get; set; } + public string authorName { get; set; } + public string authorId { get; set; } + public string authorAvatar { get; set; } + public DateTime createdAt { get; set; } + public string sourceApplication { get; set; } + + public string referencedObject { get; set; } + public int totalChildrenCount { get; set; } + public List parents { get; set; } + + public override string ToString() + { + return $"Commit ({message} | {id})"; + } +} + +public class Activity +{ + public int totalCount { get; set; } + public DateTime cursor { get; set; } + public List items { get; set; } +} + +public class ActivityItem +{ + public string actionType { get; set; } + public string userId { get; set; } + public string streamId { get; set; } + public string resourceId { get; set; } + public string resourceType { get; set; } + public DateTime time { get; set; } + public Info info { get; set; } + public string message { get; set; } +} + +public class Info +{ + public string message { get; set; } + public string sourceApplication { get; set; } + + public InfoCommit commit { get; set; } +} + +public class InfoCommit +{ + public string message { get; set; } + public string sourceApplication { get; set; } + public string branchName { get; set; } +} + +public class SpeckleObject +{ + public string id { get; set; } + public string speckleType { get; set; } + public string applicationId { get; set; } + public int totalChildrenCount { get; set; } + public DateTime createdAt { get; set; } +} + +public class Branch +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } + public Commits commits { get; set; } + + public override string ToString() + { + return $"Branch ({name} | {id})"; + } +} + +public class Streams +{ + public int totalCount { get; set; } + public string cursor { get; set; } + public List items { get; set; } +} + +public class UserBase +{ + public string id { get; set; } + public string 
name { get; set; } + public string bio { get; set; } + public string company { get; set; } + public string avatar { get; set; } + public bool verified { get; set; } + public string role { get; set; } + public Streams streams { get; set; } +} + +public class LimitedUser : UserBase +{ + public override string ToString() + { + return $"Other user profile: ({name} | {id})"; + } +} + +public class User : UserBase +{ + public string email { get; set; } + public Streams favoriteStreams { get; set; } + + public override string ToString() + { + return $"User ({email} | {name} | {id})"; + } +} + +public class Resource +{ + public string resourceId { get; set; } + public ResourceType resourceType { get; set; } +} + +public enum ResourceType +{ + commit, + stream, + @object, + comment +} + +public class Location +{ + public double x { get; set; } + public double y { get; set; } + public double z { get; set; } +} + +public class UserData +{ + public User user { get; set; } +} + +/// +/// GraphQL DTO model for active user data +/// +public class ActiveUserData +{ + /// + /// User profile of the active user. + /// + public User activeUser { get; set; } +} + +/// +/// GraphQL DTO model for limited user data. Mostly referring to other user's profile. +/// +public class LimitedUserData +{ + /// + /// The limited user profile of another (non active user) + /// + public LimitedUser otherUser { get; set; } +} + +public class UserSearchData +{ + public UserSearch userSearch { get; set; } +} + +public class UserSearch +{ + public string cursor { get; set; } + public List items { get; set; } +} + +public class ServerInfoResponse +{ + // TODO: server and user models are duplicated here and in Speckle.Core.Credentials.Responses + // a bit weird and unnecessary - shouldn't both Credentials and Api share the same models since they're + // all server models that should be consistent? am creating a new obj here as to not reference Credentials in + // this file but it should prob be refactored in the futrue + public ServerInfo serverInfo { get; set; } +} + +// TODO: prob remove and bring one level up and shared w Speckle.Core.Credentials +[ClassInterface(ClassInterfaceType.AutoDual)] +[ComVisible(true)] +public class ServerInfo +{ + public string name { get; set; } + public string company { get; set; } + public string version { get; set; } + public string adminContact { get; set; } + public string description { get; set; } + + /// + /// This field is not returned from the GQL API, + /// it should populated on construction from the response headers. + /// see + /// + public bool frontend2 { get; set; } + + /// + /// This field is not returned from the GQL API, + /// it should populated on construction. + /// see + /// + public string url { get; set; } + + public ServerMigration migration { get; set; } +} + +public class ServerMigration +{ + /// + /// New URI where this server is now deployed + /// + public Uri movedTo { get; set; } + + /// + /// Previous URI where this server used to be deployed + /// + public Uri movedFrom { get; set; } +} + +public class StreamData +{ + public Stream stream { get; set; } +} + +public class StreamsData +{ + public Streams streams { get; set; } +} + +#region comments +public class Comments +{ + public int totalCount { get; set; } + public DateTime? 
cursor { get; set; } + public List items { get; set; } +} + +public class CommentData +{ + public Comments comments { get; set; } + public List camPos { get; set; } + public object filters { get; set; } + public Location location { get; set; } + public object selection { get; set; } + public object sectionBox { get; set; } +} + +public class CommentItem +{ + public string id { get; set; } + public string authorId { get; set; } + public bool archived { get; set; } + public string screenshot { get; set; } + public string rawText { get; set; } + public CommentData data { get; set; } + public DateTime createdAt { get; set; } + public DateTime updatedAt { get; set; } + public DateTime? viewedAt { get; set; } + public object reactions { get; set; } + public Comments replies { get; set; } + public List resources { get; set; } +} + +public class ContentContent +{ + public string Type { get; set; } + + //public Mark[] Marks { get; set; } + public string Text { get; set; } +} + +public class CommentsData +{ + public Comments comments { get; set; } +} + +public class CommentItemData +{ + public CommentItem comment { get; set; } +} + +public class CommentActivityMessage +{ + public string type { get; set; } + public CommentItem comment { get; set; } +} + +public class CommentActivityResponse +{ + public CommentActivityMessage commentActivity { get; set; } +} +#endregion + +#region manager api + +public class Connector +{ + public List Versions { get; set; } = new(); +} + +public class Version +{ + public Version(string number, string url, Os os = Os.Win, Architecture architecture = Architecture.Any) + { + Number = number; + Url = url; + Date = DateTime.Now; + Prerelease = Number.Contains("-"); + Os = os; + Architecture = architecture; + } + + public string Number { get; set; } + public string Url { get; set; } + public Os Os { get; set; } + public Architecture Architecture { get; set; } = Architecture.Any; + public DateTime Date { get; set; } + + [JsonIgnore] + public string DateTimeAgo => Helpers.TimeAgo(Date); + + public bool Prerelease { get; set; } +} + +/// +/// OS +/// NOTE: do not edit order and only append new items as they are serialized to ints +/// +public enum Os +{ + Win, //0 + OSX, //1 + Linux, //2 + Any //3 +} + +/// +/// Architecture +/// NOTE: do not edit order and only append new items as they are serialized to ints +/// +public enum Architecture +{ + Any, //0 + Arm, //1 + Intel //2 +} + +//GHOST API +public class Meta +{ + public Pagination pagination { get; set; } +} + +public class Pagination +{ + public int page { get; set; } + public string limit { get; set; } + public int pages { get; set; } + public int total { get; set; } + public object next { get; set; } + public object prev { get; set; } +} + +public class Tags +{ + public List tags { get; set; } + public Meta meta { get; set; } +} + +public class Tag +{ + public string id { get; set; } + public string name { get; set; } + public string slug { get; set; } + public string description { get; set; } + public string feature_image { get; set; } + public string visibility { get; set; } + public string codeinjection_head { get; set; } + public object codeinjection_foot { get; set; } + public object canonical_url { get; set; } + public string accent_color { get; set; } + public string url { get; set; } +} +#endregion diff --git a/src/Speckle.Core/Api/GraphQL/Serializer/ConstantCaseEnumConverter.cs b/src/Speckle.Core/Api/GraphQL/Serializer/ConstantCaseEnumConverter.cs new file mode 100644 index 00000000..7088d10b --- /dev/null +++ 
b/src/Speckle.Core/Api/GraphQL/Serializer/ConstantCaseEnumConverter.cs @@ -0,0 +1,43 @@ +#nullable disable + +using System; +using System.Linq; +using System.Reflection; +using GraphQL.Client.Abstractions.Utilities; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Converters; + +namespace Speckle.Core.Api.GraphQL.Serializer; + +internal class ConstantCaseEnumConverter : StringEnumConverter +{ + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + if (value == null) + { + writer.WriteNull(); + } + else + { + var enumString = ((Enum)value).ToString("G"); + var memberName = value + .GetType() + .GetMember(enumString, BindingFlags.DeclaredOnly | BindingFlags.Static | BindingFlags.Public) + .FirstOrDefault() + ?.Name; + if (string.IsNullOrEmpty(memberName)) + { + if (!AllowIntegerValues) + { + throw new JsonSerializationException($"Integer value {value} is not allowed."); + } + + writer.WriteValue(value); + } + else + { + writer.WriteValue(memberName.ToConstantCase()); + } + } + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Serializer/MapConverter.cs b/src/Speckle.Core/Api/GraphQL/Serializer/MapConverter.cs new file mode 100644 index 00000000..07843438 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Serializer/MapConverter.cs @@ -0,0 +1,88 @@ +#nullable disable +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using GraphQL; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Linq; + +namespace Speckle.Core.Api.GraphQL.Serializer; + +internal sealed class MapConverter : JsonConverter +{ + public override void WriteJson(JsonWriter writer, Map value, JsonSerializer serializer) + { + throw new NotImplementedException( + "This converter currently is only intended to be used to read a JSON object into a strongly-typed representation." 
+ ); + } + + public override Map ReadJson( + JsonReader reader, + Type objectType, + Map existingValue, + bool hasExistingValue, + JsonSerializer serializer + ) + { + var rootToken = JToken.ReadFrom(reader); + if (rootToken is JObject) + { + return (Map)ReadDictionary(rootToken, new Map()); + } + + throw new ArgumentException("This converter can only parse when the root element is a JSON Object."); + } + + [SuppressMessage( + "Maintainability", + "CA1508:Avoid dead conditional code", + Justification = "False positive, see https://github.com/dotnet/roslyn-analyzers/issues/6893" + )] + private object ReadToken(JToken token) + { + return token switch + { + JObject jObject => ReadDictionary(jObject, new Dictionary()), + JArray jArray => ReadArray(jArray).ToList(), + JValue jValue => jValue.Value, + JConstructor => throw new ArgumentOutOfRangeException(nameof(token), "cannot deserialize a JSON constructor"), + JProperty => throw new ArgumentOutOfRangeException(nameof(token), "cannot deserialize a JSON property"), + JContainer => throw new ArgumentOutOfRangeException(nameof(token), "cannot deserialize a JSON comment"), + _ => throw new ArgumentOutOfRangeException(nameof(token), $"Invalid token type {token?.Type}") + }; + } + + private Dictionary ReadDictionary(JToken element, Dictionary to) + { + foreach (var property in ((JObject)element).Properties()) + { + if (IsUnsupportedJTokenType(property.Value.Type)) + { + continue; + } + + to[property.Name] = ReadToken(property.Value); + } + return to; + } + + private IEnumerable ReadArray(JArray element) + { + foreach (var item in element) + { + if (IsUnsupportedJTokenType(item.Type)) + { + continue; + } + + yield return ReadToken(item); + } + } + + private bool IsUnsupportedJTokenType(JTokenType type) + { + return type == JTokenType.Constructor || type == JTokenType.Property || type == JTokenType.Comment; + } +} diff --git a/src/Speckle.Core/Api/GraphQL/Serializer/NewtonsoftJsonSerializer.cs b/src/Speckle.Core/Api/GraphQL/Serializer/NewtonsoftJsonSerializer.cs new file mode 100644 index 00000000..3509559b --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/Serializer/NewtonsoftJsonSerializer.cs @@ -0,0 +1,84 @@ +#nullable disable +using System; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; +using GraphQL.Client.Abstractions; +using GraphQL.Client.Abstractions.Websocket; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Serialization; + +namespace Speckle.Core.Api.GraphQL.Serializer; + +internal sealed class NewtonsoftJsonSerializer : IGraphQLWebsocketJsonSerializer +{ + public NewtonsoftJsonSerializer() + : this(DefaultJsonSerializerSettings) { } + + public NewtonsoftJsonSerializer(Action configure) + : this(configure.AndReturn(DefaultJsonSerializerSettings)) { } + + public NewtonsoftJsonSerializer(JsonSerializerSettings jsonSerializerSettings) + { + JsonSerializerSettings = jsonSerializerSettings; + ConfigureMandatorySerializerOptions(); + } + + public static JsonSerializerSettings DefaultJsonSerializerSettings => + new() + { + ContractResolver = new CamelCasePropertyNamesContractResolver { IgnoreIsSpecifiedMembers = true }, + MissingMemberHandling = MissingMemberHandling.Ignore, + Converters = { new ConstantCaseEnumConverter() } + }; + + public JsonSerializerSettings JsonSerializerSettings { get; } + + public string SerializeToString(GraphQLRequest request) + { + return JsonConvert.SerializeObject(request, JsonSerializerSettings); + } + + public byte[] 
SerializeToBytes(GraphQLWebSocketRequest request) + { + var json = JsonConvert.SerializeObject(request, JsonSerializerSettings); + return Encoding.UTF8.GetBytes(json); + } + + public Task DeserializeToWebsocketResponseWrapperAsync(System.IO.Stream stream) + { + return DeserializeFromUtf8Stream(stream); + } + + public GraphQLWebSocketResponse DeserializeToWebsocketResponse(byte[] bytes) + { + return JsonConvert.DeserializeObject>( + Encoding.UTF8.GetString(bytes), + JsonSerializerSettings + ); + } + + public Task> DeserializeFromUtf8StreamAsync( + System.IO.Stream stream, + CancellationToken cancellationToken + ) + { + return DeserializeFromUtf8Stream>(stream); + } + + // deserialize extensions to Dictionary + private void ConfigureMandatorySerializerOptions() + { + JsonSerializerSettings.Converters.Insert(0, new MapConverter()); + } + + private Task DeserializeFromUtf8Stream(System.IO.Stream stream) + { + using var sr = new StreamReader(stream); + using JsonReader reader = new JsonTextReader(sr); + var serializer = JsonSerializer.Create(JsonSerializerSettings); + return Task.FromResult(serializer.Deserialize(reader)); + } +} diff --git a/src/Speckle.Core/Api/GraphQL/SubscriptionModels.cs b/src/Speckle.Core/Api/GraphQL/SubscriptionModels.cs new file mode 100644 index 00000000..f1b253b6 --- /dev/null +++ b/src/Speckle.Core/Api/GraphQL/SubscriptionModels.cs @@ -0,0 +1,91 @@ +#nullable disable +using System; +using System.Collections.Generic; + +namespace Speckle.Core.Api.SubscriptionModels; + +#region streams +public class StreamInfo +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } + public string sharedBy { get; set; } +} + +public class UserStreamAddedResult +{ + public StreamInfo userStreamAdded { get; set; } +} + +public class StreamUpdatedResult +{ + public StreamInfo streamUpdated { get; set; } +} + +public class UserStreamRemovedResult +{ + public StreamInfo userStreamRemoved { get; set; } +} +#endregion + +#region branches + +public class BranchInfo +{ + public string id { get; set; } + public string name { get; set; } + public string description { get; set; } + public string streamId { get; set; } + public string authorId { get; set; } +} + +public class BranchCreatedResult +{ + public BranchInfo branchCreated { get; set; } +} + +public class BranchUpdatedResult +{ + public BranchInfo branchUpdated { get; set; } +} + +public class BranchDeletedResult +{ + public BranchInfo branchDeleted { get; set; } +} +#endregion + +#region commits + +public class CommitInfo +{ + public string id { get; set; } + public string streamId { get; set; } + public string branchName { get; set; } + public string objectId { get; set; } + public string authorId { get; set; } + public string message { get; set; } + public string sourceApplication { get; set; } + public int? totalChildrenCount { get; set; } + public IList parents { get; set; } + + [Obsolete("Please use the parents property. 
This property will be removed in later versions")] + public IList previousCommitIds { get; set; } +} + +public class CommitCreatedResult +{ + public CommitInfo commitCreated { get; set; } +} + +public class CommitUpdatedResult +{ + public CommitInfo commitUpdated { get; set; } +} + +public class CommitDeletedResult +{ + public CommitInfo commitDeleted { get; set; } +} +#endregion diff --git a/src/Speckle.Core/Api/Helpers.cs b/src/Speckle.Core/Api/Helpers.cs new file mode 100644 index 00000000..dc3e2ef1 --- /dev/null +++ b/src/Speckle.Core/Api/Helpers.cs @@ -0,0 +1,308 @@ +#nullable disable +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Diagnostics.Contracts; +using System.Linq; +using System.Net.Http; +using System.Reflection; +using System.Runtime.InteropServices; +using System.Text.Json; +using System.Threading.Tasks; +using Speckle.Core.Credentials; +using Speckle.Core.Helpers; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Core.Transports; + +namespace Speckle.Core.Api; + +public static class Helpers +{ + public const string RELEASES_URL = "https://releases.speckle.dev"; + private const string FEEDS_ENDPOINT = RELEASES_URL + "/manager2/feeds"; + + /// + /// Helper method to Receive from a Speckle Server. + /// + /// Stream URL or Id to receive from. If the URL contains branchName, commitId or objectId those will be used, otherwise the latest commit from main will be received. + /// Account to use. If not provided the default account will be used. + /// Action invoked on progress iterations. + /// Action invoked once the total count of objects is known. + /// + public static async Task Receive( + string stream, + Account account = null, + Action> onProgressAction = null, + Action onTotalChildrenCountKnown = null + ) + { + var sw = new StreamWrapper(stream); + + try + { + account ??= await sw.GetAccount().ConfigureAwait(false); + } + catch (SpeckleException) + { + if (string.IsNullOrEmpty(sw.StreamId)) + { + throw; + } + + //Fallback to a non authed account + account = new Account + { + token = "", + serverInfo = new ServerInfo { url = sw.ServerUrl }, + userInfo = new UserInfo() + }; + } + + using var client = new Client(account); + using var transport = new ServerTransport(client.Account, sw.StreamId); + + string objectId = ""; + Commit commit = null; + + //OBJECT URL + if (!string.IsNullOrEmpty(sw.ObjectId)) + { + objectId = sw.ObjectId; + } + //COMMIT URL + else if (!string.IsNullOrEmpty(sw.CommitId)) + { + commit = await client.CommitGet(sw.StreamId, sw.CommitId).ConfigureAwait(false); + objectId = commit.referencedObject; + } + //BRANCH URL OR STREAM URL + else + { + var branchName = string.IsNullOrEmpty(sw.BranchName) ? 
"main" : sw.BranchName; + + var branch = await client.BranchGet(sw.StreamId, branchName, 1).ConfigureAwait(false); + if (branch.commits.items.Count == 0) + { + throw new SpeckleException("The selected branch has no commits."); + } + + commit = branch.commits.items[0]; + objectId = branch.commits.items[0].referencedObject; + } + + Analytics.TrackEvent( + client.Account, + Analytics.Events.Receive, + new Dictionary + { + { "sourceHostApp", HostApplications.GetHostAppFromString(commit.sourceApplication).Slug }, + { "sourceHostAppVersion", commit.sourceApplication } + } + ); + + var receiveRes = await Operations + .Receive( + objectId, + transport, + onProgressAction: onProgressAction, + onTotalChildrenCountKnown: onTotalChildrenCountKnown + ) + .ConfigureAwait(false); + + try + { + await client + .CommitReceived( + new CommitReceivedInput + { + streamId = sw.StreamId, + commitId = commit?.id, + message = commit?.message, + sourceApplication = "Other" + } + ) + .ConfigureAwait(false); + } + catch (Exception ex) when (!ex.IsFatal()) + { + // Do nothing! + } + return receiveRes; + } + + /// + /// Helper method to Send to a Speckle Server. + /// + /// Stream URL or Id to send to. If the URL contains branchName, commitId or objectId those will be used, otherwise the latest commit from main will be received. + /// Data to send + /// Account to use. If not provided the default account will be used. + /// Toggle for the default cache. If set to false, it will only send to the provided transports. + /// Action invoked on progress iterations. + /// + public static async Task Send( + string stream, + Base data, + string message = "No message", + string sourceApplication = ".net", + int totalChildrenCount = 0, + Account account = null, + bool useDefaultCache = true, + Action> onProgressAction = null + ) + { + var sw = new StreamWrapper(stream); + + using var client = new Client(account ?? await sw.GetAccount().ConfigureAwait(false)); + + using ServerTransport transport = new(client.Account, sw.StreamId); + var branchName = string.IsNullOrEmpty(sw.BranchName) ? "main" : sw.BranchName; + + var objectId = await Operations.Send(data, transport, useDefaultCache, onProgressAction).ConfigureAwait(false); + + Analytics.TrackEvent(client.Account, Analytics.Events.Send); + + return await client + .CommitCreate( + new CommitCreateInput + { + streamId = sw.StreamId, + branchName = branchName, + objectId = objectId, + message = message, + sourceApplication = sourceApplication, + totalChildrenCount = totalChildrenCount + } + ) + .ConfigureAwait(false); + } + + /// + /// + /// + /// The connector slug eg. 
revit, rhino, etc + /// + public static async Task IsConnectorUpdateAvailable(string slug) + { + //when debugging the version is not correct, so don't bother + if (!Analytics.IsReleaseMode) + { + return false; + } + + try + { + using HttpClient client = Http.GetHttpProxyClient(); + var response = await client.GetStringAsync($"{FEEDS_ENDPOINT}/{slug}.json").ConfigureAwait(false); + var connector = JsonSerializer.Deserialize(response); + + var os = Os.Win; //TODO: This won't work for linux + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + os = Os.OSX; + } + + var versions = connector.Versions.Where(x => x.Os == os).OrderByDescending(x => x.Date).ToList(); + var stables = versions.Where(x => !x.Prerelease).ToArray(); + if (stables.Length == 0) + { + return false; + } + + var latestVersion = new System.Version(stables.First().Number); + + var currentVersion = Assembly.GetAssembly(typeof(Helpers)).GetName().Version; + + if (latestVersion > currentVersion) + { + return true; + } + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.ForContext("slug", slug).Warning(ex, "Failed to check for connector updates"); + } + + return false; + } + + [Obsolete("Use DateTime overload")] + public static string TimeAgo(string timestamp) + { + return TimeAgo(DateTime.Parse(timestamp)); + } + +#nullable enable + + /// + /// value to fallback to if the given is + public static string TimeAgo(DateTime? timestamp, string fallback = "Never") + { + return timestamp.HasValue ? TimeAgo(timestamp.Value) : fallback; + } + + /// Formats the given difference between the current system time and the provided + /// into a human readable string + /// + /// + /// A Human readable string + public static string TimeAgo(DateTime timestamp) + { + TimeSpan timeAgo; + + timeAgo = DateTime.UtcNow.Subtract(timestamp); + + if (timeAgo.TotalSeconds < 60) + { + return "just now"; + } + + if (timeAgo.TotalMinutes < 60) + { + return $"{timeAgo.Minutes} minute{PluralS(timeAgo.Minutes)} ago"; + } + + if (timeAgo.TotalHours < 24) + { + return $"{timeAgo.Hours} hour{PluralS(timeAgo.Hours)} ago"; + } + + if (timeAgo.TotalDays < 7) + { + return $"{timeAgo.Days} day{PluralS(timeAgo.Days)} ago"; + } + + if (timeAgo.TotalDays < 30) + { + return $"{timeAgo.Days / 7} week{PluralS(timeAgo.Days / 7)} ago"; + } + + if (timeAgo.TotalDays < 365) + { + return $"{timeAgo.Days / 30} month{PluralS(timeAgo.Days / 30)} ago"; + } + + if (timestamp <= new DateTime(1800, 1, 1)) + { + SpeckleLog.Logger.Warning( + "Tried to calculate {functionName} of a DateTime value that was way in the past: {dateTimeValue}", + nameof(TimeAgo), + timestamp + ); + // We assume this was an error, Likely a non-nullable DateTime was initialized/deserialized to the default + // Instead of potentially lying to the user, lets tell them we don't know what happened. + return "Unknown"; + } + + return $"{timeAgo.Days / 365} year{PluralS(timeAgo.Days / 365)} ago"; + } + + [Pure] + public static string PluralS(int num) => num != 1 ? 
"s" : ""; + + [Obsolete("Renamed to " + nameof(RELEASES_URL))] + [SuppressMessage("Style", "IDE1006:Naming Styles")] + public const string ReleasesUrl = RELEASES_URL; +} diff --git a/src/Speckle.Core/Api/Operations/Operations.Receive.Obsolete.cs b/src/Speckle.Core/Api/Operations/Operations.Receive.Obsolete.cs new file mode 100644 index 00000000..02a7de42 --- /dev/null +++ b/src/Speckle.Core/Api/Operations/Operations.Receive.Obsolete.cs @@ -0,0 +1,533 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Serilog.Context; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Core.Serialisation; +using Speckle.Core.Transports; +using Speckle.Newtonsoft.Json; + +namespace Speckle.Core.Api; + +#pragma warning disable CA1068, IDE1006 + +[Obsolete("Serializer v1 is deprecated")] +public enum SerializerVersion +{ + V1, + V2 +} + +public static partial class Operations +{ + private const string RECEIVE_DEPRECATION_MESSAGE = """ + This method overload is obsolete, consider using a non-obsolete overload. + 1.SerializerVersion selection will no longer be supported going foward (serializer v1 is now deprecated). + 2.Use of disposeTransports will no longer be supported going forward (you should dispose your own transports). + 3 OnErrorAction is no longer used (instead functions with throw exceptions for consistancy and clear stack trace) + """; + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + CancellationToken cancellationToken, + ITransport? remoteTransport, + ITransport? localTransport, + Action>? onProgressAction, + Action? onErrorAction, + Action? onTotalChildrenCountKnown, + bool disposeTransports + ) + { + return Receive( + objectId, + cancellationToken, + remoteTransport, + localTransport, + onProgressAction, + onErrorAction, + onTotalChildrenCountKnown, + disposeTransports, + SerializerVersion.V2 + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + CancellationToken cancellationToken, + ITransport? remoteTransport, + Action>? onProgressAction, + Action? onErrorAction, + Action? onTotalChildrenCountKnown, + bool disposeTransports, + SerializerVersion serializerVersion + ) + { + return Receive( + objectId, + cancellationToken, + remoteTransport, + null, + onProgressAction, + onErrorAction, + onTotalChildrenCountKnown, + disposeTransports, + serializerVersion + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + CancellationToken cancellationToken, + ITransport? remoteTransport, + Action>? onProgressAction, + Action? onErrorAction, + Action? onTotalChildrenCountKnown, + bool disposeTransports + ) + { + return Receive( + objectId, + cancellationToken, + remoteTransport, + null, + onProgressAction, + onErrorAction, + onTotalChildrenCountKnown, + disposeTransports, + SerializerVersion.V2 + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + CancellationToken cancellationToken, + ITransport? 
remoteTransport, + bool disposeTransports + ) + { + return Receive( + objectId, + cancellationToken, + remoteTransport, + null, + null, + null, + null, + disposeTransports, + SerializerVersion.V2 + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + ITransport? remoteTransport, + ITransport? localTransport, + Action>? onProgressAction, + Action? onErrorAction, + Action? onTotalChildrenCountKnown, + bool disposeTransports, + SerializerVersion serializerVersion + ) + { + return Receive( + objectId, + CancellationToken.None, + remoteTransport, + localTransport, + onProgressAction, + onErrorAction, + onTotalChildrenCountKnown, + disposeTransports, + serializerVersion + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + ITransport? remoteTransport, + ITransport? localTransport, + Action>? onProgressAction, + Action? onErrorAction, + Action? onTotalChildrenCountKnown, + bool disposeTransports + ) + { + return Receive( + objectId, + CancellationToken.None, + remoteTransport, + localTransport, + onProgressAction, + onErrorAction, + onTotalChildrenCountKnown, + disposeTransports, + SerializerVersion.V2 + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + ITransport? remoteTransport, + Action>? onProgressAction, + Action? onErrorAction, + Action? onTotalChildrenCountKnown, + bool disposeTransports + ) + { + return Receive( + objectId, + CancellationToken.None, + remoteTransport, + null, + onProgressAction, + onErrorAction, + onTotalChildrenCountKnown, + disposeTransports, + SerializerVersion.V2 + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + ITransport? remoteTransport, + ITransport? localTransport, + bool disposeTransports + ) + { + return Receive( + objectId, + CancellationToken.None, + remoteTransport, + localTransport, + null, + null, + null, + disposeTransports, + SerializerVersion.V2 + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive(string objectId, ITransport? remoteTransport, bool disposeTransports) + { + return Receive( + objectId, + CancellationToken.None, + remoteTransport, + null, + null, + null, + null, + disposeTransports, + SerializerVersion.V2 + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive(string objectId, bool disposeTransports) + { + return Receive( + objectId, + CancellationToken.None, + null, + null, + null, + null, + null, + disposeTransports, + SerializerVersion.V2 + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + ITransport? remoteTransport, + ITransport? localTransport, + Action? onErrorAction + ) + { + return Receive( + objectId, + default, + remoteTransport, + localTransport, + null, + onErrorAction, + null, + false, + SerializerVersion.V2 + ); + } + + /// + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static Task Receive( + string objectId, + ITransport? remoteTransport, + Action? onErrorAction + ) + { + return Receive(objectId, default, remoteTransport, null, null, onErrorAction, null, false, SerializerVersion.V2); + } + + /// + /// Receives an object from a transport. + /// + /// + /// This overload is deprecated. You should consider using + /// + ///
+ /// The new overload no longer supports serializer version switching, as serializer v1 is now deprecated.
+ ///
+ /// We also no longer offer the option to dispose the provided transports for you.
+ /// You should instead handle disposal yourself
+ /// using conventional mechanisms like the using keyword or a try/finally block
+ ///
+ /// This function overload will be kept around for several releases, but will eventually be removed. + ///
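+ /// <example>
+ /// A rough migration sketch to the non-obsolete overload (variable names are illustrative):
+ /// <code>
+ /// Base result = await Operations.Receive(objectId, remoteTransport, localTransport, cancellationToken: token);
+ /// </code>
+ /// </example>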
+ /// + /// The transport to receive from. + /// Leave null to use the default cache. + /// Action invoked on progress iterations. + /// Action invoked on internal errors. + /// Action invoked once the total count of objects is known. + /// + [Obsolete(RECEIVE_DEPRECATION_MESSAGE)] + public static async Task Receive( + string objectId, + CancellationToken cancellationToken, + ITransport? remoteTransport, + ITransport? localTransport, + Action>? onProgressAction, + Action? onErrorAction, + Action? onTotalChildrenCountKnown, + bool disposeTransports, + SerializerVersion serializerVersion + ) + { + var hasUserProvidedLocalTransport = localTransport != null; + localTransport ??= new SQLiteTransport(); + using (LogContext.PushProperty("remoteTransportContext", remoteTransport?.TransportContext)) + using (LogContext.PushProperty("localTransportContext", localTransport.TransportContext)) + using (LogContext.PushProperty("objectId", objectId)) + { + var timer = Stopwatch.StartNew(); + SpeckleLog.Logger.Information( + "Starting receive {objectId} from transports {localTransport} / {remoteTransport}", + objectId, + localTransport.TransportName, + remoteTransport?.TransportName + ); + + BaseObjectSerializer? serializer = null; + JsonSerializerSettings? settings = null; + BaseObjectDeserializerV2? serializerV2 = null; + if (serializerVersion == SerializerVersion.V1) + { + (serializer, settings) = GetSerializerInstance(); + } + else + { + serializerV2 = new BaseObjectDeserializerV2(); + } + + var internalProgressAction = GetInternalProgressAction(onProgressAction); + + localTransport.OnProgressAction = internalProgressAction; + localTransport.CancellationToken = cancellationToken; + + if (serializerVersion == SerializerVersion.V1) + { + serializer!.ReadTransport = localTransport; + serializer.OnProgressAction = internalProgressAction; + serializer.OnErrorAction = onErrorAction; + serializer.CancellationToken = cancellationToken; + } + else + { + serializerV2!.ReadTransport = localTransport; + serializerV2.OnProgressAction = internalProgressAction; + serializerV2.OnErrorAction = onErrorAction; + serializerV2.CancellationToken = cancellationToken; + if (remoteTransport is IBlobCapableTransport t) + { + serializerV2.BlobStorageFolder = t.BlobStorageFolder; + } + } + + // First we try and get the object from the local transport. If it's there, we assume all its children are there, and proceed with deserialisation. + // This assumption is hard-wired into the SDK. Read below. + var objString = localTransport.GetObject(objectId); + + if (objString != null) + { + // Shoot out the total children count + var partial = JsonConvert.DeserializeObject(objString); + if (partial == null) + { + throw new SpeckleDeserializeException( + $"Failed to deserialize {nameof(objString)} into {nameof(Placeholder)}" + ); + } + + if (partial.__closure != null) + { + onTotalChildrenCountKnown?.Invoke(partial.__closure.Count); + } + + Base? 
localRes = DeserializeStringToBase(serializerVersion, objString, settings, serializerV2); + + if ((disposeTransports || !hasUserProvidedLocalTransport) && localTransport is IDisposable dispLocal) + { + dispLocal.Dispose(); + } + + if (disposeTransports && remoteTransport != null && remoteTransport is IDisposable dispRemote) + { + dispRemote.Dispose(); + } + + timer.Stop(); + SpeckleLog.Logger + .ForContext("deserializerElapsed", serializerV2?.Elapsed) + .ForContext( + "transportElapsedBreakdown", + new[] { localTransport, remoteTransport } + .Where(t => t != null) + .ToDictionary(t => t!.TransportName, t => t!.Elapsed) + ) + .Information( + "Finished receiving {objectId} from {source} in {elapsed} seconds", + objectId, + localTransport.TransportName, + timer.Elapsed.TotalSeconds + ); + return localRes; + } + + if (remoteTransport == null) + { + var ex = new SpeckleException( + $"Could not find specified object using the local transport {localTransport.TransportName}, and you didn't provide a fallback remote from which to pull it." + ); + + SpeckleLog.Logger.Error(ex, "Cannot receive object from the given transports {exceptionMessage}", ex.Message); + throw ex; + } + + // If we've reached this stage, it means that we didn't get a local transport hit on our object, so we will proceed to get it from the provided remote transport. + // This is done by copying itself and all its children from the remote transport into the local one. + remoteTransport.OnProgressAction = internalProgressAction; + remoteTransport.CancellationToken = cancellationToken; + + SpeckleLog.Logger.Debug( + "Cannot find object {objectId} in the local transport, hitting remote {transportName}", + remoteTransport.TransportName + ); + objString = await remoteTransport + .CopyObjectAndChildren(objectId, localTransport, onTotalChildrenCountKnown) + .ConfigureAwait(false); + + // Wait for the local transport to finish "writing" - in this case, it signifies that the remote transport has done pushing copying objects into it. (TODO: I can see some scenarios where latency can screw things up, and we should rather wait on the remote transport). + await localTransport.WriteComplete().ConfigureAwait(false); + + // Proceed to deserialise the object, now safely knowing that all its children are present in the local (fast) transport. + + Base? res = DeserializeStringToBase(serializerVersion, objString, settings, serializerV2); + if ((disposeTransports || !hasUserProvidedLocalTransport) && localTransport is IDisposable dl) + { + dl.Dispose(); + } + + if (disposeTransports && remoteTransport is IDisposable dr) + { + dr.Dispose(); + } + + SpeckleLog.Logger + .ForContext("deserializerElapsed", serializerV2?.Elapsed) + .ForContext( + "transportElapsedBreakdown", + new[] { localTransport, remoteTransport } + .Where(t => t != null) + .ToDictionary(t => t.TransportName, t => t.Elapsed) + ) + .Information( + "Finished receiving {objectId} from {source} in {elapsed} seconds", + objectId, + remoteTransport.TransportName, + timer.Elapsed.TotalSeconds + ); + return res; + + // Summary: + // Basically, receiving an object (and all its subchildren) operates with two transports, one that is potentially slow, and one that is fast. + // The fast transport ("localTransport") is used syncronously inside the deserialisation routine to get the value of nested references and set them. The slow transport ("remoteTransport") is used to get the raw data and populate the local transport with all necessary data for a successful deserialisation of the object. 
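+ // (The replacement Operations.Receive overload in Operations.Receive.cs keeps this same local-first, remote-fallback flow, minus the serializer-version switch and the transport disposal options.)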
+ // Note: if properly implemented, there is no hard distinction between what is a local or remote transport; it's still just a transport. So, for example, if you want to receive an object without actually writing it first to a local transport, you can just pass a Server/S3 transport as a local transport. + // This is not reccommended, but shows what you can do. Another tidbit: the local transport does not need to be disk-bound; it can easily be an in memory transport. In memory transports are the fastest ones, but they're of limited use for more + } + } + + [Obsolete("Serializer v1 is deprecated, use other overload(s)")] + private static Base? DeserializeStringToBase( + SerializerVersion serializerVersion, + string objString, + JsonSerializerSettings? settings, + BaseObjectDeserializerV2? serializerV2 + ) + { + if (serializerVersion == SerializerVersion.V1) + { + return JsonConvert.DeserializeObject(objString, settings); + } + else + { + return serializerV2!.Deserialize(objString); + } + } +} + +[Obsolete("Use " + nameof(TransportHelpers.Placeholder))] +internal sealed class Placeholder +{ + public Dictionary? __closure { get; set; } = new(); +} + +#pragma warning restore CA1068, IDE1006 diff --git a/src/Speckle.Core/Api/Operations/Operations.Receive.cs b/src/Speckle.Core/Api/Operations/Operations.Receive.cs new file mode 100644 index 00000000..2c8d1d98 --- /dev/null +++ b/src/Speckle.Core/Api/Operations/Operations.Receive.cs @@ -0,0 +1,206 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Serilog.Context; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Core.Serialisation; +using Speckle.Core.Transports; + +namespace Speckle.Core.Api; + +public static partial class Operations +{ + /// + /// Receives an object (and all its sub-children) from the two provided s. + ///
+ /// Will first try and find objects using the <paramref name="localTransport"/> (the faster transport) + /// If not found, will attempt to copy the objects from the <paramref name="remoteTransport"/> into the <paramref name="localTransport"/> before deserialization + ///
+ /// + /// If Transports are properly implemented, there is no hard distinction between what is a local or remote transport; it's still just an <see cref="ITransport"/>. + ///
So, for example, if you want to receive an object without actually writing it first to a local transport, you can just pass a server transport as a local transport. + ///
This is not recommended, but shows what you can do. Another tidbit: the local transport does not need to be disk-bound; it can easily be an in-memory transport. In-memory transports are the fastest ones, but they're of limited use for larger datasets. + ///
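+ /// <example>
+ /// A minimal usage sketch (illustrative only; assumes an existing <c>account</c>, <c>streamId</c> and <c>objectId</c>, and falls back to the default SQLite cache as the local transport):
+ /// <code>
+ /// using ServerTransport remote = new(account, streamId);
+ /// Base received = await Operations.Receive(objectId, remote);
+ /// </code>
+ /// </example>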
+ /// The id of the object to receive + /// The remote transport (slower). If , will assume all objects are present in + /// The local transport (faster). If , will use a default cache + /// Action invoked on progress iterations + /// Action invoked once the total count of objects is known + /// + /// Failed to retrieve objects from the provided transport(s) + /// Deserialization of the requested object(s) failed + /// requested cancel + /// The requested Speckle Object + public static async Task Receive( + string objectId, + ITransport? remoteTransport = null, + ITransport? localTransport = null, + Action>? onProgressAction = null, + Action? onTotalChildrenCountKnown = null, + CancellationToken cancellationToken = default + ) + { + // Setup Progress Reporting + var internalProgressAction = GetInternalProgressAction(onProgressAction); + + // Setup Local Transport + using IDisposable? d1 = UseDefaultTransportIfNull(localTransport, out localTransport); + localTransport.OnProgressAction = internalProgressAction; + localTransport.CancellationToken = cancellationToken; + + // Setup Remote Transport + if (remoteTransport is not null) + { + remoteTransport.OnProgressAction = internalProgressAction; + remoteTransport.CancellationToken = cancellationToken; + } + + // Setup Serializer + BaseObjectDeserializerV2 serializerV2 = + new() + { + ReadTransport = localTransport, + OnProgressAction = internalProgressAction, + CancellationToken = cancellationToken, + BlobStorageFolder = (remoteTransport as IBlobCapableTransport)?.BlobStorageFolder + }; + + // Setup Logging + using IDisposable d2 = LogContext.PushProperty("remoteTransportContext", remoteTransport?.TransportContext); + using IDisposable d3 = LogContext.PushProperty("localTransportContext", localTransport.TransportContext); + using IDisposable d4 = LogContext.PushProperty("objectId", objectId); + var timer = Stopwatch.StartNew(); + + // Receive Json + SpeckleLog.Logger.Information( + "Starting receive {objectId} from transports {localTransport} / {remoteTransport}", + objectId, + localTransport.TransportName, + remoteTransport?.TransportName + ); + + // Try Local Receive + string? objString = LocalReceive(objectId, localTransport, onTotalChildrenCountKnown); + + if (objString is null) + { + // Fall back to remote + if (remoteTransport is null) + { + var ex = new TransportException( + $"Could not find specified object using the local transport {localTransport.TransportName}, and you didn't provide a fallback remote from which to pull it." + ); + + SpeckleLog.Logger.Error(ex, "Cannot receive object from the given transports {exceptionMessage}", ex.Message); + throw ex; + } + + SpeckleLog.Logger.Debug( + "Cannot find object {objectId} in the local transport, hitting remote {transportName}", + objectId, + remoteTransport.TransportName + ); + + objString = await RemoteReceive(objectId, remoteTransport, localTransport, onTotalChildrenCountKnown) + .ConfigureAwait(false); + } + + // Proceed to deserialize the object, now safely knowing that all its children are present in the local (fast) transport. 
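+ // The deserializer resolves each nested reference synchronously from the local transport, so a missing child at this point would fail the receive.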
+ Base res = serializerV2.Deserialize(objString); + + timer.Stop(); + SpeckleLog.Logger + .ForContext("deserializerElapsed", serializerV2.Elapsed) + .ForContext( + "transportElapsedBreakdown", + new[] { localTransport, remoteTransport } + .Where(t => t != null) + .Select(t => new KeyValuePair(t!.TransportName, t.Elapsed)) + .ToArray() + ) + .Information( + "Finished receiving {objectId} from {source} in {elapsed} seconds", + objectId, + remoteTransport?.TransportName, + timer.Elapsed.TotalSeconds + ); + + return res; + } + + /// + /// Try and get the object from the local transport. If it's there, we assume all its children are there + /// This assumption is hard-wired into the + /// + /// + /// + /// + /// + /// + internal static string? LocalReceive( + string objectId, + ITransport localTransport, + Action? onTotalChildrenCountKnown + ) + { + string? objString = localTransport.GetObject(objectId); + if (objString is null) + { + return null; + } + + // Shoot out the total children count + var closures = TransportHelpers.GetClosureTable(objString); + + onTotalChildrenCountKnown?.Invoke(closures?.Count ?? 0); + + return objString; + } + + /// + /// Copies the requested object and all its children from to + /// + /// + /// + /// + /// + /// + /// + /// Remote transport was not specified + private static async Task RemoteReceive( + string objectId, + ITransport remoteTransport, + ITransport localTransport, + Action? onTotalChildrenCountKnown + ) + { + var objString = await remoteTransport + .CopyObjectAndChildren(objectId, localTransport, onTotalChildrenCountKnown) + .ConfigureAwait(false); + + // DON'T THINK THIS IS NEEDED CopyObjectAndChildren should call this + // Wait for the local transport to finish "writing" - in this case, it signifies that the remote transport has done pushing copying objects into it. (TODO: I can see some scenarios where latency can screw things up, and we should rather wait on the remote transport). + await localTransport.WriteComplete().ConfigureAwait(false); + + return objString; + } + + private static IDisposable? UseDefaultTransportIfNull(ITransport? userTransport, out ITransport actualLocalTransport) + { + if (userTransport is not null) + { + actualLocalTransport = userTransport; + return null; + } + + //User did not specify a transport, default to SQLite + SQLiteTransport defaultLocalTransport = new(); + actualLocalTransport = defaultLocalTransport; + return defaultLocalTransport; + } +} diff --git a/src/Speckle.Core/Api/Operations/Operations.Send.Obsolete.cs b/src/Speckle.Core/Api/Operations/Operations.Send.Obsolete.cs new file mode 100644 index 00000000..39ebc583 --- /dev/null +++ b/src/Speckle.Core/Api/Operations/Operations.Send.Obsolete.cs @@ -0,0 +1,241 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Serilog.Context; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Core.Serialisation; +using Speckle.Core.Transports; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Linq; + +namespace Speckle.Core.Api; + +public static partial class Operations +{ + private const string DEPRECATION_NOTICE = """ + This Send overload has been replaced by an overload with fewer function arguments. + We are no longer supporting SerializerV1, OnErrorAction, or handling disposal of transports. + Consider switching one of the other send overloads instead. 
+ This function will be kept around for several releases, but will eventually be removed. + """; + + /// + [Obsolete("This overload has been deprecated along with serializer v1. Use other Send overloads instead.")] + [SuppressMessage("Naming", "CA1720:Identifier contains type name")] + public static Task Send(Base @object) => Send(@object, CancellationToken.None); + + /// + [Obsolete("This overload has been deprecated along with serializer v1. Use other Send overloads instead.")] + [SuppressMessage("Naming", "CA1720:Identifier contains type name")] + public static Task Send( + Base @object, + List? transports, + bool useDefaultCache, + Action>? onProgressAction, + Action? onErrorAction, + bool disposeTransports, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) => + Send( + @object, + CancellationToken.None, + transports, + useDefaultCache, + onProgressAction, + onErrorAction, + disposeTransports, + serializerVersion + ); + + /// + [Obsolete("This overload has been deprecated along with serializer v1. Use other Send overloads instead.")] + [SuppressMessage("Naming", "CA1720:Identifier contains type name")] + public static Task Send( + Base @object, + List? transports, + bool useDefaultCache, + bool disposeTransports, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) => + Send( + @object, + CancellationToken.None, + transports, + useDefaultCache, + null, + null, + disposeTransports, + serializerVersion + ); + + /// + [Obsolete("This overload has been deprecated along with serializer v1. Use other Send overloads instead.")] + [SuppressMessage("Naming", "CA1720:Identifier contains type name")] + public static Task Send( + Base @object, + bool disposeTransports, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) => Send(@object, CancellationToken.None, null, true, null, null, disposeTransports, serializerVersion); + + /// + /// Sends an object via the provided transports. Defaults to the local cache. + /// + /// + /// This overload is deprecated. You should consider using + ///
+ /// one of the newer Send overloads instead: either the single-transport overload + /// or + /// the overload that accepts a collection of transports. + /// These new overloads no longer support switching serializer versions, as v1 is now deprecated. + /// We also no longer offer the option to dispose the transports for you. + /// You should instead handle disposal yourself + /// using conventional mechanisms like the using keyword. + ///
+ /// This function overload will be kept around for several releases, but will eventually be removed. + ///
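+ /// <example>
+ /// A minimal migration sketch (illustrative only; <c>myObject</c>, <c>account</c> and <c>streamId</c> are assumed, and transport disposal is handled by the caller):
+ /// <code>
+ /// using ServerTransport transport = new(account, streamId);
+ /// string objectId = await Operations.Send(myObject, new ITransport[] { transport });
+ /// </code>
+ /// </example>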
+ /// The object you want to send. + /// A cancellation token that can be used by other objects or threads to send notice of cancellation. + /// Where you want to send them. + /// Toggle for the default cache. If set to false, it will only send to the provided transports. + /// Action that gets triggered on every progress tick (keeps track of all transports). + /// Use this to capture and handle any errors from within the transports. + /// + /// + /// The id (hash) of the object. + [SuppressMessage("Naming", "CA1720:Identifier contains type name")] + [Obsolete(DEPRECATION_NOTICE)] + public static async Task Send( + Base @object, + CancellationToken cancellationToken, + List? transports = null, + bool useDefaultCache = true, + Action>? onProgressAction = null, + Action? onErrorAction = null, + bool disposeTransports = false, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) + { + transports ??= new List(); + using var sqLiteTransport = new SQLiteTransport { TransportName = "LC" }; + + if (transports.Count == 0 && useDefaultCache == false) + { + throw new ArgumentException( + "You need to provide at least one transport: cannot send with an empty transport list and no default cache.", + nameof(transports) + ); + } + + if (useDefaultCache) + { + transports.Insert(0, sqLiteTransport); + } + + var transportContext = transports.ToDictionary(t => t.TransportName, t => t.TransportContext); + + // make sure all logs in the operation have the proper context + using (LogContext.PushProperty("transportContext", transportContext)) + using (LogContext.PushProperty("correlationId", Guid.NewGuid().ToString())) + { + var sendTimer = Stopwatch.StartNew(); + SpeckleLog.Logger.Information("Starting send operation"); + + var internalProgressAction = GetInternalProgressAction(onProgressAction); + + BaseObjectSerializer? serializer = null; + JsonSerializerSettings? settings = null; + BaseObjectSerializerV2? 
serializerV2 = null; + if (serializerVersion == SerializerVersion.V1) + { + (serializer, settings) = GetSerializerInstance(); + serializer.WriteTransports = transports; + serializer!.OnProgressAction = internalProgressAction; + serializer.CancellationToken = cancellationToken; + serializer.OnErrorAction = onErrorAction; + } + else + { + serializerV2 = new BaseObjectSerializerV2(transports, internalProgressAction, false, cancellationToken); + } + + foreach (var t in transports) + { + t.OnProgressAction = internalProgressAction; + t.CancellationToken = cancellationToken; + t.BeginWrite(); + } + + string obj; + List transportAwaits; + if (serializerVersion == SerializerVersion.V1) + { + obj = JsonConvert.SerializeObject(@object, settings); + transportAwaits = serializer!.WriteTransports.Select(t => t.WriteComplete()).ToList(); + } + else + { + obj = serializerV2!.Serialize(@object); + transportAwaits = serializerV2.WriteTransports.Select(t => t.WriteComplete()).ToList(); + } + + if (cancellationToken.IsCancellationRequested) + { + SpeckleLog.Logger.Information( + "Send operation cancelled after {elapsed} seconds", + sendTimer.Elapsed.TotalSeconds + ); + cancellationToken.ThrowIfCancellationRequested(); + } + + await Task.WhenAll(transportAwaits).ConfigureAwait(false); + + foreach (var t in transports) + { + t.EndWrite(); + if (useDefaultCache && t is SQLiteTransport lc && lc.TransportName == "LC") + { + lc.Dispose(); + continue; + } + if (disposeTransports && t is IDisposable disp) + { + disp.Dispose(); + } + } + + if (cancellationToken.IsCancellationRequested) + { + SpeckleLog.Logger.Information("Send operation cancelled after {elapsed}", sendTimer.Elapsed.TotalSeconds); + cancellationToken.ThrowIfCancellationRequested(); + } + + var idToken = JObject.Parse(obj).GetValue("id"); + if (idToken == null) + { + throw new SpeckleException("Failed to get id of serialized object"); + } + + var hash = idToken.ToString(); + + sendTimer.Stop(); + SpeckleLog.Logger + .ForContext("transportElapsedBreakdown", transports.ToDictionary(t => t.TransportName, t => t.Elapsed)) + .ForContext("note", "the elapsed summary doesn't need to add up to the total elapsed... 
Threading magic...") + .ForContext("serializerElapsed", serializerV2?.Elapsed) + .Information( + "Finished sending {objectCount} objects after {elapsed}, result {objectId}", + transports.Max(t => t.SavedObjectCount), + sendTimer.Elapsed.TotalSeconds, + hash + ); + return hash; + } + } +} diff --git a/src/Speckle.Core/Api/Operations/Operations.Send.cs b/src/Speckle.Core/Api/Operations/Operations.Send.cs new file mode 100644 index 00000000..bf426935 --- /dev/null +++ b/src/Speckle.Core/Api/Operations/Operations.Send.cs @@ -0,0 +1,168 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Serilog.Context; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Core.Serialisation; +using Speckle.Core.Transports; +using Speckle.Newtonsoft.Json.Linq; + +namespace Speckle.Core.Api; + +public static partial class Operations +{ + /// + /// Sends a Speckle Object to the provided and (optionally) the default local cache + /// + /// + /// + /// When , an additional will be included + /// The or was + /// + /// using ServerTransport destination = new(account, streamId); + /// string objectId = await Send(mySpeckleObject, destination, true); + /// + public static async Task Send( + Base value, + ITransport transport, + bool useDefaultCache, + Action>? onProgressAction = null, + CancellationToken cancellationToken = default + ) + { + if (transport is null) + { + throw new ArgumentNullException(nameof(transport), "Expected a transport to be explicitly specified"); + } + + List transports = new() { transport }; + using SQLiteTransport? localCache = useDefaultCache ? new SQLiteTransport { TransportName = "LC" } : null; + if (localCache is not null) + { + transports.Add(localCache); + } + + return await Send(value, transports, onProgressAction, cancellationToken).ConfigureAwait(false); + } + + /// + /// Sends a Speckle Object to the provided + /// + /// Only sends to the specified transports, the default local cache won't be used unless you also pass it in + /// The id (hash) of the object sent + /// The object you want to send + /// Where you want to send them + /// Action that gets triggered on every progress tick (keeps track of all transports) + /// + /// No transports were specified + /// The was + /// Serialization or Send operation was unsuccessful + /// One or more failed to send + /// The requested cancellation + public static async Task Send( + Base value, + IReadOnlyCollection transports, + Action>? 
onProgressAction = null, + CancellationToken cancellationToken = default + ) + { + if (value is null) + { + throw new ArgumentNullException(nameof(value)); + } + + if (transports.Count == 0) + { + throw new ArgumentException("Expected at least on transport to be specified", nameof(transports)); + } + + var transportContext = transports.ToDictionary(t => t.TransportName, t => t.TransportContext); + + // make sure all logs in the operation have the proper context + using (LogContext.PushProperty("transportContext", transportContext)) + using (LogContext.PushProperty("correlationId", Guid.NewGuid().ToString())) + { + var sendTimer = Stopwatch.StartNew(); + SpeckleLog.Logger.Information("Starting send operation"); + + var internalProgressAction = GetInternalProgressAction(onProgressAction); + + BaseObjectSerializerV2 serializerV2 = new(transports, internalProgressAction, false, cancellationToken); + + foreach (var t in transports) + { + t.OnProgressAction = internalProgressAction; + t.CancellationToken = cancellationToken; + t.BeginWrite(); + } + + string hash; + try + { + hash = await SerializerSend(value, serializerV2, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.Information( + ex, + "Send operation failed after {elapsed} seconds", + sendTimer.Elapsed.TotalSeconds + ); + if (ex is OperationCanceledException or SpeckleException) + { + throw; + } + + throw new SpeckleException("Send operation was unsuccessful", ex); + } + finally + { + foreach (var t in transports) + { + t.EndWrite(); + } + } + + sendTimer.Stop(); + SpeckleLog.Logger + .ForContext("transportElapsedBreakdown", transports.ToDictionary(t => t.TransportName, t => t.Elapsed)) + .ForContext("note", "the elapsed summary doesn't need to add up to the total elapsed... Threading magic...") + .ForContext("serializerElapsed", serializerV2.Elapsed) + .Information( + "Finished sending {objectCount} objects after {elapsed}, result {objectId}", + transports.Max(t => t.SavedObjectCount), + sendTimer.Elapsed.TotalSeconds, + hash + ); + return hash; + } + } + + /// + internal static async Task SerializerSend( + Base value, + BaseObjectSerializerV2 serializer, + CancellationToken cancellationToken = default + ) + { + string obj = serializer.Serialize(value); + Task[] transportAwaits = serializer.WriteTransports.Select(t => t.WriteComplete()).ToArray(); + + cancellationToken.ThrowIfCancellationRequested(); + + await Task.WhenAll(transportAwaits).ConfigureAwait(false); + + JToken? idToken = JObject.Parse(obj).GetValue("id"); + if (idToken == null) + { + throw new SpeckleException("Failed to get id of serialized object"); + } + + return idToken.ToString(); + } +} diff --git a/src/Speckle.Core/Api/Operations/Operations.Serialize.cs b/src/Speckle.Core/Api/Operations/Operations.Serialize.cs new file mode 100644 index 00000000..bd3bea97 --- /dev/null +++ b/src/Speckle.Core/Api/Operations/Operations.Serialize.cs @@ -0,0 +1,151 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Core.Serialisation; +using Speckle.Newtonsoft.Json; + +namespace Speckle.Core.Api; + +public static partial class Operations +{ + /// + /// Serializes a given object. + /// + /// + /// If you want to save and persist an object to Speckle Transport or Server, + /// please use any of the "Send" methods. 
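+ /// <example>
+ /// A minimal round-trip sketch (illustrative only; assumes <c>myObject</c> has no detached or chunked properties, since Deserialize rejects strings containing closure references):
+ /// <code>
+ /// string json = Operations.Serialize(myObject);
+ /// Base copy = Operations.Deserialize(json);
+ /// </code>
+ /// </example>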
+ /// + /// + /// The object to serialise + /// + /// A json string representation of the object. + public static string Serialize(Base value, CancellationToken cancellationToken = default) + { + var serializer = new BaseObjectSerializerV2 { CancellationToken = cancellationToken }; + return serializer.Serialize(value); + } + + /// + /// Note: if you want to pull an object from a Speckle Transport or Server, + /// please use + /// + /// + /// The json string representation of a speckle object that you want to deserialize + /// + /// + /// was null + /// was not valid JSON + /// cannot be deserialised to type + /// contains closure references (see Remarks) + public static Base Deserialize(string value, CancellationToken cancellationToken = default) + { + var deserializer = new BaseObjectDeserializerV2 { CancellationToken = cancellationToken }; + return deserializer.Deserialize(value); + } + + #region obsolete + + [Obsolete("Serializer v1 is deprecated, use other overload(s)")] + public static string Serialize( + Base value, + SerializerVersion serializerVersion, + CancellationToken cancellationToken = default + ) + { + if (serializerVersion == SerializerVersion.V1) + { + var (serializer, settings) = GetSerializerInstance(); + serializer.CancellationToken = cancellationToken; + + return JsonConvert.SerializeObject(value, settings); + } + else + { + return Serialize(value, cancellationToken); + } + } + + [Obsolete("Serializer v1 is deprecated, use other overload(s)")] + public static Base Deserialize( + string value, + SerializerVersion serializerVersion, + CancellationToken cancellationToken = default + ) + { + if (serializerVersion == SerializerVersion.V1) + { + var (serializer, settings) = GetSerializerInstance(); + serializer.CancellationToken = cancellationToken; + var ret = JsonConvert.DeserializeObject(value, settings); + return ret ?? throw new SpeckleException($"{nameof(value)} failed to deserialize to a {nameof(Base)} object"); + } + + return Deserialize(value, cancellationToken); + } + + [Obsolete("Please use the Deserialize(string value) function.", true)] + public static List DeserializeArray( + string objectArr, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) + { + throw new NotImplementedException(); + } + + [Obsolete( + "Please use the Deserialize(Base @object) function. This function will be removed in later versions.", + true + )] + public static Dictionary DeserializeDictionary(string dictionary) + { + throw new NotImplementedException(); + } + + [Obsolete("Use overload that takes cancellation token last")] + [SuppressMessage("Naming", "CA1720:Identifier contains type name")] + public static Base Deserialize( + string @object, + CancellationToken cancellationToken, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) + { + return Deserialize(@object, serializerVersion, cancellationToken); + } + + [Obsolete("Use overload that takes cancellation token last")] + [SuppressMessage("Naming", "CA1720:Identifier contains type name")] + public static string Serialize( + Base @object, + CancellationToken cancellationToken, + SerializerVersion serializerVersion = SerializerVersion.V2 + ) + { + return Serialize(@object, serializerVersion, cancellationToken); + } + + /// + /// Serializes a list of objects. Note: if you want to save and persist objects to speckle, please use any of the "Send" methods. + /// + /// + /// + [Obsolete("Please use the Serialize(Base value) function. 
This function will be removed in later versions.", true)] + public static string Serialize(List objects) + { + throw new NotImplementedException(); + } + + /// + /// Serializes a list of objects. Note: if you want to save and persist objects to speckle, please use any of the "Send" methods. + /// + /// + /// + [Obsolete("Please use the Serialize(Base value) function. This function will be removed in later versions.")] + public static string Serialize(Dictionary objects) + { + throw new NotImplementedException(); + } + #endregion +} diff --git a/src/Speckle.Core/Api/Operations/Operations.cs b/src/Speckle.Core/Api/Operations/Operations.cs new file mode 100644 index 00000000..2d5cf5bf --- /dev/null +++ b/src/Speckle.Core/Api/Operations/Operations.cs @@ -0,0 +1,62 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using Speckle.Core.Serialisation; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Serialization; + +namespace Speckle.Core.Api; + +/// +/// Exposes several key methods for interacting with Speckle.Speckle.Core. +/// Serialize/Deserialize +/// Push/Pull (methods to serialize and send data to one or more servers) +/// +public static partial class Operations +{ + /// + /// Convenience method to instantiate an instance of the default object serializer and settings pre-populated with it. + /// + [Obsolete("V1 Serializer is deprecated. Use " + nameof(BaseObjectSerializerV2))] + public static (BaseObjectSerializer, JsonSerializerSettings) GetSerializerInstance() + { + var serializer = new BaseObjectSerializer(); + var settings = new JsonSerializerSettings + { + NullValueHandling = NullValueHandling.Ignore, + ContractResolver = new CamelCasePropertyNamesContractResolver(), + Formatting = Formatting.None, + ReferenceLoopHandling = ReferenceLoopHandling.Ignore, + Converters = new List { serializer } + }; + + return (serializer, settings); + } + + /// + /// Factory for progress actions used internally inside send and receive methods. + /// + /// + /// + private static Action? GetInternalProgressAction( + Action>? onProgressAction + ) + { + if (onProgressAction is null) + { + return null; + } + + var localProgressDict = new ConcurrentDictionary(); + + return (name, processed) => + { + if (!localProgressDict.TryAdd(name, processed)) + { + localProgressDict[name] += processed; + } + + onProgressAction.Invoke(localProgressDict); + }; + } +} diff --git a/src/Speckle.Core/Api/ServerLimits.cs b/src/Speckle.Core/Api/ServerLimits.cs new file mode 100644 index 00000000..7f78def9 --- /dev/null +++ b/src/Speckle.Core/Api/ServerLimits.cs @@ -0,0 +1,14 @@ +namespace Speckle.Core.Api; + +/// +/// Defines the limits for specific API calls on the Speckle Server. +/// These are magic numbers! Should be aligned with server always. +/// +/// +/// ⚠️ Not all limits are reflected here! 
+/// +public static class ServerLimits +{ + public const int BRANCH_GET_LIMIT = 500; + public const int OLD_BRANCH_GET_LIMIT = 100; +} diff --git a/src/Speckle.Core/Credentials/Account.cs b/src/Speckle.Core/Credentials/Account.cs new file mode 100644 index 00000000..2774a7a4 --- /dev/null +++ b/src/Speckle.Core/Credentials/Account.cs @@ -0,0 +1,125 @@ +#nullable disable +using System; +using System.Runtime.InteropServices; +using System.Threading.Tasks; +using Speckle.Core.Api; +using Speckle.Core.Helpers; + +namespace Speckle.Core.Credentials; + +[ClassInterface(ClassInterfaceType.AutoDual)] +[ComVisible(true)] +public class Account : IEquatable +{ + private string _id; + + /// + /// The account id is unique to user and server url. + /// + /// Account object invalid: missing required info + public string id + { + get + { + if (_id == null) + { + if (serverInfo == null || userInfo == null) + { + throw new InvalidOperationException("Incomplete account info: cannot generate id."); + } + + _id = Crypt.Md5(userInfo.email + serverInfo.url, "X2"); + } + return _id; + } + set => _id = value; + } + + public string token { get; set; } + + public string refreshToken { get; set; } + + public bool isDefault { get; set; } + public bool isOnline { get; set; } = true; + + public ServerInfo serverInfo { get; set; } + + public UserInfo userInfo { get; set; } + + #region private methods + + private static string CleanURL(string server) + { + if (Uri.TryCreate(server, UriKind.Absolute, out Uri newUri)) + { + server = newUri.Authority; + } + + return server; + } + + #endregion + + #region public methods + + public string GetHashedEmail() + { + string email = userInfo?.email ?? "unknown"; + return "@" + Crypt.Md5(email, "X2"); + } + + public string GetHashedServer() + { + string url = serverInfo?.url ?? AccountManager.DEFAULT_SERVER_URL; + return Crypt.Md5(CleanURL(url), "X2"); + } + + public async Task Validate() + { + Uri server = new(serverInfo.url); + return await AccountManager.GetUserInfo(token, server).ConfigureAwait(false); + } + + public override string ToString() + { + return $"Account ({userInfo.email} | {serverInfo.url})"; + } + + public bool Equals(Account other) + { + return other is not null && other.userInfo.email == userInfo.email && other.serverInfo.url == serverInfo.url; + } + + public override bool Equals(object obj) + { + return obj is Account acc && Equals(acc); + } + + #endregion + + /// + /// Retrieves the local identifier for the current user. + /// + /// + /// Returns a object representing the local identifier for the current user. + /// The local identifier is created by appending the user ID as a query parameter to the server URL. + /// + /// + /// Notice that the generated Uri is not intended to be used as a functioning Uri, but rather as a + /// unique identifier for a specific account in a local environment. The format of the Uri, containing a query parameter with the user ID, + /// serves this specific purpose. Therefore, it should not be used for forming network requests or + /// expecting it to lead to an actual webpage. The primary intent of this Uri is for unique identification in a Uri format. + /// + /// + /// This sample shows how to call the GetLocalIdentifier method. 
+ /// + /// Uri localIdentifier = GetLocalIdentifier(); + /// Console.WriteLine(localIdentifier); + /// + /// For a fictional `User ID: 123` and `Server: https://speckle.xyz`, the output might look like this: + /// + /// https://speckle.xyz?id=123 + /// + /// + internal Uri GetLocalIdentifier() => new($"{serverInfo.url}?id={userInfo.id}"); +} diff --git a/src/Speckle.Core/Credentials/AccountManager.cs b/src/Speckle.Core/Credentials/AccountManager.cs new file mode 100644 index 00000000..0f913882 --- /dev/null +++ b/src/Speckle.Core/Credentials/AccountManager.cs @@ -0,0 +1,878 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using GraphQL; +using GraphQL.Client.Http; +using Speckle.Core.Api; +using Speckle.Core.Api.GraphQL; +using Speckle.Core.Api.GraphQL.Serializer; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Core.Transports; +using Speckle.Newtonsoft.Json; +using Stream = System.IO.Stream; + +namespace Speckle.Core.Credentials; + +/// +/// Manage accounts locally for desktop applications. +/// +public static class AccountManager +{ + public const string DEFAULT_SERVER_URL = "https://app.speckle.systems"; + + private static readonly SQLiteTransport s_accountStorage = new(scope: "Accounts"); + private static volatile bool s_isAddingAccount; + private static readonly SQLiteTransport s_accountAddLockStorage = new(scope: "AccountAddFlow"); + + /// + /// Gets the basic information about a server. + /// + /// Server URL + /// + public static async Task GetServerInfo(Uri server, CancellationToken cancellationToken = default) + { + using var httpClient = Http.GetHttpProxyClient(); + + using var gqlClient = new GraphQLHttpClient( + new GraphQLHttpClientOptions + { + EndPoint = new Uri(server, "/graphql"), + UseWebSocketForQueriesAndMutations = false + }, + new NewtonsoftJsonSerializer(), + httpClient + ); + + System.Version version = await gqlClient + .GetServerVersion(cancellationToken: cancellationToken) + .ConfigureAwait(false); + + // serverMigration property was added in 2.18.5, so only query for it + // if the server has been updated past that version + System.Version serverMigrationVersion = new(2, 18, 5); + + string queryString; + if (version >= serverMigrationVersion) + { + //language=graphql + queryString = "query { serverInfo { name company migration { movedFrom movedTo } } }"; + } + else + { + //language=graphql + queryString = "query { serverInfo { name company } }"; + } + + var request = new GraphQLRequest { Query = queryString }; + + var response = await gqlClient.SendQueryAsync(request, cancellationToken).ConfigureAwait(false); + + if (response.Errors is not null) + { + throw new SpeckleGraphQLException( + $"GraphQL request {nameof(GetServerInfo)} failed", + request, + response + ); + } + + ServerInfo serverInfo = response.Data.serverInfo; + serverInfo.url = server.ToString().TrimEnd('/'); + serverInfo.frontend2 = await IsFrontend2Server(server).ConfigureAwait(false); + + return response.Data.serverInfo; + } + + /// + /// Gets basic user information given a token and a server. 
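+ /// <example>
+ /// An illustrative call (the token value is assumed to come from an existing account):
+ /// <code>
+ /// UserInfo user = await AccountManager.GetUserInfo(token, new Uri("https://app.speckle.systems"));
+ /// </code>
+ /// </example>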
+ /// + /// + /// Server URL + /// + public static async Task GetUserInfo( + string token, + Uri server, + CancellationToken cancellationToken = default + ) + { + using var httpClient = Http.GetHttpProxyClient(); + Http.AddAuthHeader(httpClient, token); + + using var gqlClient = new GraphQLHttpClient( + new GraphQLHttpClientOptions { EndPoint = new Uri(server, "/graphql") }, + new NewtonsoftJsonSerializer(), + httpClient + ); + + //language=graphql + var request = new GraphQLRequest { Query = " query { activeUser { name email id company } }" }; + + var response = await gqlClient.SendQueryAsync(request, cancellationToken).ConfigureAwait(false); + + if (response.Errors != null) + { + throw new SpeckleGraphQLException( + $"GraphQL request {nameof(GetUserInfo)} failed", + request, + response + ); + } + + return response.Data.activeUser; + } + + /// + /// Gets basic user and server information given a token and a server. + /// + /// + /// Server URL + /// + internal static async Task GetUserServerInfo( + string token, + Uri server, + CancellationToken ct = default + ) + { + try + { + using var httpClient = Http.GetHttpProxyClient(); + Http.AddAuthHeader(httpClient, token); + + using var client = new GraphQLHttpClient( + new GraphQLHttpClientOptions { EndPoint = new Uri(server, "/graphql") }, + new NewtonsoftJsonSerializer(), + httpClient + ); + + System.Version version = await client.GetServerVersion(ct).ConfigureAwait(false); + + // serverMigration property was added in 2.18.5, so only query for it + // if the server has been updated past that version + System.Version serverMigrationVersion = new(2, 18, 5); + + string queryString; + if (version >= serverMigrationVersion) + { + //language=graphql + queryString = + "query { activeUser { id name email company avatar streams { totalCount } commits { totalCount } } serverInfo { name company adminContact description version migration { movedFrom movedTo } } }"; + } + else + { + //language=graphql + queryString = + "query { activeUser { id name email company avatar streams { totalCount } commits { totalCount } } serverInfo { name company adminContact description version } }"; + } + + var request = new GraphQLRequest { Query = queryString }; + + var response = await client.SendQueryAsync(request, ct).ConfigureAwait(false); + + if (response.Errors != null) + { + throw new SpeckleGraphQLException( + $"Query {nameof(GetUserServerInfo)} failed", + request, + response + ); + } + + ServerInfo serverInfo = response.Data.serverInfo; + serverInfo.url = server.ToString().TrimEnd('/'); + serverInfo.frontend2 = await IsFrontend2Server(server).ConfigureAwait(false); + + return response.Data; + } + catch (Exception ex) when (!ex.IsFatal()) + { + throw new SpeckleException($"Failed to get user + server info from {server}", ex); + } + } + + /// + /// The Default Server URL for authentication, can be overridden by placing a file with the alternatrive url in the Speckle folder or with an ENV_VAR + /// + public static string GetDefaultServerUrl() + { + var serverUrl = DEFAULT_SERVER_URL; + var customServerUrl = ""; + + // first mechanism, check for local file + var customServerFile = Path.Combine(SpecklePathProvider.UserSpeckleFolderPath, "server"); + if (File.Exists(customServerFile)) + { + customServerUrl = File.ReadAllText(customServerFile); + } + + // second mechanism, check ENV VAR + var customServerEnvVar = Environment.GetEnvironmentVariable("SPECKLE_SERVER"); + if (!string.IsNullOrEmpty(customServerEnvVar)) + { + customServerUrl = customServerEnvVar; + } + + if 
(!string.IsNullOrEmpty(customServerUrl)) + { + Uri.TryCreate(customServerUrl, UriKind.Absolute, out Uri url); + if (url != null) + { + serverUrl = customServerUrl.TrimEnd('/'); + } + } + + return serverUrl; + } + + /// The Id of the account to fetch + /// + /// Account with was not found + public static Account GetAccount(string id) + { + return GetAccounts().FirstOrDefault(acc => acc.id == id) + ?? throw new SpeckleAccountManagerException($"Account {id} not found"); + } + + /// + /// Upgrades an account from the account.serverInfo.movedFrom account to the account.serverInfo.movedTo account + /// + /// Id of the account to upgrade + public static async Task UpgradeAccount(string id) + { + Account account = GetAccount(id); + + if (account.serverInfo.migration.movedTo is not Uri upgradeUri) + { + throw new SpeckleAccountManagerException( + $"Server with url {account.serverInfo.url} does not have information about the upgraded server" + ); + } + + account.serverInfo.migration.movedTo = null; + account.serverInfo.migration.movedFrom = new Uri(account.serverInfo.url); + account.serverInfo.url = upgradeUri.ToString().TrimEnd('/'); + account.serverInfo.frontend2 = true; + + // setting the id to null will force it to be recreated + account.id = null; + + RemoveAccount(id); + s_accountStorage.SaveObject(account.id, JsonConvert.SerializeObject(account)); + await s_accountStorage.WriteComplete().ConfigureAwait(false); + } + + /// + /// Returns all unique accounts matching the serverUrl provided. If an account exists on more than one server, + /// typically because it has been migrated, then only the upgraded account (and therefore server) are returned. + /// Accounts are deemed to be the same when the Account.Id matches. + /// + /// + /// + public static IEnumerable GetAccounts(string serverUrl) + { + var accounts = GetAccounts().ToList(); + List filtered = new(); + + foreach (var acc in accounts) + { + if (acc.serverInfo?.migration?.movedFrom == new Uri(serverUrl)) + { + filtered.Add(acc); + } + } + + foreach (var acc in accounts) + { + // we use the userInfo to detect the same account rather than the account.id + // which should NOT match for essentially the same accounts but on different servers - i.e. FE1 & FE2 + if (acc.serverInfo.url == serverUrl && !filtered.Any(x => x.userInfo.id == acc.userInfo.id)) + { + filtered.Add(acc); + } + } + + return filtered; + } + + /// + /// Gets this environment's default account if any. If there is no default, the first found will be returned and set as default. + /// + /// The default account or null. + public static Account? GetDefaultAccount() + { + var defaultAccount = GetAccounts().FirstOrDefault(acc => acc.isDefault); + if (defaultAccount != null) + { + return defaultAccount; + } + + var firstAccount = GetAccounts().FirstOrDefault(); + if (firstAccount == null) + { + SpeckleLog.Logger.Information("No Speckle accounts found. Visit the Speckle web app to create one"); + } + + return firstAccount; + } + + /// + /// Gets all the accounts present in this environment. + /// + /// This function does have potential side effects. 
Any invalid accounts found while enumerating will be removed + /// Un-enumerated enumerable of accounts + public static IEnumerable GetAccounts() + { + static bool IsInvalid(Account ac) => ac.userInfo == null || ac.serverInfo == null; + + var sqlAccounts = s_accountStorage.GetAllObjects().Select(x => JsonConvert.DeserializeObject(x)); + var localAccounts = GetLocalAccounts(); + + foreach (var acc in sqlAccounts) + { + if (IsInvalid(acc)) + { + RemoveAccount(acc.id); + } + else + { + yield return acc; + } + } + + foreach (var acc in localAccounts) + { + yield return acc; + } + } + + /// + /// Gets the local accounts + /// These are accounts not handled by Manager and are stored in json format in a local directory + /// + /// + private static IList GetLocalAccounts() + { + var accountsDir = SpecklePathProvider.AccountsFolderPath; + if (!Directory.Exists(accountsDir)) + { + return Array.Empty(); + } + + var accounts = new List(); + string[] files = Directory.GetFiles(accountsDir, "*.json", SearchOption.AllDirectories); + foreach (var file in files) + { + try + { + var json = File.ReadAllText(file); + Account? account = JsonConvert.DeserializeObject(json); + + if ( + account is not null + && !string.IsNullOrEmpty(account.token) + && !string.IsNullOrEmpty(account.userInfo.id) + && !string.IsNullOrEmpty(account.userInfo.email) + && !string.IsNullOrEmpty(account.userInfo.name) + && !string.IsNullOrEmpty(account.serverInfo.url) + && !string.IsNullOrEmpty(account.serverInfo.name) + ) + { + accounts.Add(account); + } + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.Warning(ex, "Failed to load json account at {filePath}", file); + } + } + + return accounts; + } + + /// + /// Refetches user and server info for each account + /// + /// + public static async Task UpdateAccounts(CancellationToken ct = default) + { + // need to ToList() the GetAccounts call or the UpdateObject call at the end of this method + // will not work because sqlite does not support concurrent db calls + foreach (var account in GetAccounts().ToList()) + { + try + { + Uri url = new(account.serverInfo.url); + var userServerInfo = await GetUserServerInfo(account.token, url, ct).ConfigureAwait(false); + + //the token has expired + //TODO: once we get a token expired exception from the server use that instead + if (userServerInfo?.activeUser == null || userServerInfo.serverInfo == null) + { + var tokenResponse = await GetRefreshedToken(account.refreshToken, url).ConfigureAwait(false); + userServerInfo = await GetUserServerInfo(tokenResponse.token, url, ct).ConfigureAwait(false); + + if (userServerInfo?.activeUser == null || userServerInfo.serverInfo == null) + { + throw new SpeckleException("Could not refresh token"); + } + + account.token = tokenResponse.token; + account.refreshToken = tokenResponse.refreshToken; + } + + account.isOnline = true; + account.userInfo = userServerInfo.activeUser; + account.serverInfo = userServerInfo.serverInfo; + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) when (!ex.IsFatal()) + { + account.isOnline = false; + } + + ct.ThrowIfCancellationRequested(); + s_accountStorage.UpdateObject(account.id, JsonConvert.SerializeObject(account)); + } + } + + /// + /// Removes an account + /// + /// ID of the account to remove + public static void RemoveAccount(string id) + { + //TODO: reset default account + s_accountStorage.DeleteObject(id); + + var accounts = GetAccounts(); + //BUG: Clearly this is a bug bug bug! 
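+ // If no default account remains after the deletion, promote the first remaining account to default.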
+ if (accounts.Any() && !accounts.Any(x => x.isDefault)) + { + ChangeDefaultAccount(accounts.First().id); + } + } + + /// + /// Changes the default account + /// + /// + public static void ChangeDefaultAccount(string id) + { + foreach (var account in GetAccounts()) + { + if (account.id != id) + { + account.isDefault = false; + } + else + { + account.isDefault = true; + } + + s_accountStorage.UpdateObject(account.id, JsonConvert.SerializeObject(account)); + } + } + + /// + /// Retrieves the local identifier for the specified account. + /// + /// The account for which to retrieve the local identifier. + /// The local identifier for the specified account in the form of "SERVER_URL?u=USER_ID". + /// + /// + /// + public static Uri? GetLocalIdentifierForAccount(Account account) + { + var identifier = account.GetLocalIdentifier(); + + // Validate account is stored locally + var searchResult = GetAccountForLocalIdentifier(identifier); + + return searchResult == null ? null : identifier; + } + + /// + /// Gets the account that corresponds to the given local identifier. + /// + /// The local identifier of the account. + /// The account that matches the local identifier, or null if no match is found. + public static Account? GetAccountForLocalIdentifier(Uri localIdentifier) + { + var searchResult = GetAccounts() + .FirstOrDefault(acc => + { + var id = acc.GetLocalIdentifier(); + return id == localIdentifier; + }); + + return searchResult; + } + + private static string EnsureCorrectServerUrl(string server) + { + var localUrl = server; + if (string.IsNullOrEmpty(localUrl)) + { + localUrl = GetDefaultServerUrl(); + SpeckleLog.Logger.Debug( + "The provided server url was null or empty. Changed to the default url {serverUrl}", + localUrl + ); + } + return localUrl.TrimEnd('/'); + } + + private static void EnsureGetAccessCodeFlowIsSupported() + { + if (!HttpListener.IsSupported) + { + SpeckleLog.Logger.Error("HttpListener not supported"); + throw new PlatformNotSupportedException("Your operating system is not supported"); + } + } + + private static async Task GetAccessCode(string server, string challenge, TimeSpan timeout) + { + EnsureGetAccessCodeFlowIsSupported(); + + SpeckleLog.Logger.Debug("Starting auth process for {server}/authn/verify/sca/{challenge}", server, challenge); + + var accessCode = ""; + + Process.Start(new ProcessStartInfo($"{server}/authn/verify/sca/{challenge}") { UseShellExecute = true }); + + var task = Task.Run(() => + { + using var listener = new HttpListener(); + var localUrl = "http://localhost:29363/"; + listener.Prefixes.Add(localUrl); + listener.Start(); + SpeckleLog.Logger.Debug("Listening for auth redirects on {localUrl}", localUrl); + // Note: The GetContext method blocks while waiting for a request. + HttpListenerContext context = listener.GetContext(); + HttpListenerRequest request = context.Request; + HttpListenerResponse response = context.Response; + + accessCode = request.QueryString["access_code"]; + SpeckleLog.Logger.Debug("Got access code {accessCode}", accessCode); + + string message = + accessCode != null + ? "Success!

You can close this window now." : "Oops, something went wrong...!"; + + var responseString = + $"
{message}"; + byte[] buffer = Encoding.UTF8.GetBytes(responseString); + response.ContentLength64 = buffer.Length; + Stream output = response.OutputStream; + output.Write(buffer, 0, buffer.Length); + output.Close(); + SpeckleLog.Logger.Debug("Processed finished processing the access code"); + listener.Stop(); + listener.Close(); + }); + + var completedTask = await Task.WhenAny(task, Task.Delay(timeout)).ConfigureAwait(false); + + // this is means the task timed out + if (completedTask != task) + { + SpeckleLog.Logger.Warning( + "Local auth flow failed to complete within the timeout window. Access code is {accessCode}", + accessCode + ); + throw new Exception("Local auth flow failed to complete within the timeout window"); + } + + if (task.IsFaulted) + { + SpeckleLog.Logger.Error( + task.Exception, + "Getting access code flow failed with {exceptionMessage}", + task.Exception.Message + ); + throw new Exception($"Auth flow failed: {task.Exception.Message}", task.Exception); + } + + // task completed within timeout + SpeckleLog.Logger.Information( + "Local auth flow completed successfully within the timeout window. Access code is {accessCode}", + accessCode + ); + return accessCode; + } + + private static async Task CreateAccount(string accessCode, string challenge, string server) + { + try + { + var tokenResponse = await GetToken(accessCode, challenge, server).ConfigureAwait(false); + var userResponse = await GetUserServerInfo(tokenResponse.token, new(server)).ConfigureAwait(false); + + var account = new Account + { + token = tokenResponse.token, + refreshToken = tokenResponse.refreshToken, + isDefault = !GetAccounts().Any(), + serverInfo = userResponse.serverInfo, + userInfo = userResponse.activeUser + }; + SpeckleLog.Logger.Information("Successfully created account for {serverUrl}", server); + + return account; + } + catch (Exception ex) when (!ex.IsFatal()) + { + throw new SpeckleAccountManagerException("Failed to create account from access code and challenge", ex); + } + } + + private static void TryLockAccountAddFlow(TimeSpan timespan) + { + // use a static variable to quickly + // prevent launching this flow multiple times + if (s_isAddingAccount) + { + // this should probably throw with an error message + throw new SpeckleAccountFlowLockedException("The account add flow is already launched."); + } + + // this uses the SQLite transport to store locks + var lockIds = s_accountAddLockStorage.GetAllObjects().OrderByDescending(d => d).ToList(); + var now = DateTime.Now; + foreach (var l in lockIds) + { + var lockArray = l.Split('@'); + var lockName = lockArray.Length == 2 ? lockArray[0] : "the other app"; + var lockTime = + lockArray.Length == 2 + ? 
DateTime.ParseExact(lockArray[1], "o", null) + : DateTime.ParseExact(lockArray[0], "o", null); + + if (lockTime > now) + { + var lockString = string.Format("{0:mm} minutes {0:ss} seconds", lockTime - now); + throw new SpeckleAccountFlowLockedException( + $"The account add flow was already started in {lockName}, retry in {lockString}" + ); + } + } + + var lockId = Setup.HostApplication + "@" + DateTime.Now.Add(timespan).ToString("o"); + + // using the lock release time as an id and value + // for ease of deletion and retrieval + s_accountAddLockStorage.SaveObjectSync(lockId, lockId); + s_isAddingAccount = true; + } + + private static void UnlockAccountAddFlow() + { + s_isAddingAccount = false; + // make sure all old locks are removed + foreach (var id in s_accountAddLockStorage.GetAllObjects()) + { + s_accountAddLockStorage.DeleteObject(id); + } + } + + /// + /// Adds an account by propting the user to log in via a web flow + /// + /// Server to use to add the account, if not provied the default Server will be used + /// + public static async Task AddAccount(string server = "") + { + SpeckleLog.Logger.Debug("Starting to add account for {serverUrl}", server); + + server = EnsureCorrectServerUrl(server); + + // locking for 1 minute + var timeout = TimeSpan.FromMinutes(1); + // this is not part of the try finally block + // we do not want to clean up the existing locks + TryLockAccountAddFlow(timeout); + var challenge = GenerateChallenge(); + + try + { + string accessCode = await GetAccessCode(server, challenge, timeout).ConfigureAwait(false); + if (string.IsNullOrEmpty(accessCode)) + { + throw new SpeckleAccountManagerException("Access code is invalid"); + } + + var account = await CreateAccount(accessCode, challenge, server).ConfigureAwait(false); + + //if the account already exists it will not be added again + s_accountStorage.SaveObject(account.id, JsonConvert.SerializeObject(account)); + SpeckleLog.Logger.Debug("Finished adding account {accountId} for {serverUrl}", account.id, server); + } + catch (SpeckleAccountManagerException ex) + { + SpeckleLog.Logger.Fatal(ex, "Failed to add account: {exceptionMessage}", ex.Message); + // rethrowing any known errors + throw; + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.Fatal(ex, "Failed to add account: {exceptionMessage}", ex.Message); + throw new SpeckleAccountManagerException($"Failed to add account: {ex.Message}", ex); + } + finally + { + UnlockAccountAddFlow(); + } + } + + private static async Task GetToken(string accessCode, string challenge, string server) + { + try + { + using var client = Http.GetHttpProxyClient(); + + var body = new + { + appId = "sca", + appSecret = "sca", + accessCode, + challenge + }; + + using var content = new StringContent(JsonConvert.SerializeObject(body)); + content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + var response = await client.PostAsync($"{server}/auth/token", content).ConfigureAwait(false); + + return JsonConvert.DeserializeObject( + await response.Content.ReadAsStringAsync().ConfigureAwait(false) + ); + } + catch (Exception ex) when (!ex.IsFatal()) + { + throw new SpeckleException($"Failed to get authentication token from {server}", ex); + } + } + + private static async Task GetRefreshedToken(string refreshToken, Uri server) + { + try + { + using var client = Http.GetHttpProxyClient(); + + var body = new + { + appId = "sca", + appSecret = "sca", + refreshToken + }; + + using var content = new StringContent(JsonConvert.SerializeObject(body)); + 
content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + var response = await client.PostAsync(new Uri(server, "/auth/token"), content).ConfigureAwait(false); + + return JsonConvert.DeserializeObject( + await response.Content.ReadAsStringAsync().ConfigureAwait(false) + ); + } + catch (Exception ex) when (!ex.IsFatal()) + { + throw new SpeckleException($"Failed to get refreshed token from {server}", ex); + } + } + + /// + /// Sends a simple get request to the , and checks the response headers for a "x-speckle-frontend-2" value + /// + /// Server endpoint to get header + /// if response contains FE2 header and the value was + /// response contained FE2 header, but the value was , empty, or not parseable to a + /// Request to failed to send or response was not successful + private static async Task IsFrontend2Server(Uri server) + { + using var httpClient = Http.GetHttpProxyClient(); + + var response = await Http.HttpPing(server).ConfigureAwait(false); + + var headers = response.Headers; + const string HEADER = "x-speckle-frontend-2"; + if (!headers.TryGetValues(HEADER, out IEnumerable values)) + { + return false; + } + + string? headerValue = values.FirstOrDefault(); + + if (!bool.TryParse(headerValue, out bool value)) + { + throw new SpeckleException( + $"Headers contained {HEADER} header, but value {headerValue} could not be parsed to a bool" + ); + } + + return value; + } + + private static string GenerateChallenge() + { + using RNGCryptoServiceProvider rng = new(); + byte[] challengeData = new byte[32]; + rng.GetBytes(challengeData); + + //escaped chars like % do not play nice with the server + return Regex.Replace(Convert.ToBase64String(challengeData), @"[^\w\.@-]", ""); + } + + /// + [Obsolete("Use URI overload (note: that one throws exceptions, this one returns null)")] + public static async Task GetServerInfo(string server) + { + try + { + return await GetServerInfo(new Uri(server)).ConfigureAwait(false); + } + catch (SpeckleGraphQLException ex) + { + SpeckleLog.Logger.Warning( + ex, + "Swallowing exception in {methodName}: {exceptionMessage}", + nameof(GetServerInfo), + ex.Message + ); + return null; + } + } + + /// + [Obsolete("Use URI overload (note: that one throws exceptions, this one returns null)")] + public static async Task GetUserInfo(string token, string server) + { + try + { + return await GetUserInfo(token, new Uri(server)).ConfigureAwait(false); + } + catch (SpeckleGraphQLException ex) + { + SpeckleLog.Logger.Warning( + ex, + "Swallowing exception in {methodName}: {exceptionMessage}", + nameof(GetUserInfo), + ex.Message + ); + return null; + } + } +} diff --git a/src/Speckle.Core/Credentials/Exceptions.cs b/src/Speckle.Core/Credentials/Exceptions.cs new file mode 100644 index 00000000..32aba319 --- /dev/null +++ b/src/Speckle.Core/Credentials/Exceptions.cs @@ -0,0 +1,26 @@ +using System; +using Speckle.Core.Logging; + +namespace Speckle.Core.Credentials; + +public class SpeckleAccountManagerException : SpeckleException +{ + public SpeckleAccountManagerException(string message) + : base(message) { } + + public SpeckleAccountManagerException(string message, Exception? inner) + : base(message, inner) { } + + public SpeckleAccountManagerException() { } +} + +public class SpeckleAccountFlowLockedException : SpeckleAccountManagerException +{ + public SpeckleAccountFlowLockedException(string message) + : base(message) { } + + public SpeckleAccountFlowLockedException() { } + + public SpeckleAccountFlowLockedException(string message, Exception? 
innerException) + : base(message, innerException) { } +} diff --git a/src/Speckle.Core/Credentials/Responses.cs b/src/Speckle.Core/Credentials/Responses.cs new file mode 100644 index 00000000..04eb87cf --- /dev/null +++ b/src/Speckle.Core/Credentials/Responses.cs @@ -0,0 +1,64 @@ +#nullable disable +using System; +using System.Runtime.InteropServices; +using Speckle.Core.Api; + +namespace Speckle.Core.Credentials; + +[Obsolete("Use activeUser query and ActiveUserServerInfoResponse instead", true)] +public class UserServerInfoResponse +{ + public UserInfo user { get; set; } + public ServerInfo serverInfo { get; set; } +} + +public class ActiveUserServerInfoResponse +{ + public UserInfo activeUser { get; set; } + public ServerInfo serverInfo { get; set; } +} + +[Obsolete("Use activeUser query and ActiveUserResponse instead", true)] +public class UserInfoResponse +{ + public UserInfo user { get; set; } +} + +public class ActiveUserResponse +{ + public UserInfo activeUser { get; set; } +} + +[ClassInterface(ClassInterfaceType.AutoDual)] +[ComVisible(true)] +public class UserInfo +{ + public string id { get; set; } + public string name { get; set; } + public string email { get; set; } + public string company { get; set; } + public string avatar { get; set; } + + public Streams streams { get; set; } + public Commits commits { get; set; } +} + +public class TokenExchangeResponse +{ + public string token { get; set; } + public string refreshToken { get; set; } +} + +[ClassInterface(ClassInterfaceType.AutoDual)] +[ComVisible(true)] +public class Streams +{ + public int totalCount { get; set; } +} + +[ClassInterface(ClassInterfaceType.AutoDual)] +[ComVisible(true)] +public class Commits +{ + public int totalCount { get; set; } +} diff --git a/src/Speckle.Core/Credentials/StreamWrapper.cs b/src/Speckle.Core/Credentials/StreamWrapper.cs new file mode 100644 index 00000000..9d53ae7e --- /dev/null +++ b/src/Speckle.Core/Credentials/StreamWrapper.cs @@ -0,0 +1,504 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Text.RegularExpressions; +using System.Threading.Tasks; +using System.Web; +using Speckle.Core.Api; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; + +namespace Speckle.Core.Credentials; + +public class StreamWrapper +{ + private Account? _account; + + public StreamWrapper() { } + + /// + /// Creates a StreamWrapper from a stream url or a stream id + /// + /// Stream Url eg: http://speckle.server/streams/8fecc9aa6d/commits/76a23d7179 or stream ID eg: 8fecc9aa6d + /// + public StreamWrapper(string streamUrlOrId) + { + OriginalInput = streamUrlOrId; + + if (!Uri.TryCreate(streamUrlOrId, UriKind.Absolute, out _)) + { + StreamWrapperFromId(streamUrlOrId); + } + else + { + StreamWrapperFromUrl(streamUrlOrId); + } + } + + /// + /// Creates a StreamWrapper by streamId, userId and serverUrl + /// + /// + /// + /// + public StreamWrapper(string streamId, string? userId, string serverUrl) + { + UserId = userId; + ServerUrl = serverUrl; + StreamId = streamId; + + OriginalInput = $"{ServerUrl}/streams/{StreamId}{(UserId != null ? "?u=" + UserId : "")}"; + } + + //this needs to be public so it's serialized and stored in Dynamo + public string? OriginalInput { get; set; } + + public string? UserId { get; set; } + public string ServerUrl { get; set; } + public string StreamId { get; set; } + public string? CommitId { get; set; } + + /// May be an ID instead for FE2 urls + public string? BranchName { get; set; } + public string? 
ObjectId { get; set; } + + /// + /// Determines if the current stream wrapper contains a valid stream. + /// + public bool IsValid => Type != StreamWrapperType.Undefined; + + public StreamWrapperType Type + { + // Quick solution to determine whether a wrapper points to a branch, commit or stream. + get + { + if (!string.IsNullOrEmpty(ObjectId)) + { + return StreamWrapperType.Object; + } + + if (!string.IsNullOrEmpty(CommitId)) + { + return StreamWrapperType.Commit; + } + + if (!string.IsNullOrEmpty(BranchName)) + { + return StreamWrapperType.Branch; + } + + // If we reach here and there is no stream id, it means that the stream is invalid for some reason. + return !string.IsNullOrEmpty(StreamId) ? StreamWrapperType.Stream : StreamWrapperType.Undefined; + } + } + + private void StreamWrapperFromId(string streamId) + { + Account? account = AccountManager.GetDefaultAccount(); + + if (account == null) + { + throw new SpeckleException("You do not have any account. Please create one or add it to the Speckle Manager."); + } + + ServerUrl = account.serverInfo.url; + UserId = account.userInfo.id; + StreamId = streamId; + } + + /// + /// The ReGex pattern to determine if a URL's AbsolutePath is a Frontend2 URL or not. + /// This is used in conjunction with to extract the correct values into the instance. + /// + private static readonly Regex s_fe2UrlRegex = + new( + @"/projects/(?[\w\d]+)(?:/models/(?[\w\d]+(?:@[\w\d]+)?)(?:,(?[\w\d]+(?:@[\w\d]+)?))*)?" + ); + + /// + /// Parses a FrontEnd2 URL Regex match and assigns it's data to this StreamWrapper instance. + /// + /// A regex match coming from + /// Will throw when the URL is not properly formatted. + /// Will throw when the URL is correct, but is not currently supported by the StreamWrapper class. + private void ParseFe2RegexMatch(Match match) + { + var projectId = match.Groups["projectId"]; + var model = match.Groups["model"]; + var additionalModels = match.Groups["additionalModels"]; + + if (!projectId.Success) + { + throw new SpeckleException("The provided url is not a valid Speckle url"); + } + + if (!model.Success) + { + throw new SpeckleException("The provided url is not pointing to any model in the project."); + } + + if (additionalModels.Success || model.Value == "all") + { + throw new NotSupportedException("Multi-model urls are not supported yet"); + } + + if (model.Value.StartsWith("$")) + { + throw new NotSupportedException("Federation model urls are not supported"); + } + + var modelRes = ParseFe2ModelValue(model.Value); + + // INFO: The Branch endpoint is being updated to fallback to checking a branch ID if no name is found. + // Assigning the BranchID as the BranchName is a workaround to support FE2 links in the old StreamWrapper. + // A better solution must be redesigned taking into account all the new Frontend2 URL features. + StreamId = projectId.Value; + BranchName = modelRes.branchId; + CommitId = modelRes.commitId; + ObjectId = modelRes.objectId; + } + + /// + /// Parses the segment of the FE2 URL that represents a modelID, modelID@versionID or objectID. + /// It is meant to parse a single value. If url is multi-model it should be used once per model. + /// + /// The a single value of the model url segment + /// A tuple containing the branch, commit and object information for that value. Each value can be null + /// Determines if a modelValue is an ObjectId by checking it's length is exactly 32 chars long. + private static (string? branchId, string? commitId, string? 
objectId) ParseFe2ModelValue(string modelValue) + { + if (modelValue.Length == 32) + { + return (null, null, modelValue); // Model value is an ObjectID + } + + if (!modelValue.Contains('@')) + { + return (modelValue, null, null); // Model has no version attached + } + + var res = modelValue.Split('@'); + return (res[0], res[1], null); // Model has version attached + } + + private void StreamWrapperFromUrl(string streamUrl) + { + Uri uri = new(streamUrl); + ServerUrl = uri.GetLeftPart(UriPartial.Authority); + + var fe2Match = s_fe2UrlRegex.Match(uri.AbsolutePath); + if (fe2Match.Success) + { + //NEW FRONTEND URL! + ParseFe2RegexMatch(fe2Match); + return; + } + + // Note: this is a hack. It's because new Uri() is parsed escaped in .net framework; wheareas in .netstandard it's not. + // Tests pass in Speckle.Core without this hack. + if (uri.Segments.Length >= 4 && uri.Segments[3]?.ToLowerInvariant() == "branches/") + { + StreamId = uri.Segments[2].Replace("/", ""); + if (uri.Segments.Length > 5) + { + var branchSegs = uri.Segments.ToList().GetRange(4, uri.Segments.Length - 4); + BranchName = Uri.UnescapeDataString(string.Concat(branchSegs)); + } + else + { + BranchName = Uri.UnescapeDataString(uri.Segments[4]); + } + } + else + { + switch (uri.Segments.Length) + { + case 3: // ie http://speckle.server/streams/8fecc9aa6d + if (uri.Segments[1].ToLowerInvariant() != "streams/") + { + throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class."); + } + else + { + StreamId = uri.Segments[2].Replace("/", ""); + } + + break; + case 4: // ie https://speckle.server/streams/0c6ad366c4/globals/ + if (uri.Segments[3].ToLowerInvariant().StartsWith("globals")) + { + StreamId = uri.Segments[2].Replace("/", ""); + BranchName = Uri.UnescapeDataString(uri.Segments[3].Replace("/", "")); + } + else + { + throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class"); + } + + break; + case 5: // ie http://speckle.server/streams/8fecc9aa6d/commits/76a23d7179 + switch (uri.Segments[3].ToLowerInvariant()) + { + // NOTE: this is a good practice reminder on how it should work + case "commits/": + StreamId = uri.Segments[2].Replace("/", ""); + CommitId = uri.Segments[4].Replace("/", ""); + break; + case "globals/": + StreamId = uri.Segments[2].Replace("/", ""); + BranchName = Uri.UnescapeDataString(uri.Segments[3].Replace("/", "")); + CommitId = uri.Segments[4].Replace("/", ""); + break; + case "branches/": + StreamId = uri.Segments[2].Replace("/", ""); + BranchName = Uri.UnescapeDataString(uri.Segments[4].Replace("/", "")); + break; + case "objects/": + StreamId = uri.Segments[2].Replace("/", ""); + ObjectId = uri.Segments[4].Replace("/", ""); + break; + default: + throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class."); + } + + break; + + default: + throw new SpeckleException($"Cannot parse {uri} into a stream wrapper class."); + } + } + + var queryDictionary = HttpUtility.ParseQueryString(uri.Query); + UserId = queryDictionary["u"]; + } + + /// + /// Gets a valid account for this stream wrapper. + /// Note: this method ensures that the stream exists and/or that the user has an account which has access to that stream. If used in a sync manner, make sure it's not blocking. + /// + /// Throws exception if account fetching failed. This could be due to non-existent account or stream. + /// The valid account object for this stream. 
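A minimal usage sketch (not part of the patch above; the server URL and ids are invented, and GetAccount is assumed to return Task<Account> as its body suggests): a commit URL is parsed into its parts, then a matching local account is resolved and validated.

static async Task<Account> ResolveWrapperExample()
{
  // FE1-style commit URL: StreamWrapperFromUrl splits it into ServerUrl, StreamId and CommitId.
  var wrapper = new StreamWrapper("https://speckle.example.com/streams/8fecc9aa6d/commits/76a23d7179");
  // wrapper.Type == StreamWrapperType.Commit, StreamId == "8fecc9aa6d", CommitId == "76a23d7179"

  // Walks the account resolution steps shown below: the ?u= hint, then the default account, then all accounts for the server.
  return await wrapper.GetAccount().ConfigureAwait(false);
}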
+ public async Task GetAccount() + { + if (_account != null) + { + return _account; + } + + // Step 1: check if direct account id (?u=) + if (OriginalInput != null && OriginalInput.Contains("?u=")) + { + var userId = OriginalInput.Split(new[] { "?u=" }, StringSplitOptions.None)[1]; + var acc = AccountManager.GetAccounts().FirstOrDefault(acc => acc.userInfo.id == userId); + if (acc != null) + { + await ValidateWithAccount(acc).ConfigureAwait(false); + _account = acc; + return acc; + } + } + + // Step 2: check the default + var defAcc = AccountManager.GetDefaultAccount(); + List err = new(); + try + { + await ValidateWithAccount(defAcc).ConfigureAwait(false); + _account = defAcc; + return defAcc; + } + catch (Exception ex) when (!ex.IsFatal()) + { + err.Add(new SpeckleException($"Account {defAcc?.userInfo?.email} failed to auth stream wrapper", ex)); + } + + // Step 3: all the rest + var accs = AccountManager.GetAccounts(ServerUrl).ToList(); + if (accs.Count == 0) + { + throw new SpeckleException($"You don't have any accounts for {ServerUrl}."); + } + + foreach (var acc in accs) + { + try + { + await ValidateWithAccount(acc).ConfigureAwait(false); + _account = acc; + return acc; + } + catch (Exception ex) when (!ex.IsFatal()) + { + err.Add(new SpeckleException($"Account {acc} failed to auth stream wrapper", ex)); + } + } + + AggregateException inner = new(null, err); + throw new SpeckleException("Failed to validate stream wrapper", inner); + } + + public void SetAccount(Account acc) + { + _account = acc; + UserId = _account.userInfo.id; + } + + public bool Equals(StreamWrapper? wrapper) + { + if (wrapper == null) + { + return false; + } + + if (Type != wrapper.Type) + { + return false; + } + + return Type == wrapper.Type + && ServerUrl == wrapper.ServerUrl + && UserId == wrapper.UserId + && StreamId == wrapper.StreamId + && Type == StreamWrapperType.Branch + && BranchName == wrapper.BranchName + || Type == StreamWrapperType.Object && ObjectId == wrapper.ObjectId + || Type == StreamWrapperType.Commit && CommitId == wrapper.CommitId; + } + + /// + /// Verifies that the state of the stream wrapper represents a valid Speckle resource e.g. points to a valid stream/branch etc. + /// + /// The account to use to verify the current state of the stream wrapper + /// The of the provided is invalid or does not match the 's + /// You are not connected to the internet + /// Verification of the current state of the stream wrapper with provided was unsuccessful. 
The could be invalid, or lack permissions for the , or the or are invalid + public async Task ValidateWithAccount(Account acc) + { + Uri url; + try + { + url = new(ServerUrl); + } + catch (UriFormatException ex) + { + throw new ArgumentException("Server Url is improperly formatted", nameof(acc), ex); + } + + if (ServerUrl != acc.serverInfo.url && url != acc.serverInfo.migration?.movedFrom) + { + throw new ArgumentException($"Account is not from server {ServerUrl}", nameof(acc)); + } + + try + { + await Http.HttpPing(url).ConfigureAwait(false); + } + catch (HttpRequestException ex) + { + throw new HttpRequestException("You are not connected to the internet.", ex); + } + + using var client = new Client(acc); + // First check if the stream exists + try + { + await client.StreamGet(StreamId).ConfigureAwait(false); + } + catch (Exception ex) when (!ex.IsFatal()) + { + throw new SpeckleException( + $"You don't have access to stream {StreamId} on server {ServerUrl}, or the stream does not exist.", + ex + ); + } + + // Check if the branch exists + if (Type == StreamWrapperType.Branch) + { + var branch = await client.BranchGet(StreamId, BranchName!, 1).ConfigureAwait(false); + if (branch == null) + { + throw new SpeckleException( + $"The branch with name '{BranchName}' doesn't exist in stream {StreamId} on server {ServerUrl}" + ); + } + } + } + + public Uri ToServerUri() + { + if (_account != null) + { + return _account.serverInfo.frontend2 ? ToProjectUri() : ToStreamUri(); + } + + if (OriginalInput != null) + { + Uri uri = new(OriginalInput); + var fe2Match = s_fe2UrlRegex.Match(uri.AbsolutePath); + return fe2Match.Success ? ToProjectUri() : ToStreamUri(); + } + + // Default to old FE1 + return ToStreamUri(); + } + + private Uri ToProjectUri() + { + var uri = new Uri(ServerUrl); + + // TODO: THis has to be the branch ID or it won't work. + var branchID = BranchName; + var leftPart = $"projects/{StreamId}/models/"; + switch (Type) + { + case StreamWrapperType.Commit: + leftPart += $"{branchID}@{CommitId}"; + break; + case StreamWrapperType.Branch: + leftPart += $"{branchID}"; + break; + case StreamWrapperType.Object: + leftPart += $"{ObjectId}"; + break; + } + var acc = $"{(UserId != null ? "?u=" + UserId : "")}"; + + var finalUri = new Uri(uri, leftPart + acc); + return finalUri; + } + + private Uri ToStreamUri() + { + var uri = new Uri(ServerUrl); + var leftPart = $"streams/{StreamId}"; + switch (Type) + { + case StreamWrapperType.Commit: + leftPart += $"/commits/{CommitId}"; + break; + case StreamWrapperType.Branch: + leftPart += $"/branches/{BranchName}"; + break; + case StreamWrapperType.Object: + leftPart += $"/objects/{ObjectId}"; + break; + } + var acc = $"{(UserId != null ? 
"?u=" + UserId : "")}"; + + var finalUri = new Uri(uri, leftPart + acc); + return finalUri; + } + + public override string ToString() => ToServerUri().ToString(); +} + +public enum StreamWrapperType +{ + Undefined, + Stream, + Commit, + Branch, + Object +} diff --git a/src/Speckle.Core/Credentials/graphql.config.yml b/src/Speckle.Core/Credentials/graphql.config.yml new file mode 100644 index 00000000..64c50ab2 --- /dev/null +++ b/src/Speckle.Core/Credentials/graphql.config.yml @@ -0,0 +1,2 @@ +schema: https://app.speckle.systems/graphql +documents: '**/*.graphql' diff --git a/src/Speckle.Core/Helpers/Constants.cs b/src/Speckle.Core/Helpers/Constants.cs new file mode 100644 index 00000000..07ccf51b --- /dev/null +++ b/src/Speckle.Core/Helpers/Constants.cs @@ -0,0 +1,26 @@ +using System; +using System.Diagnostics.CodeAnalysis; +using System.Text.RegularExpressions; + +namespace Speckle.Core.Helpers; + +public static class Constants +{ + public const double EPS = 1e-5; + public const double SMALL_EPS = 1e-8; + public const double EPS_SQUARED = EPS * EPS; + + public static readonly Regex ChunkPropertyNameRegex = new(@"^@\((\d*)\)"); //TODO: Experiment with compiled flag + + [Obsolete("Renamed to " + nameof(EPS))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public const double Eps = 1e-5; + + [Obsolete("Renamed to " + nameof(SMALL_EPS))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public const double SmallEps = 1e-8; + + [Obsolete("Renamed to " + nameof(EPS_SQUARED))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public const double Eps2 = Eps * Eps; +} diff --git a/src/Speckle.Core/Helpers/Crypt.cs b/src/Speckle.Core/Helpers/Crypt.cs new file mode 100644 index 00000000..307980fe --- /dev/null +++ b/src/Speckle.Core/Helpers/Crypt.cs @@ -0,0 +1,56 @@ +using System; +using System.Diagnostics.CodeAnalysis; +using System.Diagnostics.Contracts; +using System.Security.Cryptography; +using System.Text; + +namespace Speckle.Core.Helpers; + +public static class Crypt +{ + /// the value to hash + /// NumericFormat + /// + /// + /// the hash string + /// is not a recognised numeric format + /// + [Pure] + public static string Sha256(string input, string? format = "x2", int startIndex = 0, int length = 64) + { + var inputBytes = Encoding.UTF8.GetBytes(input); + + using var sha256 = SHA256.Create(); + byte[] hash = sha256.ComputeHash(inputBytes); + + StringBuilder sb = new(64); + foreach (byte b in hash) + { + sb.Append(b.ToString(format)); + } + + return sb.ToString(startIndex, length); + } + + /// + /// MD5 is a broken cryptographic algorithm and should be used subject to review see CA5351 + [Pure] + [SuppressMessage("Security", "CA5351:Do Not Use Broken Cryptographic Algorithms")] + public static string Md5(string input, string? 
format = "x2", int startIndex = 0, int length = 32) + { + using MD5 md5 = MD5.Create(); + byte[] inputBytes = Encoding.ASCII.GetBytes(input.ToLowerInvariant()); + byte[] hashBytes = md5.ComputeHash(inputBytes); + + StringBuilder sb = new(32); + for (int i = 0; i < hashBytes.Length; i++) + { + sb.Append(hashBytes[i].ToString(format)); + } + + return sb.ToString(startIndex, length); + } + + [Obsolete("Use Md5(input, \"X2\") instead")] + public static string Hash(string input) => Md5(input, "X2"); +} diff --git a/src/Speckle.Core/Helpers/Http.cs b/src/Speckle.Core/Helpers/Http.cs new file mode 100644 index 00000000..8cb90768 --- /dev/null +++ b/src/Speckle.Core/Helpers/Http.cs @@ -0,0 +1,251 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Net; +using System.Net.Http; +using System.Net.NetworkInformation; +using System.Net.Sockets; +using System.Threading; +using System.Threading.Tasks; +using Polly; +using Polly.Contrib.WaitAndRetry; +using Polly.Extensions.Http; +using Polly.Retry; +using Serilog.Context; +using Speckle.Core.Credentials; +using Speckle.Core.Logging; + +namespace Speckle.Core.Helpers; + +public static class Http +{ + public static IEnumerable DefaultDelay() + { + return Backoff.DecorrelatedJitterBackoffV2(TimeSpan.FromMilliseconds(100), 5); + } + + public static AsyncRetryPolicy HttpAsyncPolicy(IEnumerable? delay = null) + { + return HttpPolicyExtensions + .HandleTransientHttpError() + .WaitAndRetryAsync( + delay ?? DefaultDelay(), + (ex, timeSpan, retryAttempt, context) => { + //context.Remove("retryCount"); + //context.Add("retryCount", retryAttempt); + //Log.Information( + // ex.Exception, + // "The http request failed with {exceptionType} exception retrying after {cooldown} milliseconds. This is retry attempt {retryAttempt}", + // ex.GetType().Name, + // timeSpan.TotalSeconds * 1000, + // retryAttempt + //); + } + ); + } + + /// + /// Checks if the user has a valid internet connection by first pinging cloudfare (fast) + /// and then trying get from the default Speckle server (slower) + /// + /// True if the user is connected to the internet, false otherwise. + public static async Task UserHasInternet() + { + string? defaultServer = null; + try + { + //Perform a quick ping test e.g. to cloudflaire dns, as is quicker than pinging server + if (await Ping("1.1.1.1").ConfigureAwait(false)) + { + return true; + } + + defaultServer = AccountManager.GetDefaultServerUrl(); + Uri serverUrl = new(defaultServer); + await HttpPing(serverUrl).ConfigureAwait(false); + return true; + } + catch (HttpRequestException ex) + { + SpeckleLog.Logger.ForContext("defaultServer", defaultServer).Warning(ex, "Failed to ping internet"); + + return false; + } + } + + /// + /// Pings a specific url to verify it's accessible. Retries 3 times. + /// + /// The hostname or address to ping. + /// True if the the status code is 200, false otherwise. + public static async Task Ping(string hostnameOrAddress) + { + SpeckleLog.Logger.Information("Pinging {hostnameOrAddress}", hostnameOrAddress); + var policy = Policy + .Handle() + .Or() + .WaitAndRetryAsync( + DefaultDelay(), + (ex, timeSpan, retryAttempt, context) => { + //Log.Information( + // ex, + // "The http request failed with {exceptionType} exception retrying after {cooldown} milliseconds. 
This is retry attempt {retryAttempt}", + // ex.GetType().Name, + // timeSpan.TotalSeconds * 1000, + // retryAttempt + //); + } + ); + var policyResult = await policy + .ExecuteAndCaptureAsync(async () => + { + Ping myPing = new(); + var hostname = + Uri.CheckHostName(hostnameOrAddress) != UriHostNameType.Unknown + ? hostnameOrAddress + : new Uri(hostnameOrAddress).DnsSafeHost; + byte[] buffer = new byte[32]; + int timeout = 1000; + PingOptions pingOptions = new(); + PingReply reply = await myPing.SendPingAsync(hostname, timeout, buffer, pingOptions).ConfigureAwait(false); + if (reply.Status != IPStatus.Success) + { + throw new SpeckleException($"The ping operation failed with status {reply.Status}"); + } + + return true; + }) + .ConfigureAwait(false); + if (policyResult.Outcome == OutcomeType.Successful) + { + return true; + } + + SpeckleLog.Logger.Warning( + policyResult.FinalException, + "Failed to ping {hostnameOrAddress} cause: {exceptionMessage}", + policyResult.FinalException.Message + ); + return false; + } + + /// + /// Sends a GET request to the provided + /// + /// The URI that should be pinged + /// Request to failed + public static async Task HttpPing(Uri uri) + { + try + { + using var httpClient = GetHttpProxyClient(); + HttpResponseMessage response = await httpClient.GetAsync(uri).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + SpeckleLog.Logger.Information("Successfully pinged {uri}", uri); + return response; + } + catch (HttpRequestException ex) + { + SpeckleLog.Logger.Warning(ex, "Ping to {uri} was unsuccessful: {message}", uri, ex.Message); + throw new HttpRequestException($"Ping to {uri} was unsuccessful", ex); + } + } + + public static HttpClient GetHttpProxyClient(SpeckleHttpClientHandler? handler = null, TimeSpan? timeout = null) + { + IWebProxy proxy = WebRequest.GetSystemWebProxy(); + proxy.Credentials = CredentialCache.DefaultCredentials; + + handler ??= new SpeckleHttpClientHandler(); + var client = new HttpClient(handler) { Timeout = timeout ?? TimeSpan.FromSeconds(100) }; + return client; + } + + public static bool CanAddAuth(string? authToken, out string? bearerHeader) + { + if (!string.IsNullOrEmpty(authToken)) + { + bearerHeader = authToken!.ToLowerInvariant().Contains("bearer") ? authToken : $"Bearer {authToken}"; + return true; + } + + bearerHeader = null; + return false; + } + + public static void AddAuthHeader(HttpClient client, string? authToken) + { + if (CanAddAuth(authToken, out string? value)) + { + client.DefaultRequestHeaders.Add("Authorization", value); + } + } +} + +public sealed class SpeckleHttpClientHandler : HttpClientHandler +{ + private readonly IEnumerable _delay; + + public SpeckleHttpClientHandler(IEnumerable? delay = null) + { + _delay = delay ?? 
Http.DefaultDelay(); + } + + /// requested cancel + /// Send request failed + protected override async Task SendAsync( + HttpRequestMessage request, + CancellationToken cancellationToken + ) + { + // this is a preliminary client server correlation implementation + // refactor this, when we have a better observability stack + var context = new Context(); + using (LogContext.PushProperty("correlationId", context.CorrelationId)) + using (LogContext.PushProperty("targetUrl", request.RequestUri)) + using (LogContext.PushProperty("httpMethod", request.Method)) + { + SpeckleLog.Logger.Debug("Starting execution of http request to {targetUrl}", request.RequestUri); + var timer = new Stopwatch(); + timer.Start(); + context.Add("retryCount", 0); + var policyResult = await Http.HttpAsyncPolicy(_delay) + .ExecuteAndCaptureAsync( + ctx => + { + request.Headers.Add("x-request-id", ctx.CorrelationId.ToString()); + return base.SendAsync(request, cancellationToken); + }, + context + ) + .ConfigureAwait(false); + timer.Stop(); + var status = policyResult.Outcome == OutcomeType.Successful ? "succeeded" : "failed"; + context.TryGetValue("retryCount", out var retryCount); + SpeckleLog.Logger + .ForContext("ExceptionType", policyResult.FinalException?.GetType()) + .Information( + "Execution of http request to {httpScheme}://{hostUrl}/{relativeUrl} {resultStatus} with {httpStatusCode} after {elapsed} seconds and {retryCount} retries", + request.RequestUri.Scheme, + request.RequestUri.Host, + request.RequestUri.PathAndQuery, + status, + policyResult.Result?.StatusCode, + timer.Elapsed.TotalSeconds, + retryCount ?? 0 + ); + if (policyResult.Outcome == OutcomeType.Successful) + { + return policyResult.Result!; + } + + // if the policy failed due to a cancellation, AND it was our cancellation token, then don't wrap the exception, and rethrow an new cancellation + if (policyResult.FinalException is OperationCanceledException) + { + cancellationToken.ThrowIfCancellationRequested(); + } + + throw new HttpRequestException("Policy Failed", policyResult.FinalException); + } + } +} diff --git a/src/Speckle.Core/Helpers/Path.cs b/src/Speckle.Core/Helpers/Path.cs new file mode 100644 index 00000000..999639c8 --- /dev/null +++ b/src/Speckle.Core/Helpers/Path.cs @@ -0,0 +1,175 @@ +using System; +using System.IO; +using System.Reflection; +using Speckle.Core.Logging; + +namespace Speckle.Core.Helpers; + +/// +/// Helper class dedicated for Speckle specific Path operations. +/// +public static class SpecklePathProvider +{ + private static string s_applicationName = "Speckle"; + + private static string s_blobFolderName = "Blobs"; + + private static string s_kitsFolderName = "Kits"; + + private static string s_accountsFolderName = "Accounts"; + + private static string s_objectsFolderName = "Objects"; + + private const string LOG_FOLDER_NAME = "Logs"; + + private static string UserDataPathEnvVar => "SPECKLE_USERDATA_PATH"; + private static string? Path => Environment.GetEnvironmentVariable(UserDataPathEnvVar); + + /// + /// Get the installation path. + /// + public static string InstallApplicationDataPath => + Assembly.GetAssembly(typeof(SpecklePathProvider)).Location.Contains("ProgramData") + ? 
Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData) + : UserApplicationDataPath(); + + /// + /// Get the path where the Speckle applications should be installed + /// + public static string InstallSpeckleFolderPath => EnsureFolderExists(InstallApplicationDataPath, s_applicationName); + + /// + /// Get the folder where the user's Speckle data should be stored. + /// + public static string UserSpeckleFolderPath => EnsureFolderExists(UserApplicationDataPath(), s_applicationName); + + /// + /// Get the folder where the Speckle kits should be stored. + /// + public static string KitsFolderPath => EnsureFolderExists(InstallSpeckleFolderPath, s_kitsFolderName); + + /// + /// + /// + public static string ObjectsFolderPath => EnsureFolderExists(KitsFolderPath, s_objectsFolderName); + + /// + /// Get the folder where the Speckle accounts data should be stored. + /// + public static string AccountsFolderPath => EnsureFolderExists(UserSpeckleFolderPath, s_accountsFolderName); + + /// + /// Override the global Speckle application name. + /// + /// + public static void OverrideApplicationName(string applicationName) + { + s_applicationName = applicationName; + } + + /// + /// Override the global Speckle application data path. + /// + public static void OverrideApplicationDataPath(string? path) + { + Environment.SetEnvironmentVariable(UserDataPathEnvVar, path); + } + + /// + /// Override the global Blob storage folder name. + /// + public static void OverrideBlobStorageFolder(string blobFolderName) + { + s_blobFolderName = blobFolderName; + } + + /// + /// Override the global Kits folder name. + /// + public static void OverrideKitsFolderName(string kitsFolderName) + { + s_kitsFolderName = kitsFolderName; + } + + /// + /// Override the global Accounts folder name. + /// + public static void OverrideAccountsFolderName(string accountsFolderName) + { + s_accountsFolderName = accountsFolderName; + } + + /// + /// + /// + public static void OverrideObjectsFolderName(string objectsFolderName) + { + s_objectsFolderName = objectsFolderName; + } + + /// + /// Get the platform specific user configuration folder path. + /// + public static string UserApplicationDataPath() + { + // if we have an override, just return that + var pathOverride = Path; + if (pathOverride != null && !string.IsNullOrEmpty(pathOverride)) + { + return pathOverride; + } + + // on desktop linux and macos we use the appdata. + // but we might not have write access to the disk + // so the catch falls back to the user profile + try + { + return Environment.GetFolderPath( + Environment.SpecialFolder.ApplicationData, + // if the folder doesn't exist, we get back an empty string on OSX, + // which in turn, breaks other stuff down the line. + // passing in the Create option ensures that this directory exists, + // which is not a given on all OS-es. + Environment.SpecialFolderOption.Create + ); + } + catch (SystemException ex) when (ex is PlatformNotSupportedException or ArgumentException) + { + //Adding this log just so we confidently know which Exception type to catch here. + SpeckleLog.Logger.Warning(ex, "Falling back to user profile path"); + + // on server linux, there might not be a user setup, things can run under root + // in that case, the appdata variable is most probably not set up + // we fall back to the value of the home folder + return Environment.GetFolderPath(Environment.SpecialFolder.UserProfile); + } + } + + /// + /// Get the folder where the user's Speckle blobs should be stored. 
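A short sketch of how the overrides above combine (the folder path is hypothetical): redirect the user data location, then read two derived folders, each of which is created on first access by EnsureFolderExists.

// Equivalent to setting the SPECKLE_USERDATA_PATH environment variable.
SpecklePathProvider.OverrideApplicationDataPath("/tmp/speckle-userdata");

string accountsDir = SpecklePathProvider.AccountsFolderPath; // /tmp/speckle-userdata/Speckle/Accounts
string blobsDir = SpecklePathProvider.BlobStoragePath();     // /tmp/speckle-userdata/Speckle/Blobs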
+ /// + public static string BlobStoragePath(string? path = null) + { + return EnsureFolderExists(path ?? UserSpeckleFolderPath, s_blobFolderName); + } + + private static string EnsureFolderExists(string basePath, string folderName) + { + var path = System.IO.Path.Combine(basePath, folderName); + Directory.CreateDirectory(path); + return path; + } + + /// + /// Get the folder where the Speckle logs should be stored. + /// + /// Name of the application using this SDK ie.: "Rhino" + /// Public version slug of the application using this SDK ie.: "2023" + public static string LogFolderPath(string hostApplicationName, string? hostApplicationVersion) + { + return EnsureFolderExists( + EnsureFolderExists(UserSpeckleFolderPath, LOG_FOLDER_NAME), + $"{hostApplicationName}{hostApplicationVersion ?? ""}" + ); + } +} diff --git a/src/Speckle.Core/Helpers/State.cs b/src/Speckle.Core/Helpers/State.cs new file mode 100644 index 00000000..6dbebec5 --- /dev/null +++ b/src/Speckle.Core/Helpers/State.cs @@ -0,0 +1,89 @@ +#nullable disable +using System; + +namespace Speckle.Core.Helpers; + +public class State : IDisposable + where T : State, new() +{ + private static T root; + private static T current; + private T previous = current; + + private static readonly object StateChangeLock = new(); + + protected State() + { + lock (StateChangeLock) + { + current = (T)this; + if (root == null) + { + root = (T)this; + } + } + } + + void IDisposable.Dispose() + { + lock (StateChangeLock) + { + if (previous == null) + { + return; // Already disposed or root + } + + if (current == this) + { + current = previous; + } + else + { + // If this state is still in the stack is safe to pop it + var state = this; + do + { + if (state == root) + { + current = previous; + break; + } + + state = state.previous; + } while (state != null); + } + + previous = null; + } + } + + public static T Peek => current; + + public static T Push() + { + lock (StateChangeLock) + { + var peek = current ?? 
new T(); + var top = (T)peek.MemberwiseClone(); + top.previous = current; + + return current = top; + } + } + + public static void Pop() + { + lock (StateChangeLock) + { + ((IDisposable)current).Dispose(); + } + } + + protected void Pull() + { + lock (StateChangeLock) + { + ((IDisposable)this).Dispose(); + } + } +} diff --git a/src/Speckle.Core/Kits/Applications.cs b/src/Speckle.Core/Kits/Applications.cs new file mode 100644 index 00000000..ea3ccb01 --- /dev/null +++ b/src/Speckle.Core/Kits/Applications.cs @@ -0,0 +1,264 @@ +using System.Diagnostics.Contracts; + +namespace Speckle.Core.Kits; + +public enum HostAppVersion +{ + v, + v3, + v6, + v7, + v8, + v2019, + v2020, + v2021, + v2022, + v2023, + v2024, + v2025, + vSandbox, + vRevit, + vRevit2021, + vRevit2022, + vRevit2023, + vRevit2024, + vRevit2025, + v25, + v26, + v715, + v716, + v717 +} + +public readonly struct HostApplication +{ + public string Name { get; } + public string Slug { get; } + + public HostApplication(string name, string slug) + { + Name = name; + Slug = slug; + } + + /// + /// Returns the versioned app name given a specific version + /// + /// + /// + public string GetVersion(HostAppVersion version) + { + return Name.Replace(" ", "") + version.ToString().TrimStart('v'); + } +} + +/// +/// List of Host Applications - their slugs should match our ghost tags and ci/cd slugs +/// +public static class HostApplications +{ + public static readonly HostApplication Rhino = new("Rhino", "rhino"), + Grasshopper = new("Grasshopper", "grasshopper"), + Revit = new("Revit", "revit"), + Dynamo = new("Dynamo", "dynamo"), + Unity = new("Unity", "unity"), + GSA = new("GSA", "gsa"), + Civil = new("Civil 3D", "civil3d"), + AutoCAD = new("AutoCAD", "autocad"), + MicroStation = new("MicroStation", "microstation"), + OpenRoads = new("OpenRoads", "openroads"), + OpenRail = new("OpenRail", "openrail"), + OpenBuildings = new("OpenBuildings", "openbuildings"), + ETABS = new("ETABS", "etabs"), + SAP2000 = new("SAP2000", "sap2000"), + CSiBridge = new("CSiBridge", "csibridge"), + SAFE = new("SAFE", "safe"), + TeklaStructures = new("Tekla Structures", "teklastructures"), + Dxf = new("DXF Converter", "dxf"), + Excel = new("Excel", "excel"), + Unreal = new("Unreal", "unreal"), + PowerBI = new("Power BI", "powerbi"), + Blender = new("Blender", "blender"), + QGIS = new("QGIS", "qgis"), + ArcGIS = new("ArcGIS", "arcgis"), + SketchUp = new("SketchUp", "sketchup"), + Archicad = new("Archicad", "archicad"), + TopSolid = new("TopSolid", "topsolid"), + Python = new("Python", "python"), + NET = new(".NET", "net"), + Navisworks = new("Navisworks", "navisworks"), + AdvanceSteel = new("Advance Steel", "advancesteel"), + Other = new("Other", "other"); + + /// + /// Gets a HostApplication form a string. It could be the versioned name or a string coming from a process running. + /// + /// String with the name of the app + /// + [Pure] + public static HostApplication GetHostAppFromString(string? 
appname) + { + if (appname == null) + { + return Other; + } + + appname = appname.ToLowerInvariant().Replace(" ", ""); + if (appname.Contains("dynamo")) + { + return Dynamo; + } + + if (appname.Contains("revit")) + { + return Revit; + } + + if (appname.Contains("autocad")) + { + return AutoCAD; + } + + if (appname.Contains("civil")) + { + return Civil; + } + + if (appname.Contains("rhino")) + { + return Rhino; + } + + if (appname.Contains("grasshopper")) + { + return Grasshopper; + } + + if (appname.Contains("unity")) + { + return Unity; + } + + if (appname.Contains("gsa")) + { + return GSA; + } + + if (appname.Contains("microstation")) + { + return MicroStation; + } + + if (appname.Contains("openroads")) + { + return OpenRoads; + } + + if (appname.Contains("openrail")) + { + return OpenRail; + } + + if (appname.Contains("openbuildings")) + { + return OpenBuildings; + } + + if (appname.Contains("etabs")) + { + return ETABS; + } + + if (appname.Contains("sap")) + { + return SAP2000; + } + + if (appname.Contains("csibridge")) + { + return CSiBridge; + } + + if (appname.Contains("safe")) + { + return SAFE; + } + + if (appname.Contains("teklastructures")) + { + return TeklaStructures; + } + + if (appname.Contains("dxf")) + { + return Dxf; + } + + if (appname.Contains("excel")) + { + return Excel; + } + + if (appname.Contains("unreal")) + { + return Unreal; + } + + if (appname.Contains("powerbi")) + { + return PowerBI; + } + + if (appname.Contains("blender")) + { + return Blender; + } + + if (appname.Contains("qgis")) + { + return QGIS; + } + + if (appname.Contains("arcgis")) + { + return ArcGIS; + } + + if (appname.Contains("sketchup")) + { + return SketchUp; + } + + if (appname.Contains("archicad")) + { + return Archicad; + } + + if (appname.Contains("topsolid")) + { + return TopSolid; + } + + if (appname.Contains("python")) + { + return Python; + } + + if (appname.Contains("net")) + { + return NET; + } + + if (appname.Contains("navisworks")) + { + return Navisworks; + } + + if (appname.Contains("advancesteel")) + { + return AdvanceSteel; + } + + return new HostApplication(appname, appname); + } +} diff --git a/src/Speckle.Core/Kits/Attributes.cs b/src/Speckle.Core/Kits/Attributes.cs new file mode 100644 index 00000000..f706859a --- /dev/null +++ b/src/Speckle.Core/Kits/Attributes.cs @@ -0,0 +1,66 @@ +#nullable disable +using System; + +namespace Speckle.Core.Kits; + +[AttributeUsage(AttributeTargets.Constructor)] +public sealed class SchemaInfo : Attribute +{ + public SchemaInfo(string name, string description) + : this(name, description, null, null) { } + + public SchemaInfo(string name, string description, string category, string subcategory) + { + Name = name; + Description = description; + Category = category; + Subcategory = subcategory; + } + + public string Subcategory { get; } + + public string Category { get; } + + public string Description { get; } + + public string Name { get; } +} + +[AttributeUsage(AttributeTargets.Constructor)] +public sealed class SchemaDeprecated : Attribute { } + +[AttributeUsage(AttributeTargets.Parameter)] +public sealed class SchemaParamInfo : Attribute +{ + public SchemaParamInfo(string description) + { + Description = description; + } + + public string Description { get; } +} + +/// +/// Used to indicate which is the main input parameter of the schema builder component. Schema info will be attached to this object. 
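A sketch of how the schema-builder attributes above are typically combined on a constructor; the GrabRail class, its parameters and the use of Speckle.Core.Models.Base are illustrative only.

public class GrabRail : Base
{
  [SchemaInfo("Grab rail", "Creates a grab rail attached to a host element", "BIM", "Fixtures")]
  public GrabRail(
    [SchemaMainParam] Base host, // schema info is attached to this input
    [SchemaParamInfo("Rail length, in model units")] double length
  )
  {
    this["host"] = host;
    this["length"] = length;
  }

  public GrabRail() { } // parameterless constructor, conventionally kept for deserialization
}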
+/// +[AttributeUsage(AttributeTargets.Parameter)] +public sealed class SchemaMainParam : Attribute { } + +// TODO: this could be nuked, as it's only used to hide props on Base, +// which we might want to expose anyways... +/// +/// Used to ignore properties from expand objects etc +/// +[AttributeUsage(AttributeTargets.Property)] +public sealed class SchemaIgnore : Attribute { } + +[AttributeUsage(AttributeTargets.Method)] +public sealed class SchemaComputedAttribute : Attribute +{ + public SchemaComputedAttribute(string name) + { + Name = name; + } + + public string Name { get; } +} diff --git a/src/Speckle.Core/Kits/ConverterInterfaces/IFinalizable.cs b/src/Speckle.Core/Kits/ConverterInterfaces/IFinalizable.cs new file mode 100644 index 00000000..7f10c2fc --- /dev/null +++ b/src/Speckle.Core/Kits/ConverterInterfaces/IFinalizable.cs @@ -0,0 +1,6 @@ +namespace Speckle.Core.Kits.ConverterInterfaces; + +public interface IFinalizable +{ + void FinalizeConversion(); +} diff --git a/src/Speckle.Core/Kits/Exceptions.cs b/src/Speckle.Core/Kits/Exceptions.cs new file mode 100644 index 00000000..6e3c4d4d --- /dev/null +++ b/src/Speckle.Core/Kits/Exceptions.cs @@ -0,0 +1,140 @@ +using System; +using Speckle.Core.Logging; + +namespace Speckle.Core.Kits; + +/// +/// Exception thrown when an fails to load/initialise +/// +/// +/// Does NOT inherit from , because this usage of this exception is not dependent on Speckle Data (user data) +/// Ideally, this exception should contain a meaningful message, and a reference to the +/// +public class KitException : Exception +{ + /// + /// A reference to the that failed to perform + /// + public ISpeckleKit? Kit { get; } + + public KitException(string? message, ISpeckleKit? kit, Exception? innerException = null) + : base(message, innerException) + { + Kit = kit; + } + + public KitException() { } + + public KitException(string? message) + : base(message) { } + + public KitException(string? message, Exception? innerException) + : base(message, innerException) { } +} + +/// +/// Exception thrown when conversion of an object was not successful +/// +/// +/// Ideally this exception contains a meaningful message, and reference to the object that failed to be converted. +/// This exception can be used for both ToSpeckle and ToNative conversion +/// +public class ConversionException : SpeckleException +{ + private object? ObjectThatFailed { get; } + + public ConversionException(string? message, object? objectToConvert, Exception? innerException = null) + : base(message, innerException) + { + ObjectThatFailed = objectToConvert; + } + + public ConversionException(string? message, Exception? innerException) + : base(message, innerException) { } + + public ConversionException(string? message) + : base(message) { } + + public ConversionException() { } +} + +/// +/// Exception used when an object could not be converted, because we don't support a specific conversion. +/// +/// +/// This Exception should be thrown as part of a pre-emptive check in conversions (not as part reactive error handling) +/// and usage (throwing) should not be dependent on external state: +/// i.e. given the same object and converter state, the outcome (exception throw or not) should be the same. +/// +/// +/// It can be used for: +///
    +///
+///  • objects whose type we don't support (e.g. "Walls are not supported")
+///  • objects with a property whose value we don't support (e.g. "Beams with shape type of Circular are not supported")
+///  • complex object requirements (e.g. "We don't support walls with zero width and no displayValue")
+///
+/// It should NOT be used for:
+///  • Invalid Speckle Objects (e.g. "We don't support walls with null lines")
+///  • Objects that we have already converted, and therefore now skip (e.g. "A Wall with the same name was already converted")
+///  • Reactive error handling (e.g. "Failed to convert wall, I guess it wasn't supported")
+///
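A sketch of the pre-emptive check described in the remarks above, inside a hypothetical ToNative conversion; the wall object, its "shape" property and the BuildNativeWall helper are invented.

public object WallToNative(Base wall)
{
  // State-independent guard: the same wall always produces the same outcome.
  if (Equals(wall["shape"], "Circular"))
  {
    throw new ConversionNotSupportedException("Walls with shape type of Circular are not supported", wall);
  }

  return BuildNativeWall(wall); // hypothetical helper doing the actual conversion
}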
+public class ConversionNotSupportedException : ConversionException +{ + public ConversionNotSupportedException(string? message, object? objectToConvert, Exception? innerException = null) + : base(message, objectToConvert, innerException) { } + + public ConversionNotSupportedException(string message, Exception innerException) + : base(message, innerException) { } + + public ConversionNotSupportedException(string message) + : base(message) { } + + public ConversionNotSupportedException() { } +} + +/// +/// Exception thrown when an object was desirably skipped
+///
+/// +/// Avoid throwing this exception Type!
+/// As it introduces some bad patterns for exception handling. +///
+/// Namely, it encodes how the exception WILL be handled, Not simply what HAS happened. +/// Exceptions shouldn't care how they are handled. +///
+/// We were also misusing this exception in Revit, to correct for ambiguity in the way certain objects should be traversed, +/// by selectively skipping objects that were already converted by other means. +///
+[Obsolete("Avoid using this type. Use " + nameof(ConversionNotSupportedException) + " instead, if appropriate")] +public class ConversionSkippedException : ConversionException +{ + public ConversionSkippedException(string? message, object? objectToConvert, Exception? innerException = null) + : base(message, objectToConvert, innerException) { } + + public ConversionSkippedException(string message, Exception innerException) + : base(message, innerException) { } + + public ConversionSkippedException(string message) + : base(message) { } + + public ConversionSkippedException() { } +} + +/// +/// Exception thrown when an object was not ready to be baked into the document (i.e. the element's host doesn't exist yet) +/// +public class ConversionNotReadyException : ConversionException +{ + public ConversionNotReadyException(string? message, object? objectToConvert, Exception? innerException = null) + : base(message, objectToConvert, innerException) { } + + public ConversionNotReadyException(string message, Exception innerException) + : base(message, innerException) { } + + public ConversionNotReadyException(string message) + : base(message) { } + + public ConversionNotReadyException() { } +} diff --git a/src/Speckle.Core/Kits/ISpeckleConverter.cs b/src/Speckle.Core/Kits/ISpeckleConverter.cs new file mode 100644 index 00000000..c7ba36c5 --- /dev/null +++ b/src/Speckle.Core/Kits/ISpeckleConverter.cs @@ -0,0 +1,152 @@ +#nullable disable +using System.Collections.Generic; +using Speckle.Core.Models; + +namespace Speckle.Core.Kits; + +public interface ISpeckleConverter +{ + string Description { get; } + string Name { get; } + string Author { get; } + string WebsiteOrEmail { get; } + + /// + /// Keeps track of the conversion process + /// + public ProgressReport Report { get; } + + /// + /// Decides what to do when an element being received already exists + /// + public ReceiveMode ReceiveMode { get; set; } + + /// + /// Converts a native object to a Speckle one + /// + /// Native object to convert + /// + /// + public Base ConvertToSpeckle(object value); + + /// + /// Converts a list of objects to Speckle. + /// + /// + /// + /// + public List ConvertToSpeckle(List values); + + /// + /// Checks if it can convert a native object to a Speckle one + /// + /// Native object to convert + /// + public bool CanConvertToSpeckle(object value); + + /// + /// Converts a Speckle object to a native one + /// + /// Speckle object to convert + /// + /// + public object ConvertToNative(Base value); + + /// + /// Converts a list of Speckle objects to a native ones. + /// + /// + /// + /// + public List ConvertToNative(List values); + + /// + /// Converts a given speckle objects as a generic native object. + /// This should assume has been called and returned True, + /// or call it within this method's implementation to ensure non-displayable objects are gracefully handled. + /// + /// + /// This method should not try to convert an object to it's native representation (i.e Speckle Wall -> Wall), + /// but rather use the 'displayValue' of that wall to create a geometrically correct representation of that object + /// in the native application. + /// An object may be able to be converted both with and . + /// In this case, deciding which to use is dependent on each connector developer. + /// Preferably, should be used as a fallback to the logic. 
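A sketch of the fallback order described above, as a connector might apply it; ConvertWithDisplayFallback is an invented helper name and converter is any ISpeckleConverter implementation.

public static object? ConvertWithDisplayFallback(ISpeckleConverter converter, Base speckleObject)
{
  if (converter.CanConvertToNative(speckleObject))
  {
    return converter.ConvertToNative(speckleObject); // full, type-aware conversion first
  }

  if (converter.CanConvertToNativeDisplayable(speckleObject))
  {
    return converter.ConvertToNativeDisplayable(speckleObject); // geometric fallback via displayValue
  }

  return null; // this converter cannot handle the object
}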
+ /// + /// Speckle object to convert + /// The native object that resulted after converting the input + public object ConvertToNativeDisplayable(Base value); + + /// + /// Checks if it can convert a Speckle object to a native one + /// + /// Speckle object to convert + /// + public bool CanConvertToNative(Base value); + + /// + /// Checks to verify if a given object is: 1) displayable and 2) can be supported for conversion to the native application. + /// An object is considered "displayable" if it has a 'displayValue' property (defined in its class or dynamically attached to it, detached or not). + /// + /// + /// An object may return "True" for both and + /// In this case, deciding which to use is dependent on each connector developer. + /// Preferably, should be used as a fallback to the logic. + /// Objects found in the 'displayValue' property are assumed to be universally convertible by all converters and the viewer, but are not guaranteed to be so. + /// + /// Speckle object to convert + /// True if the object is "displayable" and the converter supports native conversion of the given speckle object in particular. + public bool CanConvertToNativeDisplayable(Base value); + + /// + /// Returns a list of applications serviced by this converter + /// + /// + public IEnumerable GetServicedApplications(); + + /// + /// Sets the application document that the converter is targeting + /// + /// The current application document + public void SetContextDocument(object doc); + + /// + /// Some converters need to know which other objects are being converted, in order to sort relationships between them (ie, Revit). Use this method to set them. + /// + /// + public void SetContextObjects(List objects); + + /// + /// Some converters need to know which objects have been converted before in order to update them (ie, Revit). Use this method to set them. + /// + /// + public void SetPreviousContextObjects(List objects); + + /// + /// Some converters need to be able to receive some settings to modify their internal behaviour (i.e. Rhino's Brep Meshing options). Use this method to set them. + /// + /// The object representing the settings for your converter. + public void SetConverterSettings(object settings); +} + +// NOTE: Do not change the order of the existing ones +/// +/// Receive modes indicate what to do and not do when receiving objects +/// +public enum ReceiveMode +{ + /// + /// Attemts updating previously received objects by ID, deletes previously received objects that do not exist anymore and creates new ones + /// + Update, + + /// + /// Always creates new objects + /// + Create, + + /// + /// Ignores updating previously received objects and does not attempt updating or deleting them, creates new objects + /// + Ignore +} diff --git a/src/Speckle.Core/Kits/ISpeckleKit.cs b/src/Speckle.Core/Kits/ISpeckleKit.cs new file mode 100644 index 00000000..c784ab00 --- /dev/null +++ b/src/Speckle.Core/Kits/ISpeckleKit.cs @@ -0,0 +1,48 @@ +using System; +using System.Collections.Generic; + +namespace Speckle.Core.Kits; + +/// +/// Defines the basic interface for creating a "Speckle Kit" +/// +public interface ISpeckleKit +{ + /// + /// Gets all the object types (the object model) provided by this kit. + /// + IEnumerable Types { get; } + + /// + /// Gets all available converters for this Kit. + /// + IEnumerable Converters { get; } + + /// + /// Gets this Kit's description. + /// + string Description { get; } + + /// + /// Gets this Kit's name. 
+ /// + string Name { get; } + + /// + /// Gets this Kit's author. + /// + string Author { get; } + + /// + /// Gets the website (or email) to contact the Kit's author. + /// + string WebsiteOrEmail { get; } + + /// + /// Tries to load a converter for a specific . + /// + /// The host app string for which a is desired. see + /// The converter for the specific + /// Thrown if the requested converter failed to load + public ISpeckleConverter LoadConverter(string app); +} diff --git a/src/Speckle.Core/Kits/KitDeclaration.cs b/src/Speckle.Core/Kits/KitDeclaration.cs new file mode 100644 index 00000000..abf8bc3e --- /dev/null +++ b/src/Speckle.Core/Kits/KitDeclaration.cs @@ -0,0 +1,30 @@ +#nullable disable +using System; +using System.Collections.Generic; +using System.Linq; +using Speckle.Core.Models; + +namespace Speckle.Core.Kits; + +/// +/// Needed so we can properly deserialize all the Base-derived objects from Speckle.Core itself. +/// +public sealed class CoreKit : ISpeckleKit +{ + public IEnumerable Types => GetType().Assembly.GetTypes().Where(type => type.IsSubclassOf(typeof(Base))); + + public string Description => "Base Speckle models for revisions, streams, etc."; + + public string Name => nameof(CoreKit); + + public string Author => "Dimitrie"; + + public string WebsiteOrEmail => "hello@speckle.systems"; + + public IEnumerable Converters => new List(); + + public ISpeckleConverter LoadConverter(string app) + { + return null; + } +} diff --git a/src/Speckle.Core/Kits/KitManager.cs b/src/Speckle.Core/Kits/KitManager.cs new file mode 100644 index 00000000..05174459 --- /dev/null +++ b/src/Speckle.Core/Kits/KitManager.cs @@ -0,0 +1,339 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Core.Models; + +namespace Speckle.Core.Kits; + +public static class KitManager +{ + private static string? s_kitsFolder; + + public static readonly AssemblyName SpeckleAssemblyName = typeof(Base).GetTypeInfo().Assembly.GetName(); + + private static readonly Dictionary s_speckleKits = new(); + + private static List s_availableTypes = new(); + + private static bool s_initialized; + + /// + /// Local installations store kits in C:\Users\USERNAME\AppData\Roaming\Speckle\Kits + /// Admin/System-wide installations in C:\ProgramData\Speckle\Kits + /// + public static string KitsFolder + { + get => s_kitsFolder ??= SpecklePathProvider.KitsFolderPath; + set => s_kitsFolder = value; + } + + /// + /// Returns a list of all the kits found on this user's device. + /// + public static IEnumerable Kits + { + get + { + Initialize(); + return s_speckleKits.Values.Where(v => v != null); //NOTE: null check here should be unnecessary + } + } + + /// + /// Returns a list of all the types found in all the kits on this user's device. + /// + public static IEnumerable Types + { + get + { + Initialize(); + return s_availableTypes; + } + } + + /// + /// Checks whether a specific kit exists. + /// + /// + /// + public static bool HasKit(string assemblyFullName) + { + Initialize(); + return s_speckleKits.ContainsKey(assemblyFullName); + } + + /// + /// Gets a specific kit. + /// + /// + /// + public static ISpeckleKit GetKit(string assemblyFullName) + { + Initialize(); + return s_speckleKits[assemblyFullName]; + } + + /// + /// Gets the default Speckle provided kit, "Objects". 
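A sketch of the typical kit lookup, assuming an "Objects" kit and a matching converter are installed on the machine (that is environment-dependent):

ISpeckleKit kit = KitManager.GetDefaultKit();
string hostApp = HostApplications.Revit.GetVersion(HostAppVersion.v2023); // "Revit2023"
ISpeckleConverter converter = kit.LoadConverter(hostApp);                 // may throw if the converter fails to load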
+ /// + /// + public static ISpeckleKit GetDefaultKit() + { + Initialize(); + return s_speckleKits.First(kvp => kvp.Value.Name == "Objects").Value; + } + + /// + /// Returns all the kits with potential converters for the software app. + /// + /// + /// + public static IEnumerable GetKitsWithConvertersForApp(string app) + { + foreach (var kit in Kits) + { + if (kit.Converters.Contains(app)) + { + yield return kit; + } + } + } + + /// + /// Tells the kit manager to initialise from a specific location. + /// + /// + public static void Initialize(string kitFolderLocation) + { + if (s_initialized) + { + SpeckleLog.Logger.Error("{objectType} is already initialised", typeof(KitManager)); + throw new SpeckleException( + "The kit manager has already been initialised. Make sure you call this method earlier in your code!" + ); + } + + KitsFolder = kitFolderLocation; + Load(); + s_initialized = true; + } + + #region Private Methods + + private static void Initialize() + { + if (!s_initialized) + { + Load(); + s_initialized = true; + } + } + + private static void Load() + { + SpeckleLog.Logger.Information("Initializing Kit Manager in {KitsFolder}", SpecklePathProvider.KitsFolderPath); + + GetLoadedSpeckleReferencingAssemblies(); + LoadSpeckleReferencingAssemblies(); + + s_availableTypes = s_speckleKits + .Where(kit => kit.Value != null) //Null check should be unnecessary + .SelectMany(kit => kit.Value.Types) + .ToList(); + } + + // recursive search for referenced assemblies + public static List GetReferencedAssemblies() + { + var returnAssemblies = new List(); + var loadedAssemblies = new HashSet(); + var assembliesToCheck = new Queue(); + + assembliesToCheck.Enqueue(Assembly.GetEntryAssembly()); + + while (assembliesToCheck.Count > 0) + { + var assemblyToCheck = assembliesToCheck.Dequeue(); + + if (assemblyToCheck == null) + { + continue; + } + + foreach (var reference in assemblyToCheck.GetReferencedAssemblies()) + { + // filtering out system dlls + if (reference.FullName.StartsWith("System.")) + { + continue; + } + + if (reference.FullName.StartsWith("Microsoft.")) + { + continue; + } + + if (loadedAssemblies.Contains(reference.FullName)) + { + continue; + } + + Assembly assembly; + try + { + assembly = Assembly.Load(reference); + } + catch (SystemException ex) when (ex is IOException or BadImageFormatException) + { + continue; + } + + assembliesToCheck.Enqueue(assembly); + loadedAssemblies.Add(reference.FullName); + returnAssemblies.Add(assembly); + } + } + + return returnAssemblies; + } + + private static void GetLoadedSpeckleReferencingAssemblies() + { + List assemblies = AppDomain.CurrentDomain.GetAssemblies().ToList(); + assemblies.AddRange(GetReferencedAssemblies()); + + foreach (var assembly in assemblies) + { + if (assembly.IsDynamic || assembly.ReflectionOnly) + { + continue; + } + + if (!assembly.IsReferencing(SpeckleAssemblyName)) + { + continue; + } + + if (s_speckleKits.ContainsKey(assembly.FullName)) + { + continue; + } + + var kitClass = GetKitClass(assembly); + if (kitClass == null) + { + continue; + } + + if (Activator.CreateInstance(kitClass) is ISpeckleKit speckleKit) + { + s_speckleKits.Add(assembly.FullName, speckleKit); + } + } + } + + private static void LoadSpeckleReferencingAssemblies() + { + if (!Directory.Exists(KitsFolder)) + { + return; + } + + var directories = Directory.GetDirectories(KitsFolder); + + foreach (var directory in directories) + { + foreach (var assemblyPath in Directory.EnumerateFiles(directory, "*.dll")) + { + var unloadedAssemblyName = 
SafeGetAssemblyName(assemblyPath); + + if (unloadedAssemblyName == null) + { + continue; + } + + try + { + var assembly = Assembly.LoadFrom(assemblyPath); + var kitClass = GetKitClass(assembly); + if (assembly.IsReferencing(SpeckleAssemblyName) && kitClass != null) + { + if (!s_speckleKits.ContainsKey(assembly.FullName)) + { + if (Activator.CreateInstance(kitClass) is ISpeckleKit speckleKit) + { + s_speckleKits.Add(assembly.FullName, speckleKit); + } + } + } + } + catch (FileLoadException) { } + catch (BadImageFormatException) { } + } + } + } + + private static Type? GetKitClass(Assembly assembly) + { + try + { + var kitClass = assembly + .GetTypes() + .FirstOrDefault(type => + { + return type.GetInterfaces().Any(iface => iface.Name == nameof(ISpeckleKit)); + }); + + return kitClass; + } + catch (ReflectionTypeLoadException) + { + return null; + } + } + + private static AssemblyName? SafeGetAssemblyName(string? assemblyPath) + { + try + { + return AssemblyName.GetAssemblyName(assemblyPath); + } + catch (Exception ex) when (ex is ArgumentException or IOException or BadImageFormatException) + { + return null; + } + } + + #endregion +} + +internal static class AssemblyExtensions +{ + /// + /// Indicates if a given assembly references another which is identified by its name. + /// + /// The assembly which will be probed. + /// The reference assembly name. + /// A boolean value indicating if there is a reference. + public static bool IsReferencing(this Assembly assembly, AssemblyName referenceName) + { + if (AssemblyName.ReferenceMatchesDefinition(assembly.GetName(), referenceName)) + { + return true; + } + + foreach (var referencedAssemblyName in assembly.GetReferencedAssemblies()) + { + if (AssemblyName.ReferenceMatchesDefinition(referencedAssemblyName, referenceName)) + { + return true; + } + } + + return false; + } +} diff --git a/src/Speckle.Core/Kits/Units.cs b/src/Speckle.Core/Kits/Units.cs new file mode 100644 index 00000000..45389d4d --- /dev/null +++ b/src/Speckle.Core/Kits/Units.cs @@ -0,0 +1,323 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.Contracts; + +namespace Speckle.Core.Kits; + +public static class Units +{ + public const string Millimeters = "mm"; + public const string Centimeters = "cm"; + public const string Meters = "m"; + public const string Kilometers = "km"; + public const string Inches = "in"; + + /// International Foot + public const string Feet = "ft"; + public const string Yards = "yd"; + public const string Miles = "mi"; + public const string None = "none"; + + /// US Survey foot + /// Considered an obsolete unit, superseded by the international foot + public const string USFeet = "us_ft"; + + private static readonly List s_supportedUnits = + new() { Millimeters, Centimeters, Meters, Kilometers, Inches, Feet, USFeet, Yards, Miles, None }; + + /// + /// if is a recognised/supported unit string, otherwise + public static bool IsUnitSupported(string unit) + { + return s_supportedUnits.Contains(unit); + } + + /// + /// Gets the conversion factor from one unit system to another + /// + /// Semantic unit string for the units to convert from + /// Semantic unit string for the units to convert to + /// A + /// The scaling factor to convert from the units to the units, or 1 if either unit param is null or none + [Pure] + public static double GetConversionFactor(string? from, string? to) + { + string? fromUnits = GetUnitsFromString(from); + string? 
toUnits = GetUnitsFromString(to); + + switch (fromUnits) + { + // METRIC + case Millimeters: + switch (toUnits) + { + case Centimeters: + return 0.1; + case Meters: + return 0.001; + case Kilometers: + return 1e-6; + case Inches: + return 0.0393701; + case Feet: + return 0.00328084; + case USFeet: + return 0.0032808333; + case Yards: + return 0.00109361; + case Miles: + return 6.21371e-7; + } + break; + case Centimeters: + switch (toUnits) + { + case Millimeters: + return 10; + case Meters: + return 0.01; + case Kilometers: + return 1e-5; + case Inches: + return 0.393701; + case Feet: + return 0.0328084; + case USFeet: + return 0.0328083333; + case Yards: + return 0.0109361; + case Miles: + return 6.21371e-6; + } + break; + case Meters: + switch (toUnits) + { + case Millimeters: + return 1000; + case Centimeters: + return 100; + case Kilometers: + return 1000; + case Inches: + return 39.3701; + case Feet: + return 3.28084; + case USFeet: + return 3.28083333; + case Yards: + return 1.09361; + case Miles: + return 0.000621371; + } + break; + case Kilometers: + switch (toUnits) + { + case Millimeters: + return 1000000; + case Centimeters: + return 100000; + case Meters: + return 1000; + case Inches: + return 39370.1; + case Feet: + return 3280.84; + case USFeet: + return 3280.83333; + case Yards: + return 1093.61; + case Miles: + return 0.621371; + } + break; + + // IMPERIAL + case Inches: + switch (toUnits) + { + case Millimeters: + return 25.4; + case Centimeters: + return 2.54; + case Meters: + return 0.0254; + case Kilometers: + return 2.54e-5; + case Feet: + return 0.0833333; + case USFeet: + return 0.0833331667; + case Yards: + return 0.027777694; + case Miles: + return 1.57828e-5; + } + break; + case Feet: + switch (toUnits) + { + case Millimeters: + return 304.8; + case Centimeters: + return 30.48; + case Meters: + return 0.3048; + case Kilometers: + return 0.0003048; + case Inches: + return 12; + case USFeet: + return 0.999998; + case Yards: + return 0.333332328; + case Miles: + return 0.000189394; + } + break; + case USFeet: + switch (toUnits) + { + case Millimeters: + return 120000d / 3937d; + case Centimeters: + return 12000d / 3937d; + case Meters: + return 1200d / 3937d; + case Kilometers: + return 1.2 / 3937d; + case Inches: + return 12.000024000000002; + case Feet: + return 1.000002; + case Yards: + return 1.000002 / 3d; + case Miles: + return 1.000002 / 5280d; + } + break; + case Yards: + switch (toUnits) + { + case Millimeters: + return 914.4; + case Centimeters: + return 91.44; + case Meters: + return 0.9144; + case Kilometers: + return 0.0009144; + case Inches: + return 36; + case Feet: + return 3; + case USFeet: + return 2.999994; + case Miles: + return 1d / 1760d; + } + break; + case Miles: + switch (toUnits) + { + case Millimeters: + return 1.609e+6; + case Centimeters: + return 160934; + case Meters: + return 1609.34; + case Kilometers: + return 1.60934; + case Inches: + return 63360; + case Feet: + return 5280; + case USFeet: + return 5279.98944002112; + case Yards: + return 1759.99469184; + } + break; + case None: + return 1; + } + return 1; + } + + /// + /// Given , maps several friendly unit aliases to a a semantic unit string + /// + /// + /// The semantic unit string, if is + /// Unit string is not a supported unit (see ) + [Pure] + public static string? GetUnitsFromString(string? 
unit) + { + if (string.IsNullOrWhiteSpace(unit)) + { + return null; + } + + return unit.ToLower() switch + { + "mm" or "mil" or "millimeter" or "millimeters" or "millimetres" => Millimeters, + "cm" or "centimetre" or "centimeter" or "centimetres" or "centimeters" => Centimeters, + "m" or "meter" or "metre" or "meters" or "metres" => Meters, + "inches" or "inch" or "in" => Inches, + "feet" or "foot" or "ft" => Feet, + "ussurveyfeet" => USFeet, //BUG: why don't we match on "us_ft"? + "yard" or "yards" or "yd" => Yards, + "miles" or "mile" or "mi" => Miles, + "kilometers" or "kilometer" or "km" => Kilometers, + "none" => None, + _ => throw new ArgumentOutOfRangeException(nameof(unit), $"Unrecognised unit string {unit}"), + }; + } + + /// + /// Maps semantic unit strings to a numeric encoding + /// + /// + /// non-recognised unit encodings will be silently mapped to 0 + /// + [Pure] + public static int GetEncodingFromUnit(string unit) + { + return unit switch + { + Millimeters => 1, + Centimeters => 2, + Meters => 3, + Kilometers => 4, + Inches => 5, + Feet => 6, + Yards => 7, + Miles => 8, + _ => 0, + }; + } + + /// + /// Maps a numeric encoding to the semantic unit string + /// + /// numeric encoded unit + /// non-recognised unit encodings will be silently mapped to + /// Semantic unit string + [Pure] + public static string GetUnitFromEncoding(double unit) + { + return unit switch + { + 1 => Millimeters, + 2 => Centimeters, + 3 => Meters, + 4 => Kilometers, + 5 => Inches, + 6 => Feet, + 7 => Yards, + 8 => Miles, + _ => None, + }; + } +} diff --git a/src/Speckle.Core/Logging/Analytics.cs b/src/Speckle.Core/Logging/Analytics.cs new file mode 100644 index 00000000..66f77895 --- /dev/null +++ b/src/Speckle.Core/Logging/Analytics.cs @@ -0,0 +1,347 @@ +#nullable disable +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Net.NetworkInformation; +using System.Reflection; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading.Tasks; +using System.Web; +using Speckle.Core.Credentials; +using Speckle.Core.Helpers; +using Speckle.Newtonsoft.Json; + +namespace Speckle.Core.Logging; + +/// +/// Anonymous telemetry to help us understand how to make a better Speckle. +/// This really helps us to deliver a better open source project and product! 
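A short sketch of the Units helpers defined above, converting an arbitrary example length from metres to feet after normalising a friendly unit alias.

using Speckle.Core.Kits;

double lengthInMeters = 10.0;                                       // arbitrary example value
string? sourceUnits = Units.GetUnitsFromString("metres");           // normalises to Units.Meters ("m")
double factor = Units.GetConversionFactor(sourceUnits, Units.Feet); // roughly 3.28084
double lengthInFeet = lengthInMeters * factor;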
+/// +public static class Analytics +{ + /// + /// Default Mixpanel events + /// + public enum Events + { + /// + /// Event triggered when data is sent to a Speckle Server + /// + Send, + + /// + /// Event triggered when data is received from a Speckle Server + /// + Receive, + + /// + /// Event triggered when a node is executed in a visual programming environment, it should contain the name of the action and the host application + /// + NodeRun, + + /// + /// Event triggered when an action is executed in Desktop UI, it should contain the name of the action and the host application + /// + DUIAction, + + /// + /// Event triggered when a node is first created in a visual programming environment, it should contain the name of the action and the host application + /// + NodeCreate, + + /// + /// Event triggered when the import/export alert is launched or closed + /// + ImportExportAlert, + + /// + /// Event triggered when the connector is registered + /// + Registered, + + /// + /// Event triggered by the Mapping Tool + /// + MappingsAction, + + /// + /// Event triggered when user selects object to convert to Speckle on Send + /// + ConvertToSpeckle, + + /// + /// Event triggered when user selects object to convert to Native on Receive + /// + ConvertToNative + } + + private const string MIXPANEL_TOKEN = "acd87c5a50b56df91a795e999812a3a4"; + private const string MIXPANEL_SERVER = "https://analytics.speckle.systems"; + + /// + /// Cached email + /// + private static string LastEmail { get; set; } + + /// + /// Cached server URL + /// + private static string LastServer { get; set; } + + /// + /// when the DEBUG pre-processor directive is , otherwise + /// + /// This must be kept as a computed property, not a compile time const + internal static bool IsReleaseMode => +#if DEBUG + false; +#else + true; +#endif + + /// + /// Tracks an event without specifying the email and server. + /// It's not always possible to know which account the user has selected, especially in visual programming. + /// Therefore we are caching the email and server values so that they can be used also when nodes such as "Serialize" are used. + /// If no account info is cached, we use the default account data. 
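A hedged sketch of the no-account TrackEvent overload described above, assuming the custom-properties dictionary is keyed by string; the property names used here are illustrative, not an established schema.

using System.Collections.Generic;
using Speckle.Core.Logging;

Analytics.TrackEvent(
  Analytics.Events.Send,
  new Dictionary<string, object>
  {
    { "sourceHostApp", "Rhino" }, // made-up example property
    { "objectCount", 42 },        // made-up example property
  }
);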
+ /// + /// Name of the even + /// Additional parameters to pass in to event + /// True if it's an action performed by a logged user + public static void TrackEvent( + Events eventName, + Dictionary customProperties = null, + bool isAction = true + ) + { + string email; + string server; + + if (LastEmail != null && LastServer != null && LastServer != "no-account-server") + { + email = LastEmail; + server = LastServer; + } + else + { + var acc = AccountManager.GetDefaultAccount(); + if (acc == null) + { + var macAddr = NetworkInterface + .GetAllNetworkInterfaces() + .Where( + nic => + nic.OperationalStatus == OperationalStatus.Up && nic.NetworkInterfaceType != NetworkInterfaceType.Loopback + ) + .Select(nic => nic.GetPhysicalAddress().ToString()) + .FirstOrDefault(); + + email = macAddr; + server = "no-account-server"; + isAction = false; + } + else + { + email = acc.GetHashedEmail(); + server = acc.GetHashedServer(); + } + } + + TrackEvent(email, server, eventName, customProperties, isAction); + } + + /// + /// Tracks an event from a specified account, anonymizes personal information + /// + /// Account to use, it will be anonymized + /// Name of the event + /// Additional parameters to pass to the event + /// True if it's an action performed by a logged user + public static void TrackEvent( + Account account, + Events eventName, + Dictionary customProperties = null, + bool isAction = true + ) + { + if (account == null) + { + TrackEvent(eventName, customProperties, isAction); + } + else + { + TrackEvent(account.GetHashedEmail(), account.GetHashedServer(), eventName, customProperties, isAction); + } + } + + /// + /// Tracks an event from a specified email and server, anonymizes personal information + /// + /// Email of the user anonymized + /// Server URL anonymized + /// Name of the event + /// Additional parameters to pass to the event + /// True if it's an action performed by a logged user + private static void TrackEvent( + string hashedEmail, + string hashedServer, + Events eventName, + Dictionary customProperties = null, + bool isAction = true + ) + { + LastEmail = hashedEmail; + LastServer = hashedServer; + + if (!IsReleaseMode) + { + //only track in prod + return; + } + + Task.Run(async () => + { + try + { + var executingAssembly = Assembly.GetExecutingAssembly(); + var properties = new Dictionary + { + { "distinct_id", hashedEmail }, + { "server_id", hashedServer }, + { "token", MIXPANEL_TOKEN }, + { "hostApp", Setup.HostApplication }, + { "hostAppVersion", Setup.VersionedHostApplication }, + { + "core_version", + FileVersionInfo.GetVersionInfo(executingAssembly.Location).ProductVersion + ?? 
executingAssembly.GetName().Version.ToString() + }, + { "$os", GetOs() } + }; + + if (isAction) + { + properties.Add("type", "action"); + } + + if (customProperties != null) + { + foreach (KeyValuePair customProp in customProperties) + { + properties[customProp.Key] = customProp.Value; + } + } + + string json = JsonConvert.SerializeObject(new { @event = eventName.ToString(), properties }); + + var query = new StreamContent(new MemoryStream(Encoding.UTF8.GetBytes("data=" + HttpUtility.UrlEncode(json)))); + + using HttpClient client = new(); + client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("text/plain")); + query.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + var res = await client.PostAsync(MIXPANEL_SERVER + "/track?ip=1", query).ConfigureAwait(false); + res.EnsureSuccessStatusCode(); + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger + .ForContext("eventName", eventName.ToString()) + .ForContext("isAction", isAction) + .Warning(ex, "Analytics event failed {exceptionMessage}", ex.Message); + } + }); + } + + internal static void AddConnectorToProfile(string hashedEmail, string connector) + { + Task.Run(async () => + { + try + { + var data = new Dictionary + { + { "$token", MIXPANEL_TOKEN }, + { "$distinct_id", hashedEmail }, + { + "$union", + new Dictionary + { + { + "Connectors", + new List { connector } + } + } + } + }; + string json = JsonConvert.SerializeObject(data); + + var query = new StreamContent(new MemoryStream(Encoding.UTF8.GetBytes("data=" + HttpUtility.UrlEncode(json)))); + using HttpClient client = Http.GetHttpProxyClient(); + client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("text/plain")); + query.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + var res = await client.PostAsync(MIXPANEL_SERVER + "/engage#profile-union", query).ConfigureAwait(false); + res.EnsureSuccessStatusCode(); + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.ForContext("connector", connector).Warning(ex, "Failed add connector to profile"); + } + }); + } + + internal static void IdentifyProfile(string hashedEmail, string connector) + { + Task.Run(async () => + { + try + { + var data = new Dictionary + { + { "$token", MIXPANEL_TOKEN }, + { "$distinct_id", hashedEmail }, + { + "$set", + new Dictionary { { "Identified", true } } + } + }; + string json = JsonConvert.SerializeObject(data); + + var query = new StreamContent(new MemoryStream(Encoding.UTF8.GetBytes("data=" + HttpUtility.UrlEncode(json)))); + using HttpClient client = Http.GetHttpProxyClient(); + client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("text/plain")); + query.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + var res = await client.PostAsync(MIXPANEL_SERVER + "/engage#profile-set", query).ConfigureAwait(false); + res.EnsureSuccessStatusCode(); + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.ForContext("connector", connector).Warning(ex, "Failed identify profile"); + } + }); + } + + private static string GetOs() + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + return "Windows"; + } + + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + return "Mac OS X"; + } + + if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + return "Linux"; + } + + return "Unknown"; + } +} diff --git a/src/Speckle.Core/Logging/CumulativeTimer.cs b/src/Speckle.Core/Logging/CumulativeTimer.cs new file mode 100644 index 
00000000..a5286635 --- /dev/null +++ b/src/Speckle.Core/Logging/CumulativeTimer.cs @@ -0,0 +1,85 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; + +namespace Speckle.Core.Logging; + +public class CumulativeTimer +{ + internal class Operation : IDisposable + { + private static readonly double s_stopwatchToTimeSpanTicks = Stopwatch.Frequency / 10000000.0; + + private readonly CumulativeTimer _cumulativeTimer; + private readonly string _operationName; + private readonly long _start; + private long? _stop; + + internal Operation(CumulativeTimer cumulativeTimer, string operationName) + { + _cumulativeTimer = cumulativeTimer; + _operationName = operationName; + _start = GetTimestamp(); + } + + public TimeSpan Elapsed + { + get + { + long num = (_stop ?? GetTimestamp()) - _start; + if (num < 0) + { + return TimeSpan.Zero; + } + + return TimeSpan.FromTicks(num); + } + } + + private static long GetTimestamp() + { + return (long)(Stopwatch.GetTimestamp() / s_stopwatchToTimeSpanTicks); + } + + private void StopTiming() + { + if (!_stop.HasValue) + { + _stop = GetTimestamp(); + } + } + + public void Dispose() + { + StopTiming(); + _cumulativeTimer.AddTimer(_operationName, Elapsed); + } + } + + private readonly Dictionary _operationTimings = new(); + + public IDisposable Begin(string operationNameTemplate, params object[] args) + { + return new Operation(this, string.Format(operationNameTemplate, args)); + } + + public void AddTimer(string operationName, TimeSpan time) + { + if (_operationTimings.TryGetValue(operationName, out TimeSpan prevTimings)) + { + _operationTimings[operationName] = prevTimings + time; + } + else + { + _operationTimings.Add(operationName, time); + } + } + + public void EnrichSerilogOperation(SerilogTimings.Operation operation) + { + foreach (var timing in _operationTimings) + { + operation.EnrichWith(timing.Key, timing.Value.TotalMilliseconds); + } + } +} diff --git a/src/Speckle.Core/Logging/ExceptionHelpers.cs b/src/Speckle.Core/Logging/ExceptionHelpers.cs new file mode 100644 index 00000000..9af1c0d9 --- /dev/null +++ b/src/Speckle.Core/Logging/ExceptionHelpers.cs @@ -0,0 +1,47 @@ +using System; +using System.Diagnostics.Contracts; +using System.Threading; + +namespace Speckle.Core.Logging; + +public static class ExceptionHelpers +{ + /// + /// Helper function for catch blocks to avoid catching and handling/wrapping of some critical exception types that are unlikely to be truly handleable + /// + /// + /// We should aim to always catch specific exception types, and have all functions document the types they may throw. + /// However, this is not always achievable. + /// e.g. when dealing with legacy code, some third-party APIs, or in cases where we want to prevent a host app crash. 
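A small sketch of the CumulativeTimer class added above: repeated Begin calls that format to the same operation name are summed, so per-chunk timings can later enrich a SerilogTimings operation.

using Speckle.Core.Logging;

var timings = new CumulativeTimer();
for (int i = 0; i < 3; i++)
{
  using (timings.Begin("Deserialize {0}", "chunks")) // disposing adds the elapsed time to the running total
  {
    // ...work being measured...
  }
}
// Later, the accumulated totals can be attached to a SerilogTimings operation:
// timings.EnrichSerilogOperation(serilogOperation);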
+ /// In these cases, we often want to catch all exceptions, and opt out only of the ones that definitely shouldn't be handled + /// + /// + /// + /// try + /// { + /// SomethingSketchy(); + /// } + /// catch (Exception ex) when (!IsFatal(ex)) + /// { + /// throw new SpeckleException("Failed to do something", ex); + /// } + /// + /// + /// + /// for types that are unlikely to ever be recoverable + [Pure] + public static bool IsFatal(this Exception ex) + { + return ex switch + { + OutOfMemoryException + or ThreadAbortException + or InvalidProgramException + or AccessViolationException + or AppDomainUnloadedException + or BadImageFormatException + => true, + _ => false, + }; + } +} diff --git a/src/Speckle.Core/Logging/LoggingHelpers.cs b/src/Speckle.Core/Logging/LoggingHelpers.cs new file mode 100644 index 00000000..52596e6b --- /dev/null +++ b/src/Speckle.Core/Logging/LoggingHelpers.cs @@ -0,0 +1,16 @@ +using System; +using System.Diagnostics; + +namespace Speckle.Core.Logging; + +internal static class LoggingHelpers +{ + private const long TICKS_PER_MILLISECOND = 10000; + private const long TICKS_PER_SECOND = TICKS_PER_MILLISECOND * 1000; + private static readonly double s_sTickFrequency = (double)TICKS_PER_SECOND / Stopwatch.Frequency; + + public static TimeSpan GetElapsedTime(long startingTimestamp, long endingTimestamp) + { + return new TimeSpan((long)((endingTimestamp - startingTimestamp) * s_sTickFrequency)); + } +} diff --git a/src/Speckle.Core/Logging/Setup.cs b/src/Speckle.Core/Logging/Setup.cs new file mode 100644 index 00000000..3c409896 --- /dev/null +++ b/src/Speckle.Core/Logging/Setup.cs @@ -0,0 +1,92 @@ +#nullable disable +using System; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using Speckle.Core.Credentials; +using Speckle.Core.Kits; + +namespace Speckle.Core.Logging; + +/// +/// Anonymous telemetry to help us understand how to make a better Speckle. +/// This really helps us to deliver a better open source project and product! +/// +[SuppressMessage( + "Naming", + "CA1708:Identifiers should differ by more than case", + Justification = "Class contains obsolete members that are kept for backwards compatiblity" +)] +public static class Setup +{ + public static Mutex Mutex { get; set; } + + private static bool s_initialized; + + static Setup() + { + //Set fallback values + try + { + HostApplication = Process.GetCurrentProcess().ProcessName; + } + catch (InvalidOperationException) + { + HostApplication = "other (.NET)"; + } + } + + /// + /// Set from the connectors, defines which current host application we're running on. + /// + internal static string HostApplication { get; private set; } + + /// + /// Set from the connectors, defines which current host application we're running on - includes the version. 
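A connector-startup sketch using Setup.Init, whose definition follows below; the "Revit" and "Revit2024" strings are placeholder host-application values.

using Speckle.Core.Logging;

// Registers the host app, starts the detection mutex, and initialises logging and analytics.
Setup.Init(versionedHostApplication: "Revit2024", hostApplication: "Revit");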
+ /// + internal static string VersionedHostApplication { get; private set; } = HostApplications.Other.Slug; + + public static void Init( + string versionedHostApplication, + string hostApplication, + SpeckleLogConfiguration logConfiguration = null + ) + { + if (s_initialized) + { + SpeckleLog.Logger + .ForContext("newVersionedHostApplication", versionedHostApplication) + .ForContext("newHostApplication", hostApplication) + .Information( + "Setup was already initialized with {currentHostApp} {currentVersionedHostApp}", + hostApplication, + versionedHostApplication + ); + return; + } + + s_initialized = true; + + HostApplication = hostApplication; + VersionedHostApplication = versionedHostApplication; + + //start mutex so that Manager can detect if this process is running + Mutex = new Mutex(false, "SpeckleConnector-" + hostApplication); + + SpeckleLog.Initialize(hostApplication, versionedHostApplication, logConfiguration); + + foreach (var account in AccountManager.GetAccounts()) + { + Analytics.AddConnectorToProfile(account.GetHashedEmail(), hostApplication); + Analytics.IdentifyProfile(account.GetHashedEmail(), hostApplication); + } + } + + [Obsolete("Use " + nameof(Mutex))] + [SuppressMessage("Style", "IDE1006:Naming Styles")] + public static Mutex mutex + { + get => Mutex; + set => Mutex = value; + } +} diff --git a/src/Speckle.Core/Logging/SpeckleException.cs b/src/Speckle.Core/Logging/SpeckleException.cs new file mode 100644 index 00000000..397b8b16 --- /dev/null +++ b/src/Speckle.Core/Logging/SpeckleException.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using GraphQL; +using Sentry; + +namespace Speckle.Core.Logging; + +public class SpeckleException : Exception +{ + public SpeckleException() { } + + public SpeckleException(string? message) + : base(message) { } + + public SpeckleException(string? message, Exception? inner = null) + : base(message, inner) { } + + #region obsolete + [Obsolete("Use any other constructor", true)] + public SpeckleException(string? message, Exception? inner, bool log = true, SentryLevel level = SentryLevel.Info) + : base(message, inner) { } + + [Obsolete("Use any other constructor")] + public SpeckleException(string? message, GraphQLError[] errors, bool log = true, SentryLevel level = SentryLevel.Info) + : base(message) + { + GraphQLErrors = errors.Select(error => new KeyValuePair("error", error.Message)).ToList(); + } + + [Obsolete("Use any other constructor", true)] + public SpeckleException(string message, bool log, SentryLevel level = SentryLevel.Info) + : base(message) { } + + [Obsolete("Use any other constructor", true)] + public List> GraphQLErrors { get; set; } + #endregion +} diff --git a/src/Speckle.Core/Logging/SpeckleLog.cs b/src/Speckle.Core/Logging/SpeckleLog.cs new file mode 100644 index 00000000..eb8a3218 --- /dev/null +++ b/src/Speckle.Core/Logging/SpeckleLog.cs @@ -0,0 +1,331 @@ +using System; +using System.Diagnostics; +using System.IO; +using System.Reflection; +using System.Runtime.InteropServices; +using Sentry; +using Serilog; +using Serilog.Core; +using Serilog.Events; +using Serilog.Exceptions; +using Speckle.Core.Credentials; +using Speckle.Core.Helpers; + +namespace Speckle.Core.Logging; + +/// +/// Configuration object for the Speckle logging system. +/// +public sealed class SpeckleLogConfiguration +{ + /// + /// Flag to enable enhanced log context. 
This adds the following enrich calls: + /// - WithClientAgent + /// - WithClientIp + /// - WithExceptionDetails + /// + public bool EnhancedLogContext { get; } + + /// + /// Flag to enable console sink + /// + public bool LogToConsole { get; } + + /// + /// Flag to enable File sink + /// + public bool LogToFile { get; } + + /// + /// Flag to enable Sentry sink + /// + public bool LogToSentry { get; } + + /// + /// Flag to enable Seq sink + /// + public bool LogToSeq { get; } + + /// + /// Log events bellow this level are silently dropped + /// + public LogEventLevel MinimumLevel { get; } + + /// + /// Flag to override the default Sentry DNS + /// + public string SentryDns { get; } + + private const string DEFAULT_SENTRY_DNS = "https://f29ec716d14d4121bb2a71c4f3ef7786@o436188.ingest.sentry.io/5396846"; + + /// + /// Default SpeckleLogConfiguration constructor. + /// These are the sane defaults we should be using across connectors. + /// + /// Log events bellow this level are silently dropped + /// Flag to enable console log sink + /// Flag to enable Seq log sink + /// Flag to enable Sentry log sink + /// Flag to enable File log sink + /// Flag to enable enhanced context on every log event + public SpeckleLogConfiguration( + LogEventLevel minimumLevel = LogEventLevel.Debug, + bool logToConsole = true, + bool logToSeq = true, + bool logToSentry = true, + bool logToFile = true, + bool enhancedLogContext = true, + string sentryDns = DEFAULT_SENTRY_DNS + ) + { + MinimumLevel = minimumLevel; + LogToConsole = logToConsole; + LogToSeq = logToSeq; + LogToSentry = logToSentry; + LogToFile = logToFile; + EnhancedLogContext = enhancedLogContext; + SentryDns = sentryDns; + } +} + +/// +/// Configurator class for a standardized logging system across Speckle (sharp). +/// +public static class SpeckleLog +{ + private static ILogger? s_logger; + + public static ILogger Logger + { + get + { + if (s_logger == null) + { + Initialize("Speckle.Core", "unknown"); + } + + return s_logger!; + } + } + + private static bool s_initialized; + + private static bool s_isMachineIdUsed; + + private static string s_logFolderPath; + + /// + /// Initialize logger configuration for a global Serilog.Log logger. + /// + public static void Initialize( + string hostApplicationName, + string? hostApplicationVersion, + SpeckleLogConfiguration? logConfiguration = null + ) + { + if (s_initialized) + { + Logger + .ForContext("hostApplicationVersion", hostApplicationVersion) + .ForContext("hostApplicationName", hostApplicationName) + .Information("Setup was already initialized"); + return; + } + + logConfiguration ??= new SpeckleLogConfiguration(); + + s_logger = CreateConfiguredLogger(hostApplicationName, hostApplicationVersion, logConfiguration); + var id = GetUserIdFromDefaultAccount(); + s_logger = s_logger.ForContext("id", id).ForContext("isMachineId", s_isMachineIdUsed); + + // Configure scope after logger created. + SentrySdk.ConfigureScope(scope => + { + scope.User = new User { Id = id }; + }); + + SentrySdk.ConfigureScope(scope => + { + scope.SetTag("hostApplication", hostApplicationName); + }); + + Logger + .ForContext("userApplicationDataPath", SpecklePathProvider.UserApplicationDataPath()) + .ForContext("installApplicationDataPath", SpecklePathProvider.InstallApplicationDataPath) + .ForContext("speckleLogConfiguration", logConfiguration) + .Information( + "Initialized logger inside {hostApplication}/{productVersion}/{version} for user {id}. Path info {userApplicationDataPath} {installApplicationDataPath}." 
+ ); + + s_initialized = true; + } + + /// + /// Create a new fully configured Logger instance. + /// + /// Name of the application using this SDK ie.: "Rhino" + /// Public version slug of the application using this SDK ie.: "2023" + /// Input configuration object. + /// Logger instance + public static Logger CreateConfiguredLogger( + string hostApplicationName, + string? hostApplicationVersion, + SpeckleLogConfiguration logConfiguration + ) + { + // TODO: check if we have write permissions to the file. + // if not, disable file sink, even if its enabled in the config + // show a warning about that... + var canLogToFile = true; + s_logFolderPath = SpecklePathProvider.LogFolderPath(hostApplicationName, hostApplicationVersion); + var logFilePath = Path.Combine(s_logFolderPath, "SpeckleCoreLog.txt"); + + var fileVersionInfo = GetFileVersionInfo(); + var serilogLogConfiguration = new LoggerConfiguration().MinimumLevel + .Is(logConfiguration.MinimumLevel) + .Enrich.FromLogContext() + .Enrich.WithProperty("version", fileVersionInfo.FileVersion) + .Enrich.WithProperty("productVersion", fileVersionInfo.ProductVersion) + .Enrich.WithProperty("hostOs", DetermineHostOsSlug()) + .Enrich.WithProperty("hostOsVersion", Environment.OSVersion) + .Enrich.WithProperty("hostOsArchitecture", RuntimeInformation.ProcessArchitecture.ToString()) + .Enrich.WithProperty("runtime", RuntimeInformation.FrameworkDescription) + .Enrich.WithProperty("hostApplication", $"{hostApplicationName}{hostApplicationVersion ?? ""}"); + + if (logConfiguration.EnhancedLogContext) + { + serilogLogConfiguration = serilogLogConfiguration.Enrich + .WithClientAgent() + .Enrich.WithClientIp() + .Enrich.WithExceptionDetails(); + } + + if (logConfiguration.LogToFile && canLogToFile) + { + serilogLogConfiguration = serilogLogConfiguration.WriteTo.File( + logFilePath, + rollingInterval: RollingInterval.Day, + retainedFileCountLimit: 10 + ); + } + + if (logConfiguration.LogToConsole) + { + serilogLogConfiguration = serilogLogConfiguration.WriteTo.Console(); + } + + if (logConfiguration.LogToSeq) + { + serilogLogConfiguration = serilogLogConfiguration.WriteTo.Seq( + "https://seq.speckle.systems", + apiKey: "agZqxG4jQELxQQXh0iZQ" + ); + } + + if (logConfiguration.LogToSentry) + { + const string ENV = +#if DEBUG + "dev"; +#else + "production"; +#endif + + serilogLogConfiguration = serilogLogConfiguration.WriteTo.Sentry(o => + { + o.Dsn = logConfiguration.SentryDns; + o.Debug = false; + o.Environment = ENV; + o.Release = "SpeckleCore@" + Assembly.GetExecutingAssembly().GetName().Version; + o.AttachStacktrace = true; + o.StackTraceMode = StackTraceMode.Enhanced; + // Set traces_sample_rate to 1.0 to capture 100% of transactions for performance monitoring. + // We recommend adjusting this value in production. 
+ o.TracesSampleRate = 1.0; + // Enable Global Mode if running in a client app + o.IsGlobalModeEnabled = true; + // Debug and higher are stored as breadcrumbs (default is Information) + o.MinimumBreadcrumbLevel = LogEventLevel.Debug; + // Warning and higher is sent as event (default is Error) + o.MinimumEventLevel = LogEventLevel.Error; + }); + } + + var logger = serilogLogConfiguration.CreateLogger(); + + if (logConfiguration.LogToFile && !canLogToFile) + { + logger.Warning("Log to file is enabled, but cannot write to {LogFilePath}", logFilePath); + } + + if (s_isMachineIdUsed) + { + logger.Warning("Cannot set user id for the global log context."); + } + + return logger; + } + + public static void OpenCurrentLogFolder() + { + try + { + Process.Start(s_logFolderPath); + } + catch (FileNotFoundException ex) + { + Logger.Error(ex, "Unable to open log file folder at the following path, {path}", s_logFolderPath); + } + } + + private static string GetUserIdFromDefaultAccount() + { + var machineName = Environment.MachineName; + var userName = Environment.UserName; + var id = Crypt.Md5($"{machineName}:{userName}", "X2"); + try + { + var defaultAccount = AccountManager.GetDefaultAccount(); + if (defaultAccount != null) + { + id = defaultAccount.GetHashedEmail(); + } + else + { + s_isMachineIdUsed = true; + } + } + catch (Exception ex) when (!ex.IsFatal()) + { + // To log it after Logger initialized as deferred action. + s_isMachineIdUsed = true; + } + return id; + } + + private static FileVersionInfo GetFileVersionInfo() + { + var assembly = Assembly.GetExecutingAssembly().Location; + return FileVersionInfo.GetVersionInfo(assembly); + } + + private static string DetermineHostOsSlug() + { + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + return "Windows"; + } + + if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + return "MacOS"; + } + + if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + return "Linux"; + } + + return RuntimeInformation.OSDescription; + } +} diff --git a/src/Speckle.Core/Logging/SpeckleNonUserFacingException.cs b/src/Speckle.Core/Logging/SpeckleNonUserFacingException.cs new file mode 100644 index 00000000..0c2849a6 --- /dev/null +++ b/src/Speckle.Core/Logging/SpeckleNonUserFacingException.cs @@ -0,0 +1,17 @@ +using System; + +namespace Speckle.Core.Logging; + +/// +/// These are exceptions who's message is not user friendly +/// +public class SpeckleNonUserFacingException : SpeckleException +{ + public SpeckleNonUserFacingException() { } + + public SpeckleNonUserFacingException(string? message) + : base(message) { } + + public SpeckleNonUserFacingException(string? message, Exception? innerException) + : base(message, innerException) { } +} diff --git a/src/Speckle.Core/Models/ApplicationObject.cs b/src/Speckle.Core/Models/ApplicationObject.cs new file mode 100644 index 00000000..e58550b8 --- /dev/null +++ b/src/Speckle.Core/Models/ApplicationObject.cs @@ -0,0 +1,140 @@ +#nullable disable +using System.Collections.Generic; +using System.Linq; +using Speckle.Newtonsoft.Json; + +namespace Speckle.Core.Models; + +/// +/// A simple wrapper to keep track of the relationship between speckle objects and their host-application siblings in cases where the +/// cannot correspond with the (ie, on receiving operations). 
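To make the wrapper introduced above concrete, a receive-side sketch of creating and updating an ApplicationObject; the ids, type name, and log text are placeholders.

using Speckle.Core.Models;

var reportObj = new ApplicationObject("abc123", "Objects.Geometry.Mesh") // speckle id + descriptor
{
  applicationId = "rhino-guid-0001",
};

reportObj.Update(
  createdId: "native-id-42",
  status: ApplicationObject.State.Created,
  logItem: "Converted mesh with 120 faces"
);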
+/// +public class ApplicationObject +{ + public enum State + { + Unknown = default, + Created, // Speckle object created on send, or native objects created on receive + Skipped, // Speckle or Application object is not going to be sent or received + Updated, // Application object is replacing an existing object in the application + Failed, // Tried to convert & send or convert & bake but something went wrong + Removed, //Removed object from application + } + + public ApplicationObject(string id, string type) + { + OriginalId = id; + Descriptor = type; + Status = State.Unknown; + } + + /// + /// ID of the object from host application that generated it. + /// + public string applicationId { get; set; } + + /// + /// The container for the object in the native application + /// + public string Container { get; set; } + + /// + /// Indicates if conversion is supported by the converter + /// + public bool Convertible { get; set; } + + /// + /// The fallback values if direct conversion is not available, typically displayValue + /// + [JsonIgnore] + public List Fallback { get; set; } = new(); + + /// + /// The Speckle id (on receive) or native id (on send) + /// + /// + /// Used to retrieve this object in ProgressReport.GetReportObject(), typically to pass between connectors and converters + /// + public string OriginalId { get; set; } + + /// + /// A descriptive string to describe the object. Use the object type as default. + /// + public string Descriptor { get; set; } + + /// + /// The created object ids associated with this object + /// + /// + /// On send, this is currently left empty as generating Speckle ids would be performance expensive + /// + public List CreatedIds { get; set; } = new(); + + /// + /// Conversion status of object + /// + public State Status { get; set; } + + /// + /// Conversion notes or other important information to expose to the user + /// + public List Log { get; set; } = new(); + + /// + /// Converted objects corresponding to this object + /// + /// + /// Used during receive for convenience, corresponds to CreatedIds + /// + [JsonIgnore] + public List Converted { get; set; } = new(); + + public void Update( + string createdId = null, + List createdIds = null, + State? status = null, + string container = null, + List log = null, + string logItem = null, + List converted = null, + object convertedItem = null, + string descriptor = null + ) + { + createdIds?.Where(o => !string.IsNullOrEmpty(o) && !CreatedIds.Contains(o))?.ToList().ForEach(CreatedIds.Add); + + if (createdId != null && !CreatedIds.Contains(createdId)) + { + CreatedIds.Add(createdId); + } + + if (status.HasValue) + { + Status = status.Value; + } + + log?.Where(o => !string.IsNullOrEmpty(o) && !Log.Contains(o))?.ToList().ForEach(Log.Add); + + if (!string.IsNullOrEmpty(logItem) && !Log.Contains(logItem)) + { + Log.Add(logItem); + } + + if (convertedItem != null && !Converted.Contains(convertedItem)) + { + Converted.Add(convertedItem); + } + + converted?.Where(o => o != null && !Converted.Contains(o))?.ToList().ForEach(Converted.Add); + + if (!string.IsNullOrEmpty(container)) + { + Container = container; + } + + if (!string.IsNullOrEmpty(descriptor)) + { + Descriptor = descriptor; + } + } +} diff --git a/src/Speckle.Core/Models/Attributes.cs b/src/Speckle.Core/Models/Attributes.cs new file mode 100644 index 00000000..b7721b6b --- /dev/null +++ b/src/Speckle.Core/Models/Attributes.cs @@ -0,0 +1,41 @@ +using System; + +namespace Speckle.Core.Models; + +/// +/// Flags an object's property as being detachable. 
+/// If set to true the default serialiser will persist it separately, and add a reference to the property's value in the original object. +/// Only applies to properties of types derived from the Base class. +/// +[AttributeUsage(AttributeTargets.Property)] +public sealed class DetachProperty : Attribute +{ + /// + /// Flags an object's property as being detachable. + /// If set to true the default serialiser will persist it separately, and add a reference to the property's value in the original object. + /// Only applies to properties of types derived from the Base class. + /// + /// Whether to detach the property or not. + public DetachProperty(bool detachable = true) + { + Detachable = detachable; + } + + public bool Detachable { get; } +} + +/// +/// Flags a list or array as splittable into chunks during serialisation. These chunks will be recomposed on deserialisation into the original list. Note: this attribute should be used in conjunction with . +/// Use this attribute on properties that can become very long and are not worth detaching into individual elements. +/// Objects per chunk: for simple types, like numbers, use a high value (>10000); for other objects, use a more conservative number depending on their serialised size. +/// +[AttributeUsage(AttributeTargets.Property)] +public sealed class Chunkable : Attribute +{ + public Chunkable(int maxObjCountPerChunk = 1000) + { + MaxObjCountPerChunk = maxObjCountPerChunk; + } + + public int MaxObjCountPerChunk { get; } +} diff --git a/src/Speckle.Core/Models/Base.cs b/src/Speckle.Core/Models/Base.cs new file mode 100644 index 00000000..b7e6eeac --- /dev/null +++ b/src/Speckle.Core/Models/Base.cs @@ -0,0 +1,308 @@ +#nullable disable +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Reflection; +using System.Text.RegularExpressions; +using Speckle.Core.Api; +using Speckle.Core.Helpers; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Serialisation; +using Speckle.Core.Transports; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Linq; + +namespace Speckle.Core.Models; + +/// +/// Base class for all Speckle object definitions. Provides unified hashing, type extraction and serialisation. +/// When developing a speckle kit, use this class as a parent class. +/// Dynamic properties naming conventions: +/// 👉 "__" at the start of a property means it will be ignored, both for hashing and serialisation (e.g., "__ignoreMe"). +/// 👉 "@" at the start of a property name means it will be detached (when serialised with a transport) (e.g.((dynamic)obj)["@meshEquivalent"] = ...) . +/// +[Serializable] +[SuppressMessage("ReSharper", "InconsistentNaming")] +[SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Serialized property names are camelCase by design")] +public class Base : DynamicBase +{ + private static readonly Regex s_chunkSyntax = Constants.ChunkPropertyNameRegex; + + private string _type; + + /// + /// A speckle object's id is an unique hash based on its properties. NOTE: this field will be null unless the object was deserialised from a source. Use the function to get it. + /// + [SchemaIgnore] + public virtual string id { get; set; } + +#nullable enable //Starting nullability syntax here so that `id` null oblivious, + + /// + /// This property will only be populated if the object is retreieved from storage. Use otherwise. 
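A sketch of how the DetachProperty and Chunkable attributes defined just above are typically combined on a Base-derived model; the MySketchElement type and its members are hypothetical.

using System.Collections.Generic;
using Speckle.Core.Models;

public class MySketchElement : Base
{
  [DetachProperty]
  public Base? referencedGeometry { get; set; }        // persisted separately, referenced by id

  [DetachProperty, Chunkable(20000)]
  public List<double> vertices { get; set; } = new();  // long numeric list, serialised in chunks
}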
+ /// + [SchemaIgnore] + public virtual long totalChildrenCount { get; set; } + + /// + /// Secondary, ideally host application driven, object identifier. + /// + [SchemaIgnore] + public string? applicationId { get; set; } + + /// + /// Holds the type information of this speckle object, derived automatically + /// from its assembly name and inheritance. + /// + [SchemaIgnore] + public virtual string speckle_type + { + get + { + if (_type == null) + { + List bases = new(); + Type myType = GetType(); + + while (myType.Name != nameof(Base)) + { + if (!myType.IsAbstract) + { + bases.Add(myType.FullName); + } + + myType = myType.BaseType!; + } + + if (bases.Count == 0) + { + _type = nameof(Base); + } + else + { + bases.Reverse(); + _type = string.Join(":", bases); + } + } + + return _type; + } + } + + /// + /// Calculates the id (a unique hash) of this object. + /// + /// + /// This method fully serialize the object and any referenced objects. This has a tangible cost and should be avoided.
+ /// Objects retrieved from a transport already have an id property populated
+ /// The hash of a decomposed object differs from the hash of a non-decomposed object. + ///
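As a concrete illustration of the two hashes, a sketch comparing the decomposed and non-decomposed ids of an arbitrary Base-derived instance.

using Speckle.Core.Models;

static void PrintIds(Base someObject)
{
  string plainId = someObject.GetId();          // object hashed as-is
  string decomposedId = someObject.GetId(true); // detachable children are decomposed before hashing
  System.Console.WriteLine($"{plainId} vs {decomposedId}"); // the two ids generally differ
}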
+ /// If , will decompose the object in the process of hashing. + /// the resulting id (hash) + public string GetId(bool decompose = false) + { + var transports = decompose ? new[] { new MemoryTransport() } : Array.Empty(); + var serializer = new BaseObjectSerializerV2(transports); + + string obj = serializer.Serialize(this); + return JObject.Parse(obj).GetValue(nameof(id))!.ToString(); + } + + /// + /// Attempts to count the total number of detachable objects. + /// + /// The total count of the detachable children + 1 (itself). + public long GetTotalChildrenCount() + { + var parsed = new HashSet(); + return 1 + CountDescendants(this, parsed); + } + + private static long CountDescendants(Base @base, ISet parsed) + { + if (parsed.Contains(@base.GetHashCode())) + { + return 0; + } + + parsed.Add(@base.GetHashCode()); + + long count = 0; + var typedProps = @base.GetInstanceMembers(); + foreach (var prop in typedProps.Where(p => p.CanRead)) + { + bool isIgnored = + prop.IsDefined(typeof(ObsoleteAttribute), true) || prop.IsDefined(typeof(JsonIgnoreAttribute), true); + if (isIgnored) + { + continue; + } + + var detachAttribute = prop.GetCustomAttribute(true); + + object value = prop.GetValue(@base); + + if (detachAttribute is { Detachable: true }) + { + var chunkAttribute = prop.GetCustomAttribute(true); + if (chunkAttribute == null) + { + count += HandleObjectCount(value, parsed); + } + else + { + // Simplified chunking count handling. + if (value is IList asList) + { + count += asList.Count / chunkAttribute.MaxObjCountPerChunk; + } + } + } + } + + var dynamicProps = @base.GetDynamicMembers(); + foreach (var propName in dynamicProps) + { + if (!propName.StartsWith("@")) + { + continue; + } + + // Simplfied dynamic prop chunking handling + if (s_chunkSyntax.IsMatch(propName)) + { + var match = s_chunkSyntax.Match(propName); + _ = int.TryParse(match.Groups[match.Groups.Count - 1].Value, out int chunkSize); + + if (chunkSize != -1 && @base[propName] is IList asList) + { + count += asList.Count / chunkSize; + continue; + } + } + + count += HandleObjectCount(@base[propName], parsed); + } + + return count; + } + + private static long HandleObjectCount(object? value, ISet parsed) + { + long count = 0; + switch (value) + { + case Base b: + count++; + count += CountDescendants(b, parsed); + return count; + case IDictionary d: + { + foreach (DictionaryEntry kvp in d) + { + if (kvp.Value is Base b) + { + count++; + count += CountDescendants(b, parsed); + } + else + { + count += HandleObjectCount(kvp.Value, parsed); + } + } + + return count; + } + case IEnumerable e + and not string: + { + foreach (var arrValue in e) + { + if (arrValue is Base b) + { + count++; + count += CountDescendants(b, parsed); + } + else + { + count += HandleObjectCount(arrValue, parsed); + } + } + + return count; + } + default: + return count; + } + } + + /// + /// Creates a shallow copy of the current base object. + /// This operation does NOT copy/duplicate the data inside each prop. + /// The new object's property values will be pointers to the original object's property value. + /// + /// A shallow copy of the original object. 
+ public Base ShallowCopy() + { + Type type = GetType(); + Base myDuplicate = (Base)Activator.CreateInstance(type); + myDuplicate.id = id; + myDuplicate.applicationId = applicationId; + + foreach ( + var kvp in GetMembers( + DynamicBaseMemberType.Instance | DynamicBaseMemberType.Dynamic | DynamicBaseMemberType.SchemaIgnored + ) + ) + { + var propertyInfo = type.GetProperty(kvp.Key); + if (propertyInfo is not null && !propertyInfo.CanWrite) + { + continue; + } + + try + { + myDuplicate[kvp.Key] = kvp.Value; + } + catch (Exception ex) when (!ex.IsFatal()) + { + // avoids any last ditch unsettable or strange props. + SpeckleLog.Logger + .ForContext("canWrite", propertyInfo?.CanWrite) + .ForContext("canRead", propertyInfo?.CanRead) + .Warning( + "Shallow copy of {type} failed to copy {propertyName} of type {propertyType} with value {valueType}", + type, + kvp.Key, + propertyInfo?.PropertyType, + kvp.Value?.GetType() + ); + } + } + + return myDuplicate; + } + + #region Obsolete + /// + [Obsolete("Serializer v1 is deprecated, use other overload(s)", true)] + public string GetId(SerializerVersion serializerVersion) + { + return GetId(false, serializerVersion); + } + + /// + [Obsolete("Serializer v1 is deprecated, use other overload(s)", true)] + public string GetId(bool decompose, SerializerVersion serializerVersion) + { + throw new NotImplementedException( + "Overload has been deprecated along with SerializerV1, use other overload (uses SerializerV2)" + ); + } + + #endregion +} diff --git a/src/Speckle.Core/Models/Blob.cs b/src/Speckle.Core/Models/Blob.cs new file mode 100644 index 00000000..0310824d --- /dev/null +++ b/src/Speckle.Core/Models/Blob.cs @@ -0,0 +1,74 @@ +#nullable disable +using System; +using System.Diagnostics.CodeAnalysis; +using System.IO; +using System.Runtime.Serialization; +using Speckle.Newtonsoft.Json; + +namespace Speckle.Core.Models; + +public class Blob : Base +{ + [JsonIgnore] + public static int LocalHashPrefixLength => 20; + + private string _filePath; + private string _hash; + private bool _isHashExpired = true; + + public Blob() { } + + public Blob(string filePath) + { + this.filePath = filePath; + } + + public string filePath + { + get => _filePath; + set + { + originalPath ??= value; + + _filePath = value; + _isHashExpired = true; + } + } + + public string originalPath { get; set; } + + /// + /// For blobs, the id is the same as the file hash. Please note, when deserialising, the id will be set from the original hash generated on sending. 
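A brief sketch of the Blob wrapper being defined here, assuming the placeholder file exists on disk; the file hash doubles as the blob's id, as the property below explains.

using Speckle.Core.Models;

var blob = new Blob(@"C:\temp\facade-texture.png"); // placeholder path
string hash = blob.GetFileHash();                   // also surfaced as blob.id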
+ /// + public override string id + { + get => GetFileHash(); + set => base.id = value; + } + + public string GetFileHash() + { + if ((_isHashExpired || _hash == null) && filePath != null) + { + _hash = Utilities.HashFile(filePath); + } + + return _hash; + } + + [OnDeserialized] + internal void OnDeserialized(StreamingContext context) + { + _isHashExpired = false; + } + + public string GetLocalDestinationPath(string blobStorageFolder) + { + var fileName = Path.GetFileName(filePath); + return Path.Combine(blobStorageFolder, $"{id.Substring(0, 10)}-{fileName}"); + } + + [Obsolete("Renamed to " + nameof(GetLocalDestinationPath))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public string getLocalDestinationPath(string blobStorageFolder) => GetLocalDestinationPath(blobStorageFolder); +} diff --git a/src/Speckle.Core/Models/Collection.cs b/src/Speckle.Core/Models/Collection.cs new file mode 100644 index 00000000..c5d45436 --- /dev/null +++ b/src/Speckle.Core/Models/Collection.cs @@ -0,0 +1,48 @@ +using System.Collections.Generic; + +namespace Speckle.Core.Models; + +/// +/// A simple container for organising objects within a model and preserving object hierarchy. +/// A container is defined by a human-readable , a unique , and its list of contained . +/// The can include an unrestricted number of objects including additional nested s. +/// +/// +/// A can be for example a Layer in Rhino/AutoCad, a collection in Blender, or a Category in Revit. +/// The location of each collection in the hierarchy of collections in a commit will be retrieved through commit traversal. +/// +public class Collection : Base +{ + public Collection() { } + + /// + /// Constructor for a basic collection. + /// + /// The human-readable name of this collection + /// + public Collection(string name, string collectionType) + { + this.name = name; + this.collectionType = collectionType; + } + + /// + /// The human-readable name of the . + /// + /// This name is not necessarily unique within a commit. Set the applicationId for a unique identifier. + public string name { get; set; } + + /// + /// The type of this collection + /// + public string collectionType { get; set; } + + /// + /// The elements contained in this . + /// + /// + /// This can include additional nested s. + /// + [DetachProperty] + public List elements { get; set; } = new(); +} diff --git a/src/Speckle.Core/Models/CommitObjectBuilder.cs b/src/Speckle.Core/Models/CommitObjectBuilder.cs new file mode 100644 index 00000000..3bdda977 --- /dev/null +++ b/src/Speckle.Core/Models/CommitObjectBuilder.cs @@ -0,0 +1,185 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Speckle.Core.Logging; +using Speckle.Core.Models.Extensions; + +namespace Speckle.Core.Models; + +/// +/// Abstract Builder class for a root commit object. +/// +/// The native object data type needed as input for building +/// +/// It is designed to be inherited by a host app specific implementation, +/// to give connectors flexibility in constructing their objects. 
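A small sketch of nesting Collections as described above; the layer names mimic a host-app layer tree and are purely illustrative.

using Speckle.Core.Models;

var model = new Collection("Model", "layer");
var walls = new Collection("Walls", "layer");
model.elements.Add(walls);                                   // nested collection
walls.elements.Add(new Base { applicationId = "wall-001" }); // any Base-derived element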
+/// Inheritors should also create some function to add +/// +[SuppressMessage( + "Naming", + "CA1708:Identifiers should differ by more than case", + Justification = "Class contains obsolete members that are kept for backwards compatiblity" +)] +public abstract class CommitObjectBuilder +{ + /// Special appId symbol for the root object + protected const string ROOT = "__Root"; + private const string ELEMENTS = nameof(Collection.elements); + + /// app id -> base + protected IDictionary Converted { get; } + + /// Base -> NestingInstructions ordered by priority + private readonly Dictionary> _nestingInstructions = new(); + + protected CommitObjectBuilder() + { + Converted = new Dictionary(); + } + + /// + /// Given the parameters, builds connector specific + /// to be applied when is called. + /// + /// + /// + public abstract void IncludeObject(Base conversionResult, TNativeObjectData nativeElement); + + /// + /// Iterates through the converted objects applying + /// + /// + /// Can be overriden to adjust exactly which objects get automatically applied, + /// or to inject additional items into the dict that should not be automatically applied. + /// + /// + public virtual void BuildCommitObject(Base rootCommitObject) + { + ApplyRelationships(Converted.Values, rootCommitObject); + } + + protected void SetRelationship(Base conversionResult, NestingInstructions nestingInstructions) + { + SetRelationship(conversionResult, new List { nestingInstructions }); + } + + /// + /// Sets information on how a given object should be nested in the commit tree. + /// encodes the order in which we should try and nest the given + /// when is called + /// + /// The object to be nested + /// Information about how the object ideally should be nested, in order of priority + protected void SetRelationship(Base conversionResult, IList nestingInstructionsList) + { + string? appId = conversionResult.applicationId; + if (appId != null) + { + Converted[appId] = conversionResult; + } + _nestingInstructions[conversionResult] = nestingInstructionsList; + } + + /// + /// For each object in + /// + /// + /// + /// + protected void ApplyRelationships(IEnumerable toAdd, Base rootCommitObject) + { + foreach (Base c in toAdd) + { + try + { + ApplyRelationship(c, rootCommitObject); + } + catch (Exception ex) when (!ex.IsFatal()) + { + // This should never happen, we should be ensuring that at least one of the parents is valid. + SpeckleLog.Logger.Fatal(ex, "Failed to add object {speckleType} to commit object", c?.GetType()); + } + } + } + + /// + /// Will attempt to find and nest the object + /// under the first valid parent according to the dictionary. + /// + /// + /// A parent is considered valid if + /// 1. Is non null + /// 2. Is in the dictionary + /// 3. Has (or can dynamically accept) a typed property with the propName specified by the item + /// 4. Said can accept the object's type + /// + /// + /// + /// Thrown when no valid parent was found for given + protected void ApplyRelationship(Base current, Base rootCommitObject) + { + var instructions = _nestingInstructions[current]; + foreach (var instruction in instructions) + { + if (instruction.ParentApplicationId is null) + { + continue; + } + + Base? 
parent; + if (instruction.ParentApplicationId == ROOT) + { + parent = rootCommitObject; + } + else + { + Converted.TryGetValue(instruction.ParentApplicationId, out parent); + } + + if (parent is null) + { + continue; + } + + try + { + instruction.Nest(parent, current); + return; + } + catch (Exception ex) when (!ex.IsFatal()) + { + // A parent was found, but it was invalid (Likely because of a type mismatch on a `elements` property) + SpeckleLog.Logger.Warning(ex, "Failed to add object {speckleType} to a converted parent", current.GetType()); + } + } + + throw new InvalidOperationException( + $"Could not find a valid parent for object of type {current.GetType()}. Checked {instructions.Count} potential parent, and non were converted!" + ); + } + + protected static void NestUnderElementsProperty(Base parent, Base child) + { + NestUnderProperty(parent, child, ELEMENTS); + } + + protected static void NestUnderProperty(Base parent, Base child, string property) + { + if (parent.GetDetachedProp(property) is not IList elements) + { + elements = new List(); + parent.SetDetachedProp(property, elements); + } + + elements.Add(child); + } + + [Obsolete("Renamed to " + nameof(ROOT))] + [SuppressMessage("Style", "IDE1006:Naming Styles")] + protected const string Root = ROOT; + + [Obsolete("Renamed to " + nameof(Converted))] + [SuppressMessage("Style", "IDE1006:Naming Styles")] + protected IDictionary converted => Converted; +} diff --git a/src/Speckle.Core/Models/DynamicBase.cs b/src/Speckle.Core/Models/DynamicBase.cs new file mode 100644 index 00000000..9c024df8 --- /dev/null +++ b/src/Speckle.Core/Models/DynamicBase.cs @@ -0,0 +1,339 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Dynamic; +using System.Linq; +using System.Reflection; +using Speckle.Core.Kits; +using Speckle.Core.Logging; + +namespace Speckle.Core.Models; + +/// +/// Base class implementing a bunch of nice dynamic object methods, like adding and removing props dynamically. Makes c# feel like json. +/// Orginally adapted from Rick Strahl 🤘 +/// https://weblog.west-wind.com/posts/2012/feb/08/creating-a-dynamic-extensible-c-expando-object +/// +public class DynamicBase : DynamicObject, IDynamicMetaObjectProvider +{ + /// + /// Default value for + /// + public const DynamicBaseMemberType DEFAULT_INCLUDE_MEMBERS = + DynamicBaseMemberType.Instance | DynamicBaseMemberType.Dynamic; + + private static readonly Dictionary> s_propInfoCache = new(); + + /// + /// The actual property bag, where dynamically added props are stored. + /// + private readonly Dictionary _properties = new(); + + /// + /// Sets and gets properties using the key accessor pattern. + /// + /// + /// myObject["superProperty"] = 42; + /// + /// + /// + [IgnoreTheItem] + public object? this[string key] + { + get + { + if (_properties.TryGetValue(key, out object? 
value)) + { + return value; + } + + PopulatePropInfoCache(GetType()); + var prop = s_propInfoCache[GetType()].FirstOrDefault(p => p.Name == key); + + if (prop == null) + { + return null; + } + + return prop.GetValue(this); + } + set + { + if (!IsPropNameValid(key, out string reason)) + { + throw new InvalidPropNameException(key, reason); + } + + if (_properties.ContainsKey(key)) + { + _properties[key] = value; + return; + } + + PopulatePropInfoCache(GetType()); + var prop = s_propInfoCache[GetType()].FirstOrDefault(p => p.Name == key); + + if (prop == null) + { + _properties[key] = value; + return; + } + try + { + prop.SetValue(this, value); + } + catch (Exception ex) when (!ex.IsFatal()) + { + throw new SpeckleException($"Failed to set value for {GetType().Name}.{prop.Name}", ex); + } + } + } + + /// + /// + /// Gets properties via the dot syntax. + /// ((dynamic)myObject).superProperty; + /// + /// + public override bool TryGetMember(GetMemberBinder binder, out object? result) + { + return _properties.TryGetValue(binder.Name, out result); + } + + /// + /// Sets properties via the dot syntax. + ///
((dynamic)myObject).superProperty = something;
+ ///
+ /// + /// + /// + public override bool TrySetMember(SetMemberBinder binder, object? value) + { + var valid = IsPropNameValid(binder.Name, out _); + if (valid) + { + _properties[binder.Name] = value; + } + + return valid; + } + + private static readonly HashSet s_disallowedPropNameChars = new() { '.', '/' }; + + public static string RemoveDisallowedPropNameChars(string name) + { + foreach (char c in s_disallowedPropNameChars) + { + name = name.Replace(c, ' '); + } + + return name; + } + + public bool IsPropNameValid(string name, out string reason) + { + if (string.IsNullOrEmpty(name) || name == "@") + { + reason = "Found empty prop name"; + return false; + } + + if (name.StartsWith("@@")) + { + reason = "Only one leading '@' char is allowed. This signals the property value should be detached."; + return false; + } + + foreach (char c in name) + { + if (s_disallowedPropNameChars.Contains(c)) + { + reason = $"Prop with name '{name}' contains invalid characters. The following characters are not allowed: ./"; + return false; + } + } + + reason = ""; + return true; + } + + private static void PopulatePropInfoCache(Type type) + { + if (!s_propInfoCache.ContainsKey(type)) + { + s_propInfoCache[type] = type.GetProperties(BindingFlags.Instance | BindingFlags.Public) + .Where(p => !p.IsDefined(typeof(IgnoreTheItemAttribute), true)) + .ToList(); + } + } + + /// + /// Gets all of the property names on this class, dynamic or not. + /// + [Obsolete("Use `GetMembers(DynamicBaseMemberType.All).Keys` instead")] + public override IEnumerable GetDynamicMemberNames() + { + PopulatePropInfoCache(GetType()); + var pinfos = s_propInfoCache[GetType()]; + + var names = new List(_properties.Count + pinfos.Count); + foreach (var pinfo in pinfos) + { + names.Add(pinfo.Name); + } + + foreach (var kvp in _properties) + { + names.Add(kvp.Key); + } + + return names; + } + + /// + /// Gets the names of the defined class properties (typed). + /// + /// + [Obsolete("Use GetMembers(DynamicBaseMemberType.InstanceAll).Keys instead")] + public IEnumerable GetInstanceMembersNames() + { + return GetInstanceMembersNames(GetType()); + } + + public static IEnumerable GetInstanceMembersNames(Type t) + { + PopulatePropInfoCache(t); + var pinfos = s_propInfoCache[t]; + + var names = new List(pinfos.Count); + foreach (var pinfo in pinfos) + { + names.Add(pinfo.Name); + } + + return names; + } + + /// + /// Gets the defined (typed) properties of this object. + /// + /// + public IEnumerable GetInstanceMembers() + { + return GetInstanceMembers(GetType()); + } + + public static IEnumerable GetInstanceMembers(Type t) + { + PopulatePropInfoCache(t); + var pinfos = s_propInfoCache[t]; + + var names = new List(pinfos.Count); + + foreach (var pinfo in pinfos) + { + if (pinfo.Name != "Item") + { + names.Add(pinfo); + } + } + + return names; + } + + /// + /// Gets the names of the typed and dynamic properties that don't have a [SchemaIgnore] attribute. + /// + /// + [Obsolete("Use GetMembers().Keys instead")] + public IEnumerable GetMemberNames() + { + return GetMembers().Keys; + } + + /// + /// Gets the typed and dynamic properties. + /// + /// Specifies which members should be included in the resulting dictionary. Can be concatenated with "|" + /// A dictionary containing the key's and values of the object. 
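+ /// Illustrative usage (myObject stands in for any DynamicBase-derived instance; the names are assumptions for the example only):
+ /// var members = myObject.GetMembers(DynamicBaseMemberType.Instance | DynamicBaseMemberType.Dynamic);
+ /// foreach (var kvp in members) { Console.WriteLine($"{kvp.Key}: {kvp.Value}"); }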
+ public Dictionary GetMembers(DynamicBaseMemberType includeMembers = DEFAULT_INCLUDE_MEMBERS) + { + // Initialize an empty dict + var dic = new Dictionary(); + + // Add dynamic members + if (includeMembers.HasFlag(DynamicBaseMemberType.Dynamic)) + { + dic = new Dictionary(_properties); + } + + if (includeMembers.HasFlag(DynamicBaseMemberType.Instance)) + { + PopulatePropInfoCache(GetType()); + var pinfos = s_propInfoCache[GetType()].Where(x => + { + var hasIgnored = x.IsDefined(typeof(SchemaIgnore), true); + var hasObsolete = x.IsDefined(typeof(ObsoleteAttribute), true); + + // If obsolete is false and prop has obsolete attr + // OR + // If schemaIgnored is true and prop has schemaIgnore attr + return !( + !includeMembers.HasFlag(DynamicBaseMemberType.SchemaIgnored) && hasIgnored + || !includeMembers.HasFlag(DynamicBaseMemberType.Obsolete) && hasObsolete + ); + }); + foreach (var pi in pinfos) + { + if (!dic.ContainsKey(pi.Name)) //todo This is a TEMP FIX FOR #1969, and should be reverted after a proper fix is made! + { + dic.Add(pi.Name, pi.GetValue(this)); + } + } + } + + if (includeMembers.HasFlag(DynamicBaseMemberType.SchemaComputed)) + { + GetType() + .GetMethods() + .Where(e => e.IsDefined(typeof(SchemaComputedAttribute)) && !e.IsDefined(typeof(ObsoleteAttribute))) + .ToList() + .ForEach(e => + { + var attr = e.GetCustomAttribute(); + try + { + dic[attr.Name] = e.Invoke(this, null); + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.Warning(ex, "Failed to get computed member: {name}", attr.Name); + dic[attr.Name] = null; + } + }); + } + + return dic; + } + + /// + /// Gets the dynamically added property names only. + /// + /// + [Obsolete("Use GetMembers(DynamicBaseMemberType.Dynamic).Keys instead")] + public IEnumerable GetDynamicMembers() + { + return _properties.Keys; + } + + [Obsolete("Renamed to " + nameof(DEFAULT_INCLUDE_MEMBERS))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public const DynamicBaseMemberType DefaultIncludeMembers = DEFAULT_INCLUDE_MEMBERS; +} + +/// +/// This attribute is used internally to hide the this[key]{get; set;} property from inner reflection on members. +/// For more info see this discussion: https://speckle.community/t/why-do-i-keep-forgetting-base-objects-cant-use-item-as-a-dynamic-member/3246/5 +/// +[AttributeUsage(AttributeTargets.Property)] +internal sealed class IgnoreTheItemAttribute : Attribute { } diff --git a/src/Speckle.Core/Models/DynamicBaseMemberType.cs b/src/Speckle.Core/Models/DynamicBaseMemberType.cs new file mode 100644 index 00000000..9e9262dc --- /dev/null +++ b/src/Speckle.Core/Models/DynamicBaseMemberType.cs @@ -0,0 +1,45 @@ +using System; + +namespace Speckle.Core.Models; + +/// +/// Represents all different types of members that can be returned by +/// +[Flags] +public enum DynamicBaseMemberType +{ + /// + /// The typed members of the DynamicBase object + /// + Instance = 1, + + /// + /// The dynamically added members of the DynamicBase object + /// + Dynamic = 2, + + /// + /// The typed members flagged with attribute. + /// + Obsolete = 4, + + /// + /// The typed members flagged with attribute. + /// + SchemaIgnored = 8, + + /// + /// The typed methods flagged with TODO: + /// + SchemaComputed = 16, + + /// + /// All the typed members, including ones with or attributes. 
+ /// + InstanceAll = Instance + Obsolete + SchemaIgnored, + + /// + /// All the members, including dynamic and instance members flagged with or attributes + /// + All = InstanceAll + Dynamic +} diff --git a/src/Speckle.Core/Models/Extensions/BaseExtensions.cs b/src/Speckle.Core/Models/Extensions/BaseExtensions.cs new file mode 100644 index 00000000..d919d120 --- /dev/null +++ b/src/Speckle.Core/Models/Extensions/BaseExtensions.cs @@ -0,0 +1,292 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; + +namespace Speckle.Core.Models.Extensions; + +public static class BaseExtensions +{ + /// + /// Provides access to each base object in the traverse function, and decides whether the traverse function should continue traversing its children or not. + /// + /// + /// Should return 'true' if you wish to stop the traverse behaviour, 'false' otherwise. + /// + public delegate bool BaseRecursionBreaker(Base @base); + + /// + /// Traverses through the object and its children. + /// Only traverses through the first occurrence of a object (to prevent infinite recursion on circular references) + /// + /// The root object of the tree to flatten + /// Optional predicate function to determine whether to break (or continue) traversal of a object's children. + /// A flat List of objects. + /// + public static IEnumerable Flatten(this Base root, BaseRecursionBreaker? recursionBreaker = null) + { + recursionBreaker ??= _ => false; + + var cache = new HashSet(); + var traversal = Traverse( + root, + b => + { + if (!cache.Add(b.id)) + { + return true; + } + + return recursionBreaker.Invoke(b); + } + ); + + foreach (var b in traversal) + { + if (!cache.Contains(b.id)) + { + yield return b; + } + } + //Recursion break will be called after the above + } + + /// + /// Depth-first traversal of the specified object and all of its children as a deferred Enumerable, with a function to break the traversal. + /// + /// The object to traverse. + /// Predicate function to determine whether to break (or continue) traversal of a object's children. + /// Deferred Enumerable of the objects being traversed (iterable only once).
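+ /// Illustrative usage (root stands in for any Base instance): stop descending into objects that carry a display value:
+ /// foreach (Base b in root.Traverse(x => x["displayValue"] != null)) { /* visit b */ }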
+ public static IEnumerable Traverse(this Base root, BaseRecursionBreaker recursionBreaker) + { + var stack = new Stack(); + stack.Push(root); + + while (stack.Count > 0) + { + Base current = stack.Pop(); + yield return current; + + if (recursionBreaker(current)) + { + continue; + } + + foreach (string child in current.GetDynamicMemberNames()) + { + switch (current[child]) + { + case Base o: + stack.Push(o); + break; + case IDictionary dictionary: + { + foreach (object obj in dictionary.Keys) + { + if (obj is Base b) + { + stack.Push(b); + } + } + + break; + } + case IList collection: + { + foreach (object obj in collection) + { + if (obj is Base b) + { + stack.Push(b); + } + } + + break; + } + default: + break; + } + } + } + } + + public static string ToFormattedString(this Exception exception) + { + var messages = exception + .GetAllExceptions() + .Where(e => !string.IsNullOrWhiteSpace(e.Message)) + .Select(e => e.Message.Trim()); + string flattened = string.Join(Environment.NewLine + " ", messages); // <-- the separator here + return flattened; + } + + private static IEnumerable GetAllExceptions(this Exception exception) + { + yield return exception; + + if (exception is AggregateException aggrEx) + { + foreach (var innerEx in aggrEx.InnerExceptions.SelectMany(e => e.GetAllExceptions())) + { + yield return innerEx; + } + } + else if (exception.InnerException != null) + { + foreach (var innerEx in exception.InnerException.GetAllExceptions()) + { + yield return innerEx; + } + } + } + + /// + /// see + /// + /// + /// + /// elements + public static object? GetDetachedProp(this Base speckleObject, string propName) + { + var detachedPropName = GetDetachedPropName(speckleObject, propName); + return speckleObject[detachedPropName]; + } + + /// + /// see + /// + /// + /// + /// Value to set + public static void SetDetachedProp(this Base speckleObject, string propName, object? value) + { + var detachedPropName = GetDetachedPropName(speckleObject, propName); + speckleObject[detachedPropName] = value; + } + + /// + /// Returns if the given has an instance prop of the same name + /// otherwise returns with a '@' prefix for dynamic detaching. + /// + /// + /// These functions are workarounds for '@' prefixed property names being treated as unique keys. + /// And is useful in circumstances where you want to get/set detached properties without caring about the derived class definition + /// This behaviour, and these functions may be changed in future releases. + /// + /// + /// the property name to check for + /// detached property name + public static string GetDetachedPropName(this Base speckleObject, string propName) + { + return speckleObject.GetMembers(DynamicBaseMemberType.Instance).ContainsKey(propName) ? propName : $"@{propName}"; + } + + /// + /// Checks if an object "is displayable" i.e. has a displayValue property that is a list of base. + /// This is to mirror the selection logic of our viewer package, where any "displayable object" will become + /// a single selectable entity. + /// + /// The Base object to check. + /// True if the object is displayable, false otherwise. + public static bool IsDisplayableObject(this Base speckleObject) + { + return speckleObject.TryGetDisplayValue() != null; + } + + public static IReadOnlyList? TryGetDisplayValue(this Base obj) + where T : Base + { + var rawDisplayValue = obj["displayValue"] ?? 
obj["@displayValue"]; + return rawDisplayValue switch + { + T b => new List { b }, + IReadOnlyList list => list, + _ => null + }; + } + + public static IReadOnlyList? TryGetDisplayValue(this Base obj) + { + return TryGetDisplayValue(obj); + } + + public static string? TryGetName(this Base obj) + { + return obj["name"] as string; + } + + public static IEnumerable? TryGetParameters(this Base obj) + where T : Base + { + var parameters = (obj["parameters"] ?? obj["@parameters"]) as Base; + return parameters?.GetMembers(DynamicBaseMemberType.Dynamic).Values.OfType(); + } + + public static IEnumerable? TryGetParameters(this Base obj) + { + return TryGetParameters(obj); + } + + /// + /// A variation of the OG Traversal extension from Alan, but with tracking the object path as well. + /// + /// Delegate condition to stop traverse. + /// List of base objects with their collection path. + public static IEnumerable<(string[], Base)> TraverseWithPath(this Base root, BaseRecursionBreaker recursionBreaker) + { + var stack = new Stack<(List, Base)>(); + stack.Push((new List(), root)); + + while (stack.Count > 0) + { + (List path, Base current) = stack.Pop(); + yield return (path.ToArray(), current); + + if (recursionBreaker(current)) + { + continue; + } + + foreach (string child in current.GetDynamicMemberNames()) + { + // NOTE: we can store collections rather than just path names. Where we have an actual collection, use that, where not, create a mock one based on the prop name + var localPathFragment = child; + if (current is Collection { name: { } } c) + { + localPathFragment = c.name; + } + + var newPath = new List(path) { localPathFragment }; + switch (current[child]) + { + case Base o: + stack.Push((newPath, o)); + break; + case IDictionary dictionary: + { + foreach (object obj in dictionary.Keys) + { + if (obj is Base b) + { + stack.Push((newPath, b)); + } + } + + break; + } + case IList collection: + { + foreach (object obj in collection) + { + if (obj is Base b) + { + stack.Push((newPath, b)); + } + } + break; + } + } + } + } + } +} diff --git a/src/Speckle.Core/Models/Extras.cs b/src/Speckle.Core/Models/Extras.cs new file mode 100644 index 00000000..371d97b6 --- /dev/null +++ b/src/Speckle.Core/Models/Extras.cs @@ -0,0 +1,304 @@ +#nullable disable +using System; +using System.Collections.Generic; +using System.Linq; +using Speckle.Core.Models.Extensions; + +namespace Speckle.Core.Models; + +/// +/// Wrapper around other, third party, classes that are not coming from a speckle kit. +/// Serialization and deserialization of the base object happens through default Newtonsoft converters. If your object does not de/serialize correctly, this class will not prevent that from happening. +/// Limitations: +/// - Base object needs to be serializable. +/// - Inline collection declarations with values do not behave correctly. +/// - Your class needs to have a void constructor. +/// - Probably more. File a bug! +/// +public class Abstract : Base +{ + private object _base; + + /// + /// See for limitations of this approach. + /// + public Abstract() { } + + /// + /// See for limitations of this approach. + /// + /// + public Abstract(object _original) + { + @base = _original; + assemblyQualifiedName = @base.GetType().AssemblyQualifiedName; + } + + public string assemblyQualifiedName { get; set; } + + /// + /// The original object. 
+ /// + public object @base + { + get => _base; + set + { + _base = value; + assemblyQualifiedName = value.GetType().AssemblyQualifiedName; + } + } +} + +/// +/// In short, this helps you chunk big things into smaller things. +/// See the following reference. +/// +public class DataChunk : Base +{ + public List data { get; set; } = new(); +} + +public class ObjectReference : Base +{ + public new string speckle_type = "reference"; + + public string referencedId { get; set; } + + public Dictionary closure { get; set; } +} + +public class ProgressEventArgs : EventArgs +{ + public ProgressEventArgs(int current, int total, string scope) + { + this.current = current; + this.total = total; + this.scope = scope; + } + + public int current { get; set; } + public int total { get; set; } + public string scope { get; set; } +} + +public class ProgressReport +{ + public Dictionary ReportObjects { get; set; } = new(); + + public List SelectedReportObjects { get; set; } = new(); + + public void Log(ApplicationObject obj) + { + var reportObject = UpdateReportObject(obj); + if (reportObject == null) + { + ReportObjects.Add(obj.OriginalId, obj); + } + } + + public ApplicationObject UpdateReportObject(ApplicationObject obj) + { + if (ReportObjects.TryGetValue(obj.OriginalId, out ApplicationObject reportObject)) + { + reportObject.Update( + createdIds: obj.CreatedIds, + container: obj.Container, + converted: obj.Converted, + log: obj.Log, + descriptor: obj.Descriptor + ); + + if (obj.Status != ApplicationObject.State.Unknown) + { + reportObject.Update(status: obj.Status); + } + + return reportObject; + } + + return null; + } + + [Obsolete("Use TryGetValue or Dictionary indexing", true)] + public bool GetReportObject(string id, out int index) + { + throw new NotImplementedException(); + // var _reportObject = ReportObjects.Where(o => o.OriginalId == id)?.FirstOrDefault(); + // index = _reportObject != null ? ReportObjects.IndexOf(_reportObject) : -1; + // return index == -1 ? 
false : true; + } + + public void Merge(ProgressReport report) + { + lock (_operationErrorsLock) + { + OperationErrors.AddRange(report.OperationErrors); + } + + lock (_conversionLogLock) + { + ConversionLog.AddRange(report.ConversionLog); + } + + // update report object notes + foreach (var item in ReportObjects.Values) + { + var ids = new List { item.OriginalId }; + if (item.Fallback.Count > 0) + { + ids.AddRange(item.Fallback.Select(o => o.OriginalId)); + } + + if (item.Status == ApplicationObject.State.Unknown) + { + if (report.ReportObjects.TryGetValue(item.OriginalId, out var reportObject)) + { + item.Status = reportObject.Status; + } + } + + foreach (var id in ids) + { + //if (report.GetReportObject(id, out int index)) + if (report.ReportObjects.TryGetValue(id, out var reportObject)) + { + foreach (var logItem in reportObject.Log) + { + if (!item.Log.Contains(logItem)) + { + item.Log.Add(logItem); + } + } + + foreach (var createdId in reportObject.CreatedIds) + { + if (!item.CreatedIds.Contains(createdId)) + { + item.CreatedIds.Add(createdId); + } + } + + foreach (var convertedItem in reportObject.Converted) + { + if (!item.Converted.Contains(convertedItem)) + { + item.Converted.Add(convertedItem); + } + } + } + } + } + } + + #region Conversion + + /// + /// Keeps track of the conversion process + /// + public List ConversionLog { get; } = new(); + + private readonly object _conversionLogLock = new(); + + public string ConversionLogString + { + get + { + var summary = ""; + lock (_conversionLogLock) + { + var converted = ConversionLog.Count(x => x.ToLowerInvariant().Contains("converted")); + var created = ConversionLog.Count(x => x.ToLowerInvariant().Contains("created")); + var skipped = ConversionLog.Count(x => x.ToLowerInvariant().Contains("skipped")); + var failed = ConversionLog.Count(x => x.ToLowerInvariant().Contains("failed")); + var updated = ConversionLog.Count(x => x.ToLowerInvariant().Contains("updated")); + + summary += converted > 0 ? $"CONVERTED: {converted}\n" : ""; + summary += created > 0 ? $"CREATED: {created}\n" : ""; + summary += updated > 0 ? $"UPDATED: {updated}\n" : ""; + summary += skipped > 0 ? $"SKIPPED: {skipped}\n" : ""; + summary += failed > 0 ? $"FAILED: {failed}\n" : ""; + summary = !string.IsNullOrEmpty(summary) ? $"SUMMARY\n\n{summary}\n\n" : ""; + + return summary + string.Join("\n", ConversionLog); + } + } + } + + public void Log(string text) + { + var time = DateTime.Now.ToLocalTime().ToString("dd/MM/yy HH:mm:ss"); + lock (_conversionLogLock) + { + ConversionLog.Add(time + " " + text); + } + } + + /// + /// Keeps track of errors in the conversions. + /// + public List ConversionErrors { get; } = new(); + + private readonly object _conversionErrorsLock = new(); + + public string ConversionErrorsString + { + get + { + lock (_conversionErrorsLock) + { + return string.Join("\n", ConversionErrors.Select(x => x.Message).Distinct()); + } + } + } + + public int ConversionErrorsCount => ConversionErrors.Count; + + public void LogConversionError(Exception exception) + { + lock (_conversionErrorsLock) + { + ConversionErrors.Add(exception); + } + + Log(exception.Message); + } + + #endregion + + #region Operation + + /// + /// Keeps track of HANDLED errors that occur during send/recieve commands. + /// + /// + /// Handled errors specific to the conversion, should be added to ConversionErrors + /// Unhandleable errors (i.e. that lead to the entire send/receive failing) should be Thrown instead. 
+ /// + public List OperationErrors { get; } = new(); + + private readonly object _operationErrorsLock = new(); + + public string OperationErrorsString + { + get + { + lock (_operationErrorsLock) + { + return string.Join("\n", OperationErrors.Select(x => x.ToFormattedString()).Distinct()); + } + } + } + + public int OperationErrorsCount => OperationErrors.Count; + + public void LogOperationError(Exception exception) + { + lock (_operationErrorsLock) + { + OperationErrors.Add(exception); + } + } + + #endregion +} diff --git a/src/Speckle.Core/Models/GraphTraversal/DefaultTraversal.cs b/src/Speckle.Core/Models/GraphTraversal/DefaultTraversal.cs new file mode 100644 index 00000000..885b031d --- /dev/null +++ b/src/Speckle.Core/Models/GraphTraversal/DefaultTraversal.cs @@ -0,0 +1,212 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Diagnostics.Contracts; +using System.Linq; +using Speckle.Core.Kits; + +namespace Speckle.Core.Models.GraphTraversal; + +[SuppressMessage( + "Naming", + "CA1708:Identifiers should differ by more than case", + Justification = "Class contains obsolete members that are kept for backwards compatiblity" +)] +public static class DefaultTraversal +{ + public static GraphTraversal CreateTraversalFunc() + { + var convertableRule = TraversalRule + .NewTraversalRule() + .When(b => b.GetType() != typeof(Base)) + .When(HasDisplayValue) + .ContinueTraversing(_ => ElementsPropAliases); + + return new GraphTraversal(convertableRule, s_ignoreResultsRule, DefaultRule.ShouldReturnToOutput(false)); + } + + //These functions are just meant to make the syntax of defining rules less verbose, they are likely to change frequently/be restructured + #region Helper Functions + + //WORKAROUND: ideally, traversal rules would not have Objects specific rules. 
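+ //The rule below matches structural analysis result objects by their speckle_type and continues traversal into none
+ //of their members, so result sub-trees are not walked any deeper.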
+ private static readonly ITraversalRule s_ignoreResultsRule = TraversalRule + .NewTraversalRule() + .When(o => o.speckle_type.Contains("Objects.Structural.Results")) + .ContinueTraversing(None); + + public static ITraversalBuilderReturn DefaultRule => + TraversalRule.NewTraversalRule().When(_ => true).ContinueTraversing(Members()); + + public static readonly IReadOnlyList ElementsPropAliases = new[] { "elements", "@elements" }; + + [Pure] + public static IEnumerable ElementsAliases(Base _) + { + return ElementsPropAliases; + } + + public static bool HasElements(Base x) + { + return ElementsPropAliases.Any(m => x[m] != null); + } + + public static readonly IReadOnlyList DefinitionPropAliases = new[] { "definition", "@definition" }; + + [Pure] + public static IEnumerable DefinitionAliases(Base _) + { + return DefinitionPropAliases; + } + + public static bool HasDefinition(Base x) + { + return DefinitionPropAliases.Any(m => x[m] != null); + } + + public static readonly IReadOnlyList DisplayValuePropAliases = new[] { "displayValue", "@displayValue" }; + + [Pure] + public static IEnumerable DisplayValueAliases(Base _) + { + return DisplayValuePropAliases; + } + + public static bool HasDisplayValue(Base x) + { + return DisplayValuePropAliases.Any(m => x[m] != null); + } + + public static readonly IReadOnlyList GeometryPropAliases = new[] { "geometry", "@geometry" }; + + [Pure] + public static IEnumerable GeometryAliases(Base _) + { + return GeometryPropAliases; + } + + public static bool HasGeometry(Base x) + { + return GeometryPropAliases.Any(m => x[m] != null); + } + + [Pure] + public static IEnumerable None(Base _) + { + return Enumerable.Empty(); + } + + internal static SelectMembers Members(DynamicBaseMemberType includeMembers = DynamicBase.DEFAULT_INCLUDE_MEMBERS) + { + return x => x.GetMembers(includeMembers).Keys; + } + + public static readonly string[] DisplayValueAndElementsPropAliases = DisplayValuePropAliases + .Concat(ElementsPropAliases) + .ToArray(); + + [Pure] + public static IEnumerable DisplayValueAndElementsAliases(Base _) + { + return DisplayValueAndElementsPropAliases; + } + + #endregion + + #region Legacy function varients + + [Obsolete("Renamed to " + nameof(ElementsPropAliases))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public static IReadOnlyList elementsPropAliases => ElementsPropAliases; + + [Obsolete("Renamed to " + nameof(DisplayValuePropAliases))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public static IReadOnlyList displayValuePropAliases => DisplayValuePropAliases; + + [Obsolete("Renamed to " + nameof(DefinitionPropAliases))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public static IReadOnlyList definitionPropAliases => DefinitionPropAliases; + + [Obsolete("Renamed to " + nameof(GeometryPropAliases))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public static IReadOnlyList geometryPropAliases => GeometryPropAliases; + + [Obsolete("Renamed to " + nameof(DisplayValueAndElementsPropAliases))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + [SuppressMessage("Performance", "CA1819:Properties should not return arrays", Justification = "Obsolete")] + public static string[] displayValueAndElementsPropAliases => DisplayValueAndElementsPropAliases; + + /// + /// + /// + /// + [Obsolete($"Consider using {nameof(CreateTraversalFunc)}")] + public static GraphTraversal 
CreateTraverseFunc(ISpeckleConverter converter) + { + return CreateLegacyTraverseFunc(converter.CanConvertToNative); + } + + /// + /// Legacy traversal rule that was dependent on the converter + /// + /// + /// Treats convertable objects and objects with displayValues as "convertable" such that only elements and dynamic props will be traversed + /// New code should use instead. + /// + /// + /// + [Obsolete($"Consider using {nameof(CreateTraversalFunc)}")] + public static GraphTraversal CreateLegacyTraverseFunc(Func canConvertToNative) + { + var convertableRule = TraversalRule + .NewTraversalRule() + .When(b => canConvertToNative(b)) + .When(HasDisplayValue) + .ContinueTraversing(_ => ElementsPropAliases); + + return new GraphTraversal(convertableRule, s_ignoreResultsRule, DefaultRule); + } + + /// + /// Traverses until finds a convertable object then HALTS deeper traversal + /// + /// + /// The DUI2 Revit connector does traversal, + /// so this traversal is a shallow traversal for directly convertable objects, + /// and a deep traversal for all other types + /// New code should use instead. + /// + /// + /// + [Obsolete($"Consider using {nameof(CreateTraversalFunc)}")] + public static GraphTraversal CreateRevitTraversalFunc(ISpeckleConverter converter) + { + var convertableRule = TraversalRule + .NewTraversalRule() + .When(converter.CanConvertToNative) + .When(HasDisplayValue) + .ContinueTraversing(None); + + return new GraphTraversal(convertableRule, s_ignoreResultsRule, DefaultRule); + } + + /// + /// Traverses until finds a convertable object (or fallback) then traverses members + /// + /// + /// New code should use instead. + /// + /// + /// + [Obsolete($"Consider using {nameof(CreateTraversalFunc)}")] + public static GraphTraversal CreateBIMTraverseFunc(ISpeckleConverter converter) + { + var bimElementRule = TraversalRule + .NewTraversalRule() + .When(converter.CanConvertToNative) + .ContinueTraversing(ElementsAliases); + + return new GraphTraversal(bimElementRule, s_ignoreResultsRule, DefaultRule); + } + + #endregion +} diff --git a/src/Speckle.Core/Models/GraphTraversal/GraphTraversal.cs b/src/Speckle.Core/Models/GraphTraversal/GraphTraversal.cs new file mode 100644 index 00000000..c397d292 --- /dev/null +++ b/src/Speckle.Core/Models/GraphTraversal/GraphTraversal.cs @@ -0,0 +1,151 @@ +using System.Collections; +using System.Collections.Generic; + +namespace Speckle.Core.Models.GraphTraversal; + +public class GraphTraversal : GraphTraversal +{ + public GraphTraversal(params ITraversalRule[] traversalRule) + : base(traversalRule) { } + + public static readonly string traversalContextId = "traversalContextId"; + + protected override TraversalContext NewContext(Base current, string? propName, TraversalContext? parent) + { + return new TraversalContext(current, propName, parent); + } +} + +public abstract class GraphTraversal + where T : TraversalContext +{ + private readonly ITraversalRule[] _rules; + + protected GraphTraversal(params ITraversalRule[] traversalRule) + { + _rules = traversalRule; + } + + /// + /// Given object, will recursively traverse members according to the provided traversal rules. 
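+ /// Illustrative usage (commitObject stands in for any received Base):
+ /// var traversal = DefaultTraversal.CreateTraversalFunc();
+ /// foreach (var tc in traversal.Traverse(commitObject)) { /* tc.Current is the Base visited at this step */ }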
+ /// + /// The object to traverse members + /// Lazily returns objects found during traversal (including ), wrapped within a + public IEnumerable Traverse(Base root) + { + var stack = new List(); + stack.Add(NewContext(root, null, default)); + + while (stack.Count > 0) + { + int headIndex = stack.Count - 1; + T head = stack[headIndex]; + stack.RemoveAt(headIndex); + + Base current = head.Current; + var activeRule = GetActiveRuleOrDefault(current); + + if (activeRule.ShouldReturn) + { + yield return head; + } + + foreach (string childProp in activeRule.MembersToTraverse(current)) + { + TraverseMemberToStack(stack, current[childProp], childProp, head); + } + } + } + + private void TraverseMemberToStack( + ICollection stack, + object? value, + string? memberName = null, + T? parent = default + ) + { + //test + switch (value) + { + case Base o: + stack.Add(NewContext(o, memberName, parent)); + break; + case IList list: + { + foreach (object? obj in list) + { + TraverseMemberToStack(stack, obj, memberName, parent); + } + + break; + } + case IDictionary dictionary: + { + foreach (object? obj in dictionary.Values) + { + TraverseMemberToStack(stack, obj, memberName, parent); + } + + break; + } + } + } + + protected abstract T NewContext(Base current, string? propName, T? parent); + + /// + /// Traverses supported Collections yielding objects. + /// Does not traverse , only (potentially nested) collections. + /// + /// The value to traverse + public static IEnumerable TraverseMember(object? value) + { + //TODO we should benchmark this, as yield returning like this could be suboptimal + switch (value) + { + case Base o: + yield return o; + break; + case IList list: + { + foreach (object? obj in list) + { + foreach (Base o in TraverseMember(obj)) + { + yield return o; + } + } + break; + } + case IDictionary dictionary: + { + foreach (object? obj in dictionary.Values) + { + foreach (Base o in TraverseMember(obj)) + { + yield return o; + } + } + break; + } + } + } + + private ITraversalRule GetActiveRuleOrDefault(Base o) + { + return GetActiveRule(o) ?? DefaultRule.Instance; + } + + private ITraversalRule? GetActiveRule(Base o) + { + foreach (var rule in _rules) + { + if (rule.DoesRuleHold(o)) + { + return rule; + } + } + + return null; + } +} diff --git a/src/Speckle.Core/Models/GraphTraversal/ITraversalRule.cs b/src/Speckle.Core/Models/GraphTraversal/ITraversalRule.cs new file mode 100644 index 00000000..25072717 --- /dev/null +++ b/src/Speckle.Core/Models/GraphTraversal/ITraversalRule.cs @@ -0,0 +1,52 @@ +using System.Collections.Generic; +using System.Linq; + +namespace Speckle.Core.Models.GraphTraversal; + +/// +/// Interface for a definition of conditional traversal of objects. +/// +public interface ITraversalRule +{ + /// + /// The member names to traverse + /// Return may include member names doesn't have + public IEnumerable MembersToTraverse(Base b); + + /// + /// Evaluates the traversal rule given + /// + /// + /// + public bool DoesRuleHold(Base o); + + /// + /// When , + /// objects for which this rule applies, + /// will be filtered out from the traversal output + /// (but still traversed normally, as per the ) + /// + /// + /// This property was added to allow for easier filtering of the return of . 
+ /// Without the option to set some rules as false, it was necessary to duplicate part of the rules in a + /// + public bool ShouldReturn { get; } +} + +/// +/// The "traverse none" rule that always holds true +/// +internal sealed class DefaultRule : ITraversalRule +{ + private static DefaultRule? s_instance; + + private DefaultRule() { } + + public static DefaultRule Instance => s_instance ??= new DefaultRule(); + + public IEnumerable MembersToTraverse(Base b) => Enumerable.Empty(); + + public bool DoesRuleHold(Base o) => true; + + public bool ShouldReturn => true; +} diff --git a/src/Speckle.Core/Models/GraphTraversal/RuleBuilder.cs b/src/Speckle.Core/Models/GraphTraversal/RuleBuilder.cs new file mode 100644 index 00000000..2482464a --- /dev/null +++ b/src/Speckle.Core/Models/GraphTraversal/RuleBuilder.cs @@ -0,0 +1,105 @@ +using System.Collections.Generic; +using System.Linq; + +namespace Speckle.Core.Models.GraphTraversal; + +/// +/// A traversal rule defines the conditional traversal behaviour when traversing a given objects. +/// Specifies what members to traverse if any provided are met. +/// +/// Follows the builder pattern to ensure that a rule is complete before usable, see usages +public sealed class TraversalRule : ITraversalBuilderReturn, ITraversalBuilderTraverse +{ + private readonly List _conditions; + private SelectMembers? _membersToTraverse; + public bool ShouldReturn { get; private set; } = true; + + private TraversalRule() + { + _conditions = new List(); + } + + public ITraversalBuilderReturn ContinueTraversing(SelectMembers membersToTraverse) + { + this._membersToTraverse = membersToTraverse; + return this; + } + + public ITraversalRule ShouldReturnToOutput(bool shouldReturn = true) + { + ShouldReturn = shouldReturn; + return this; + } + + public ITraversalBuilderTraverse When(WhenCondition condition) + { + _conditions.Add(condition); + return this; + } + + bool ITraversalRule.DoesRuleHold(Base o) + { + foreach (var condition in _conditions) + { + if (condition.Invoke(o)) + { + return true; + } + } + + return false; + } + + IEnumerable ITraversalRule.MembersToTraverse(Base o) + { + return _membersToTraverse!(o).Distinct(); //TODO distinct is expensive, there may be a better way for us to avoid duplicates + } + + /// a new Traversal Rule to be initialised using the Builder Pattern interfaces + public static ITraversalBuilderWhen NewTraversalRule() + { + return new TraversalRule(); + } +} + +public delegate bool WhenCondition(Base o); + +/// +/// Builder Pattern Interface for a traversal rule in a partially built (unusable state) +/// +public interface ITraversalBuilderWhen +{ + /// + /// Adds a condition to this rule. This rule will hold true when ANY of its conditions holds true. 
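+ /// Illustrative chain (the "displayValue" condition and "elements" member list are assumptions for the example):
+ /// TraversalRule.NewTraversalRule().When(b => b["displayValue"] != null).ContinueTraversing(_ => new[] { "elements" });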
+ /// + /// + /// Traversal rule in a building (unusable) state + ITraversalBuilderTraverse When(WhenCondition condition); +} + +/// +/// Delegate for selecting members (by member name) of an given object +/// +public delegate IEnumerable SelectMembers(Base o); + +/// +/// Builder Pattern Interface for a traversal rule in a partially built (unusable state) +/// +public interface ITraversalBuilderTraverse : ITraversalBuilderWhen +{ + /// + /// Function returning the members that should be traversed for objects where this rule holds + /// Traversal rule in a usable state + ITraversalBuilderReturn ContinueTraversing(SelectMembers membersToTraverse); +} + +/// +/// Builder Pattern Interface for a traversal rule in a usable state, with an (optional) final step to set the value of +/// +public interface ITraversalBuilderReturn : ITraversalRule +{ + /// + /// value to set + /// Traversal rule in a usable state + ITraversalRule ShouldReturnToOutput(bool shouldReturn = true); +} diff --git a/src/Speckle.Core/Models/GraphTraversal/TraversalContextExtensions.cs b/src/Speckle.Core/Models/GraphTraversal/TraversalContextExtensions.cs new file mode 100644 index 00000000..dab65ed4 --- /dev/null +++ b/src/Speckle.Core/Models/GraphTraversal/TraversalContextExtensions.cs @@ -0,0 +1,59 @@ +using System.Collections.Generic; +using System.Diagnostics.Contracts; +using System.Linq; + +namespace Speckle.Core.Models.GraphTraversal; + +public static class TraversalContextExtensions +{ + /// + /// Walks up the tree, returning values, starting with , + /// walking up nodes + /// + /// + /// + [Pure] + public static IEnumerable GetPropertyPath(this TraversalContext context) + { + TraversalContext? head = context; + do + { + if (head?.PropName == null) + { + break; + } + yield return head.PropName; + + head = head.Parent; + } while (true); + } + + /// + /// Walks up the tree, returning all ascendant, including + /// + /// + /// and all its ascendants + [Pure] + public static IEnumerable GetAscendants(this TraversalContext context) + { + TraversalContext? head = context; + do + { + yield return head.Current; + head = head.Parent; + } while (head != null); + } + + /// + /// Walks up the tree, returning all typed ascendant, starting the closest , + /// walking up nodes + /// + /// + /// and all its ascendants of type + [Pure] + public static IEnumerable GetAscendantOfType(this TraversalContext context) + where T : Base + { + return context.GetAscendants().OfType(); + } +} diff --git a/src/Speckle.Core/Models/GraphTraversal/TraversalContexts.cs b/src/Speckle.Core/Models/GraphTraversal/TraversalContexts.cs new file mode 100644 index 00000000..3f3baf39 --- /dev/null +++ b/src/Speckle.Core/Models/GraphTraversal/TraversalContexts.cs @@ -0,0 +1,57 @@ +using System; +using System.Diagnostics.CodeAnalysis; + +namespace Speckle.Core.Models.GraphTraversal; + +[SuppressMessage( + "Naming", + "CA1708:Identifiers should differ by more than case", + Justification = "Class contains obsolete members that are kept for backwards compatiblity" +)] +public class TraversalContext +{ + public Base Current { get; } + public TraversalContext? Parent { get; } + public string? PropName { get; } + + public TraversalContext(Base current, string? propName = null, TraversalContext? parent = default) + : this(current, propName) + { + Parent = parent; + } + + protected TraversalContext(Base current, string? 
propName = null) + { + Current = current; + PropName = propName; + } + + #region Obsolete + + [Obsolete("Renamed to " + nameof(Current))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public Base current => Current; + + [Obsolete("Renamed to " + nameof(PropName))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public string? propName => PropName; + + [Obsolete("Renamed to " + nameof(Parent))] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public TraversalContext? parent => Parent; + + #endregion +} + +public class TraversalContext : TraversalContext + where T : TraversalContext +{ + public new T? Parent => (T?)base.Parent; + + public TraversalContext(Base current, string? propName = null, T? parent = default) + : base(current, propName, parent) { } + + [Obsolete("Use " + nameof(Parent) + " instead")] + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Obsolete")] + public T? typedParent => Parent; +} diff --git a/src/Speckle.Core/Models/InvalidPropNameException.cs b/src/Speckle.Core/Models/InvalidPropNameException.cs new file mode 100644 index 00000000..8a629afa --- /dev/null +++ b/src/Speckle.Core/Models/InvalidPropNameException.cs @@ -0,0 +1,18 @@ +using System; +using Speckle.Core.Logging; + +namespace Speckle.Core.Models; + +public class InvalidPropNameException : SpeckleException +{ + public InvalidPropNameException(string propName, string reason, Exception? innerException = null) + : this($"Property '{propName}' is invalid: {reason}", innerException) { } + + public InvalidPropNameException() { } + + public InvalidPropNameException(string? message) + : base(message) { } + + public InvalidPropNameException(string? message, Exception? innerException) + : base(message, innerException) { } +} diff --git a/src/Speckle.Core/Models/NestingInstructions.cs b/src/Speckle.Core/Models/NestingInstructions.cs new file mode 100644 index 00000000..0c8d38c4 --- /dev/null +++ b/src/Speckle.Core/Models/NestingInstructions.cs @@ -0,0 +1,19 @@ +namespace Speckle.Core.Models; + +/// +/// Container for a reference to a parent's applicationId and an Action to +/// execute in order to nest the child on the parent +/// +public readonly struct NestingInstructions +{ + public delegate void NestAction(Base parent, Base child); + + public NestingInstructions(string? parentApplicationId, NestAction nestAction) + { + ParentApplicationId = parentApplicationId; + Nest = nestAction; + } + + public string? ParentApplicationId { get; } + public NestAction Nest { get; } +} diff --git a/src/Speckle.Core/Models/Utilities.cs b/src/Speckle.Core/Models/Utilities.cs new file mode 100644 index 00000000..19e478f3 --- /dev/null +++ b/src/Speckle.Core/Models/Utilities.cs @@ -0,0 +1,249 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Diagnostics.Contracts; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Security.Cryptography; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; + +namespace Speckle.Core.Models; + +public static class Utilities +{ + public enum HashingFunctions + { + SHA256, + MD5 + } + + public static int HashLength => 32; + + /// + /// Wrapper method around hashing functions.. 
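+ /// e.g. HashString(someJson) hashes with SHA256 by default; pass HashingFunctions.MD5 as the second argument to use MD5 instead.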
+ /// + /// + /// + [Pure] + public static string HashString(string input, HashingFunctions func = HashingFunctions.SHA256) + { + return func switch + { + HashingFunctions.SHA256 => Crypt.Sha256(input, length: HashLength), + HashingFunctions.MD5 => Crypt.Md5(input, length: HashLength), + _ => throw new ArgumentOutOfRangeException(nameof(func), func, "Unrecognised value"), + }; + } + + [SuppressMessage("Security", "CA5351:Do Not Use Broken Cryptographic Algorithms")] + public static string HashFile(string filePath, HashingFunctions func = HashingFunctions.SHA256) + { + using HashAlgorithm hashAlgorithm = func == HashingFunctions.MD5 ? MD5.Create() : SHA256.Create(); + + using var stream = File.OpenRead(filePath); + + var hash = hashAlgorithm.ComputeHash(stream); + return BitConverter.ToString(hash, 0, HashLength).Replace("-", "").ToLowerInvariant(); + } + + [Pure] + public static bool IsSimpleType(this Type type) + { + return type.IsPrimitive + || new[] + { + typeof(string), + typeof(decimal), + typeof(DateTime), + typeof(DateTimeOffset), + typeof(TimeSpan), + typeof(Guid) + }.Contains(type) + || Convert.GetTypeCode(type) != TypeCode.Object; + } + + /// + /// Retrieves the simple type properties of an object + /// + /// + /// + /// Set to true to also retrieve simple props of direct parent type + /// Names of props to ignore + /// + public static Base GetApplicationProps( + object o, + Type t, + bool getParentProps = false, + IReadOnlyList? ignore = null + ) + { + var appProps = new Base(); + appProps["class"] = t.Name; + + try + { + // set primitive writeable props + foreach (var propInfo in t.GetProperties(BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public)) + { + if (ignore != null && ignore.Contains(propInfo.Name)) + { + continue; + } + + if (IsMeaningfulProp(propInfo, o, out object? propValue)) + { + appProps[propInfo.Name] = propValue; + } + } + + if (getParentProps) + { + foreach ( + var propInfo in t.BaseType.GetProperties( + BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public + ) + ) + { + if (ignore != null && ignore.Contains(propInfo.Name)) + { + continue; + } + + if (IsMeaningfulProp(propInfo, o, out object? propValue)) + { + appProps[propInfo.Name] = propValue; + } + } + } + } + catch (Exception ex) when (!ex.IsFatal()) + { + SpeckleLog.Logger.Warning(ex, "Failed to get application properties"); + } + + return appProps; + } + + private static bool IsMeaningfulProp(PropertyInfo propInfo, object o, out object? 
value) + { + value = propInfo.GetValue(o); + if (propInfo.GetSetMethod() != null && value != null) + { + if (propInfo.PropertyType.IsPrimitive || propInfo.PropertyType == typeof(decimal)) + { + return true; + } + + if (propInfo.PropertyType == typeof(string) && !string.IsNullOrEmpty((string)value)) + { + return true; + } + + if (propInfo.PropertyType.BaseType.Name == "Enum") // for some reason "IsEnum" prop returns false + { + value = value.ToString(); + return true; + } + } + return false; + } + + /// + /// Sets the properties of an object with the properties of a base object + /// + /// + /// + /// The base class object representing application props + [Obsolete("Unused")] + public static void SetApplicationProps(object o, Type t, Base props) + { + var propNames = props.GetDynamicMembers(); + IEnumerable names = propNames.ToList(); + if (o == null || names.Any()) + { + return; + } + + var typeProperties = t.GetProperties().ToList(); + typeProperties.AddRange(t.BaseType.GetProperties().ToList()); + foreach (var propInfo in typeProperties) + { + if (propInfo.CanWrite && names.Contains(propInfo.Name)) + { + var value = props[propInfo.Name]; + if (propInfo.PropertyType.BaseType.Name == "Enum") + { + value = Enum.Parse(propInfo.PropertyType, (string)value); + } + + if (value != null) + { + try + { + t.InvokeMember( + propInfo.Name, + BindingFlags.Instance | BindingFlags.Public | BindingFlags.SetProperty, + Type.DefaultBinder, + o, + new[] { value } + ); + } + catch (Exception ex) when (!ex.IsFatal()) { } + } + } + } + } + + /// + /// Chunks a list into pieces. + /// + /// + /// + /// + /// + [Obsolete("Unused")] + public static IEnumerable> SplitList(List list, int chunkSize = 50) + { + for (int i = 0; i < list.Count; i += chunkSize) + { + yield return list.GetRange(i, Math.Min(chunkSize, list.Count - i)); + } + } + + /// + /// Utility function to flatten a conversion result that might have nested lists of objects. + /// This happens, for example, in the case of multiple display value fallbacks for a given object. + /// + /// + /// Assuming native objects are not inherited from IList. + /// + /// Object to flatten + /// Flattened objects after to host. + public static List FlattenToHostConversionResult(object item) + { + List convertedList = new(); + Stack stack = new(); + stack.Push(item); + + while (stack.Count > 0) + { + object current = stack.Pop(); + if (current is IList list) + { + foreach (object subItem in list) + { + stack.Push(subItem); + } + } + else + { + convertedList.Add(current); + } + } + + return convertedList; + } +} diff --git a/src/Speckle.Core/Serialisation/BaseObjectDeserializerV2.cs b/src/Speckle.Core/Serialisation/BaseObjectDeserializerV2.cs new file mode 100644 index 00000000..908a4b1f --- /dev/null +++ b/src/Speckle.Core/Serialisation/BaseObjectDeserializerV2.cs @@ -0,0 +1,403 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Reflection; +using System.Threading; +using System.Threading.Tasks; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Core.Serialisation.SerializationUtilities; +using Speckle.Core.Transports; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Linq; + +namespace Speckle.Core.Serialisation; + +public sealed class BaseObjectDeserializerV2 +{ + private bool _isBusy; + private readonly object _callbackLock = new(); + + // id -> Base if already deserialized or id -> Task if was handled by a bg thread + private Dictionary? 
_deserializedObjects; + + /// + /// Property that describes the type of the object. + /// + private const string TYPE_DISCRIMINATOR = nameof(Base.speckle_type); + + private DeserializationWorkerThreads? _workerThreads; + + public CancellationToken CancellationToken { get; set; } + + /// + /// The sync transport. This transport will be used synchronously. + /// + public ITransport ReadTransport { get; set; } + + public Action? OnProgressAction { get; set; } + + public string? BlobStorageFolder { get; set; } + public TimeSpan Elapsed { get; private set; } + + public static int DefaultNumberThreads => Math.Min(Environment.ProcessorCount, 6); //6 threads seems the sweet spot, see performance test project + public int WorkerThreadCount { get; set; } = DefaultNumberThreads; + + /// The JSON string of the object to be deserialized + /// A typed object deserialized from the + /// Thrown when + /// was null + /// cannot be deserialised to type + // /// did not contain the required json objects (closures) + public Base Deserialize(string rootObjectJson) + { + if (_isBusy) + { + throw new InvalidOperationException( + "A deserializer instance can deserialize only 1 object at a time. Consider creating multiple deserializer instances" + ); + } + + try + { + _isBusy = true; + var stopwatch = Stopwatch.StartNew(); + _deserializedObjects = new(); + _workerThreads = new DeserializationWorkerThreads(this, WorkerThreadCount); + _workerThreads.Start(); + + List<(string, int)> closures = GetClosures(rootObjectJson); + closures.Sort((a, b) => b.Item2.CompareTo(a.Item2)); + foreach (var closure in closures) + { + string objId = closure.Item1; + // pausing for getting object from the transport + stopwatch.Stop(); + string? objJson = ReadTransport.GetObject(objId); + + //TODO: We should fail loudly when a closure can't be found (objJson is null) + //but adding throw here breaks blobs tests, see CNX-8541 + + stopwatch.Start(); + object? deserializedOrPromise = DeserializeTransportObjectProxy(objJson); + lock (_deserializedObjects) + { + _deserializedObjects[objId] = deserializedOrPromise; + } + } + + object? ret; + try + { + ret = DeserializeTransportObject(rootObjectJson); + } + catch (JsonReaderException ex) + { + throw new SpeckleDeserializeException("Failed to deserialize json", ex); + } + + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + if (ret is not Base b) + { + throw new SpeckleDeserializeException( + $"Expected {nameof(rootObjectJson)} to be deserialized to type {nameof(Base)} but was {ret}" + ); + } + + return b; + } + finally + { + _deserializedObjects = null; + _workerThreads?.Dispose(); + _workerThreads = null; + _isBusy = false; + } + } + + private List<(string, int)> GetClosures(string rootObjectJson) + { + try + { + List<(string, int)> closureList = new(); + JObject doc1 = JObject.Parse(rootObjectJson); + + if (!doc1.ContainsKey("__closure")) + { + return new List<(string, int)>(); + } + + foreach (JToken prop in doc1["__closure"]) + { + string childId = ((JProperty)prop).Name; + int childMinDepth = (int)((JProperty)prop).Value; + closureList.Add((childId, childMinDepth)); + } + return closureList; + } + catch (Exception ex) when (!ex.IsFatal()) + { + return new List<(string, int)>(); + } + } + + private object? DeserializeTransportObjectProxy(string objectJson) + { + // Try background work + Task? bgResult = _workerThreads!.TryStartTask(WorkerThreadTaskType.Deserialize, objectJson); //BUG: Because we don't guarantee this task will ever be awaited, this may lead to unobserved exceptions! 
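+ // If a worker thread accepted the job, the pending Task is cached in place of the deserialized object;
+ // ConvertJsonElement blocks on its Result the first time the reference is actually resolved.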
+ if (bgResult != null) + { + return bgResult; + } + + // SyncS + return DeserializeTransportObject(objectJson); + } + + /// + /// The deserialized object + /// was null + /// was not valid JSON + /// Failed to deserialize to the target type + public object? DeserializeTransportObject(string objectJson) + { + if (objectJson is null) + { + throw new ArgumentNullException(nameof(objectJson), $"Cannot deserialize {nameof(objectJson)}, value was null"); + } + // Apparently this automatically parses DateTimes in strings if it matches the format: + // JObject doc1 = JObject.Parse(objectJson); + + // This is equivalent code that doesn't parse datetimes: + JObject doc1; + using (JsonReader reader = new JsonTextReader(new StringReader(objectJson))) + { + reader.DateParseHandling = DateParseHandling.None; + doc1 = JObject.Load(reader); + } + + object? converted; + try + { + converted = ConvertJsonElement(doc1); + } + catch (Exception ex) when (!ex.IsFatal() && ex is not OperationCanceledException) + { + throw new SpeckleDeserializeException($"Failed to deserialize {doc1} as {doc1.Type}", ex); + } + + lock (_callbackLock) + { + OnProgressAction?.Invoke("DS", 1); + } + + return converted; + } + + public object? ConvertJsonElement(JToken doc) + { + CancellationToken.ThrowIfCancellationRequested(); + + switch (doc.Type) + { + case JTokenType.Undefined: + case JTokenType.Null: + case JTokenType.None: + return null; + case JTokenType.Boolean: + return (bool)doc; + case JTokenType.Integer: + try + { + return (long)doc; + } + catch (OverflowException ex) + { + var v = (object)(double)doc; + SpeckleLog.Logger.Debug( + ex, + "Json property {tokenType} failed to deserialize {value} to {targetType}, will be deserialized as {fallbackType}", + doc.Type, + v, + typeof(long), + typeof(double) + ); + return v; + } + case JTokenType.Float: + return (double)doc; + case JTokenType.String: + return (string?)doc; + case JTokenType.Date: + return (DateTime)doc; + case JTokenType.Array: + JArray docAsArray = (JArray)doc; + List jsonList = new(docAsArray.Count); + int retListCount = 0; + foreach (JToken value in docAsArray) + { + object? convertedValue = ConvertJsonElement(value); + retListCount += convertedValue is DataChunk chunk ? chunk.data.Count : 1; + jsonList.Add(convertedValue); + } + + List retList = new(retListCount); + foreach (object? jsonObj in jsonList) + { + if (jsonObj is DataChunk chunk) + { + retList.AddRange(chunk.data); + } + else + { + retList.Add(jsonObj); + } + } + + return retList; + case JTokenType.Object: + var jObject = (JContainer)doc; + Dictionary dict = new(jObject.Count); + + foreach (JToken propJToken in jObject) + { + JProperty prop = (JProperty)propJToken; + if (prop.Name == "__closure") + { + continue; + } + + dict[prop.Name] = ConvertJsonElement(prop.Value); + } + + if (!dict.TryGetValue(TYPE_DISCRIMINATOR, out object? speckleType)) + { + return dict; + } + + if (speckleType as string == "reference" && dict.TryGetValue("referencedId", out object? referencedId)) + { + var objId = (string)referencedId!; + object? deserialized = null; + lock (_deserializedObjects) + { + if (_deserializedObjects.TryGetValue(objId, out object? 
o)) + { + deserialized = o; + } + } + + if (deserialized is Task task) + { + try + { + deserialized = task.Result; + } + catch (AggregateException ex) + { + throw new SpeckleDeserializeException("Failed to deserialize reference object", ex); + } + lock (_deserializedObjects) + { + _deserializedObjects[objId] = deserialized; + } + } + + if (deserialized != null) + { + return deserialized; + } + + // This reference was not already deserialized. Do it now in sync mode + string? objectJson = ReadTransport.GetObject(objId); + if (objectJson is null) + { + throw new TransportException($"Failed to fetch object id {objId} from {ReadTransport} "); + } + + deserialized = DeserializeTransportObject(objectJson); + + lock (_deserializedObjects) + { + _deserializedObjects[objId] = deserialized; + } + + return deserialized; + } + + return Dict2Base(dict); + default: + throw new ArgumentException("Json value not supported: " + doc.Type, nameof(doc)); + } + } + + private Base Dict2Base(Dictionary dictObj) + { + string typeName = (string)dictObj[TYPE_DISCRIMINATOR]!; + Type type = BaseObjectSerializationUtilities.GetType(typeName); + Base baseObj = (Base)Activator.CreateInstance(type); + + dictObj.Remove(TYPE_DISCRIMINATOR); + dictObj.Remove("__closure"); + + Dictionary staticProperties = BaseObjectSerializationUtilities.GetTypeProperties(typeName); + List onDeserializedCallbacks = BaseObjectSerializationUtilities.GetOnDeserializedCallbacks(typeName); + + foreach (var entry in dictObj) + { + string lowerPropertyName = entry.Key.ToLower(); + if (staticProperties.TryGetValue(lowerPropertyName, out PropertyInfo? value) && value.CanWrite) + { + PropertyInfo property = staticProperties[lowerPropertyName]; + if (entry.Value == null) + { + // Check for JsonProperty(NullValueHandling = NullValueHandling.Ignore) attribute + JsonPropertyAttribute attr = property.GetCustomAttribute(true); + if (attr != null && attr.NullValueHandling == NullValueHandling.Ignore) + { + continue; + } + } + + Type targetValueType = property.PropertyType; + bool conversionOk = ValueConverter.ConvertValue(targetValueType, entry.Value, out object? convertedValue); + if (conversionOk) + { + property.SetValue(baseObj, convertedValue); + } + else + { + // Cannot convert the value in the json to the static property type + throw new SpeckleDeserializeException( + $"Cannot deserialize {entry.Value?.GetType().FullName} to {targetValueType.FullName}" + ); + } + } + else + { + // No writable property with this name + CallSiteCache.SetValue(entry.Key, baseObj, entry.Value); + } + } + + if (baseObj is Blob bb && BlobStorageFolder != null) + { + bb.filePath = bb.GetLocalDestinationPath(BlobStorageFolder); + } + + foreach (MethodInfo onDeserialized in onDeserializedCallbacks) + { + onDeserialized.Invoke(baseObj, new object?[] { null }); + } + + return baseObj; + } + + [Obsolete("Use nameof(Base.speckle_type)")] + public string TypeDiscriminator => TYPE_DISCRIMINATOR; + + [Obsolete("OnErrorAction unused, deserializer will throw exceptions instead")] + public Action? 
OnErrorAction { get; set; } +} diff --git a/src/Speckle.Core/Serialisation/BaseObjectSerializer.cs b/src/Speckle.Core/Serialisation/BaseObjectSerializer.cs new file mode 100644 index 00000000..418df0b7 --- /dev/null +++ b/src/Speckle.Core/Serialisation/BaseObjectSerializer.cs @@ -0,0 +1,719 @@ +#nullable disable +using System; +using System.Collections; +using System.Collections.Generic; +using System.Runtime.Serialization; +using System.Threading; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Core.Serialisation.SerializationUtilities; +using Speckle.Core.Transports; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Linq; +using Speckle.Newtonsoft.Json.Serialization; +using Utilities = Speckle.Core.Models.Utilities; + +// ReSharper disable InconsistentNaming +// ReSharper disable UseNegatedPatternInIsExpression +#pragma warning disable IDE0075, IDE1006, IDE0083, CA1051, CA1502, CA1854 + +namespace Speckle.Core.Serialisation; + +/// +/// Json converter that handles base speckle objects. Enables detachment and +/// simultaneous transport (persistence) of objects. +/// +[Obsolete("Use " + nameof(BaseObjectSerializerV2))] +public class BaseObjectSerializer : JsonConverter +{ + /// + /// Property that describes the type of the object. + /// + public string TypeDiscriminator = "speckle_type"; + + public BaseObjectSerializer() + { + ResetAndInitialize(); + } + + public CancellationToken CancellationToken { get; set; } + + /// + /// The sync transport. This transport will be used synchronously. + /// + public ITransport ReadTransport { get; set; } + + /// + /// List of transports to write to. + /// + public List WriteTransports { get; set; } = new(); + + public override bool CanWrite => true; + + public override bool CanRead => true; + + public Action OnProgressAction { get; set; } + + public Action OnErrorAction { get; set; } + + /// + /// Reinitializes the lineage, and other variables that get used during the + /// json writing process. + /// + public void ResetAndInitialize() + { + DetachLineage = new List(); + Lineage = new List(); + RefMinDepthTracker = new Dictionary>(); + OnProgressAction = null; + TotalProcessedCount = 0; + } + + public override bool CanConvert(Type objectType) + { + return true; + } + + #region Read Json + + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + if (CancellationToken.IsCancellationRequested) + { + return null; // Check for cancellation + } + + if (reader.TokenType == JsonToken.Null) + { + return null; + } + + // Check if we passed in an array, rather than an object. + // TODO: Test the following branch. It's not used anywhere at the moment, and the default serializer prevents it from + // ever being used (only allows single object serialization) + if (reader.TokenType == JsonToken.StartArray) + { + var list = new List(); + var jarr = JArray.Load(reader); + + foreach (var val in jarr) + { + if (CancellationToken.IsCancellationRequested) + { + return null; // Check for cancellation + } + + var whatever = BaseObjectSerializationUtilities.HandleValue(val, serializer, CancellationToken); + list.Add(whatever as Base); + } + return list; + } + + if (CancellationToken.IsCancellationRequested) + { + return null; // Check for cancellation + } + + var jObject = JObject.Load(reader); + + if (jObject == null) + { + return null; + } + + var objType = jObject.GetValue(TypeDiscriminator); + + // Assume dictionary! 
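+ // No "speckle_type" discriminator present: treat the JSON object as a plain dictionary
+ // and recursively convert each of its values via HandleValue.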
+ if (objType == null) + { + var dict = new Dictionary(); + + foreach (var val in jObject) + { + if (CancellationToken.IsCancellationRequested) + { + return null; // Check for cancellation + } + + dict[val.Key] = BaseObjectSerializationUtilities.HandleValue(val.Value, serializer, CancellationToken); + } + return dict; + } + + if (CancellationToken.IsCancellationRequested) + { + return null; // Check for cancellation + } + + var discriminator = objType.Value(); + + // Check for references. + if (discriminator == "reference") + { + var id = jObject.GetValue("referencedId").Value(); + + string str = + ReadTransport != null + ? ReadTransport.GetObject(id) + : throw new SpeckleException("Cannot resolve reference, no transport is defined."); + + if (str != null && !string.IsNullOrEmpty(str)) + { + jObject = JObject.Parse(str); + discriminator = jObject.GetValue(TypeDiscriminator).Value(); + } + else + { + throw new SpeckleException("Cannot resolve reference. The provided transport could not find it."); + } + } + + var type = BaseObjectSerializationUtilities.GetType(discriminator); + var obj = existingValue ?? Activator.CreateInstance(type); + + var contract = (JsonDynamicContract)serializer.ContractResolver.ResolveContract(type); + var used = new HashSet(); + + // remove unsettable properties + jObject.Remove(TypeDiscriminator); + jObject.Remove("__closure"); + + if (CancellationToken.IsCancellationRequested) + { + return null; // Check for cancellation + } + + foreach (var jProperty in jObject.Properties()) + { + if (CancellationToken.IsCancellationRequested) + { + return null; // Check for cancellation + } + + if (used.Contains(jProperty.Name)) + { + continue; + } + + used.Add(jProperty.Name); + + // first attempt to find a settable property, otherwise fall back to a dynamic set without type + JsonProperty property = contract.Properties.GetClosestMatchProperty(jProperty.Name); + + if (property != null && property.Writable) + { + if (type == typeof(Abstract) && property.PropertyName == "base") + { + var propertyValue = BaseObjectSerializationUtilities.HandleAbstractOriginalValue( + jProperty.Value, + ((JValue)jObject.GetValue("assemblyQualifiedName")).Value as string + ); + property.ValueProvider.SetValue(obj, propertyValue); + } + else + { + var val = BaseObjectSerializationUtilities.HandleValue( + jProperty.Value, + serializer, + CancellationToken, + property + ); + property.ValueProvider.SetValue(obj, val); + } + } + else + { + // dynamic properties + CallSiteCache.SetValue( + jProperty.Name, + obj, + BaseObjectSerializationUtilities.HandleValue(jProperty.Value, serializer, CancellationToken) + ); + } + } + + if (CancellationToken.IsCancellationRequested) + { + return null; // Check for cancellation + } + + TotalProcessedCount++; + OnProgressAction?.Invoke("DS", 1); + + foreach (var callback in contract.OnDeserializedCallbacks) + { + callback(obj, serializer.Context); + } + + return obj; + } + + #endregion + + #region Write Json Helper Properties + + /// + /// Keeps track of wether current property pointer is marked for detachment. + /// + private List DetachLineage { get; set; } + + /// + /// Keeps track of the hash chain through the object tree. + /// + private List Lineage { get; set; } + + /// + /// Dictionary of object if and its subsequent closure table (a dictionary of hashes and min depth at which they are found). 
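+ /// This table is what ultimately gets written out as each serialized object's "__closure" property.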
+ /// + private Dictionary> RefMinDepthTracker { get; set; } + + public int TotalProcessedCount; + + #endregion + + #region Write Json + + // Keeps track of the actual tree structure of the objects being serialised. + // These tree references will thereafter be stored in the __tree prop. + private void TrackReferenceInTree(string refId) + { + // Help with creating closure table entries. + for (int i = 0; i < Lineage.Count; i++) + { + var parent = Lineage[i]; + + if (!RefMinDepthTracker.ContainsKey(parent)) + { + RefMinDepthTracker[parent] = new Dictionary(); + } + + if (!RefMinDepthTracker[parent].ContainsKey(refId)) + { + RefMinDepthTracker[parent][refId] = Lineage.Count - i; + } + else if (RefMinDepthTracker[parent][refId] > Lineage.Count - i) + { + RefMinDepthTracker[parent][refId] = Lineage.Count - i; + } + } + } + + private bool FirstEntry = true, + FirstEntryWasListOrDict; + + // While this function looks complicated, it's actually quite smooth: + // The important things to remember is that serialization goes depth first: + // The first object to get fully serialised is the first nested one, with + // the parent object being last. + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + writer.Formatting = serializer.Formatting; + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + ///////////////////////////////////// + // Path one: nulls + ///////////////////////////////////// + + if (value == null) + { + return; + } + + ///////////////////////////////////// + // Path two: primitives (string, bool, int, etc) + ///////////////////////////////////// + + if (value.GetType().IsPrimitive || value is string) + { + FirstEntry = false; + writer.WriteValue(value); + //var t = JToken.FromObject(value); // bypasses this converter as we do not pass in the serializer + //t.WriteTo(writer); + return; + } + + ///////////////////////////////////// + // Path three: Bases + ///////////////////////////////////// + + if (value is Base && !(value is ObjectReference)) + { + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + var obj = value as Base; + + FirstEntry = false; + //TotalProcessedCount++; + + // Append to lineage tracker + Lineage.Add(Guid.NewGuid().ToString()); + + var jo = new JObject(); + var propertyNames = obj.GetDynamicMemberNames(); + + var contract = (JsonDynamicContract)serializer.ContractResolver.ResolveContract(value.GetType()); + + // Iterate through the object's properties, one by one, checking for ignored ones + foreach (var prop in propertyNames) + { + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + // Ignore properties starting with a double underscore. + if (prop.StartsWith("__")) + { + continue; + } + + if (prop == "id") + { + continue; + } + + var property = contract.Properties.GetClosestMatchProperty(prop); + + // Ignore properties decorated with [JsonIgnore]. + if (property != null && property.Ignored) + { + continue; + } + + // Ignore nulls + object propValue = obj[prop]; + if (propValue == null) + { + continue; + } + + // Check if this property is marked for detachment: either by the presence of "@" at the beginning of the name, or by the presence of a DetachProperty attribute on a typed property. 
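+ // Illustrative examples of the two conventions (names are hypothetical):
+ //   [DetachProperty] public Base definition { get; set; }   // typed property
+ //   obj["@displayValue"] = someChildBase;                    // dynamic property
+ // When write transports are present, such Base values are saved separately and the parent
+ // stores an ObjectReference to them instead of inlining the child.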
+ if (property != null) + { + var detachableAttributes = property.AttributeProvider.GetAttributes(typeof(DetachProperty), true); + if (detachableAttributes.Count > 0) + { + DetachLineage.Add(((DetachProperty)detachableAttributes[0]).Detachable); + } + else + { + DetachLineage.Add(false); + } + + var chunkableAttributes = property.AttributeProvider.GetAttributes(typeof(Chunkable), true); + if (chunkableAttributes.Count > 0) + { + //DetachLineage.Add(true); // NOOPE + serializer.Context = new StreamingContext(StreamingContextStates.Other, chunkableAttributes[0]); + } + else + { + //DetachLineage.Add(false); + serializer.Context = new StreamingContext(); + } + } + else if (prop.StartsWith("@")) // Convention check for dynamically added properties. + { + DetachLineage.Add(true); + + var chunkSyntax = Constants.ChunkPropertyNameRegex; + + if (chunkSyntax.IsMatch(prop)) + { + var match = chunkSyntax.Match(prop); + _ = int.TryParse(match.Groups[match.Groups.Count - 1].Value, out int chunkSize); + serializer.Context = new StreamingContext( + StreamingContextStates.Other, + chunkSize > 0 ? new Chunkable(chunkSize) : new Chunkable() + ); + } + else + { + serializer.Context = new StreamingContext(); + } + } + else + { + DetachLineage.Add(false); + } + + // Set and store a reference, if it is marked as detachable and the transport is not null. + if ( + WriteTransports != null + && WriteTransports.Count != 0 + && propValue is Base + && DetachLineage[DetachLineage.Count - 1] + ) + { + var what = JToken.FromObject(propValue, serializer); // Trigger next. + + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + if (what == null) + { + return; // HACK: Prevent nulls from borking our serialization on nested schema object refs. (i.e. Line has @SchemaObject, that has ref to line) + } + + var refHash = ((JObject)what).GetValue("id").ToString(); + + var reference = new ObjectReference { referencedId = refHash }; + TrackReferenceInTree(refHash); + jo.Add(prop, JToken.FromObject(reference)); + } + else + { + jo.Add(prop, JToken.FromObject(propValue, serializer)); // Default route + } + + // Pop detach lineage. 
If you don't get this, remember this thing moves ONLY FORWARD, DEPTH FIRST + DetachLineage.RemoveAt(DetachLineage.Count - 1); + // Refresh the streaming context to remove chunking flag + serializer.Context = new StreamingContext(); + } + + // Check if we actually have any transports present that would warrant a + if ( + WriteTransports != null + && WriteTransports.Count != 0 + && RefMinDepthTracker.ContainsKey(Lineage[Lineage.Count - 1]) + ) + { + jo.Add("__closure", JToken.FromObject(RefMinDepthTracker[Lineage[Lineage.Count - 1]])); + } + + var hash = Utilities.HashString(jo.ToString()); + if (!jo.ContainsKey("id")) + { + jo.Add("id", JToken.FromObject(hash)); + } + + jo.WriteTo(writer); + + if ( + (DetachLineage.Count == 0 || DetachLineage[DetachLineage.Count - 1]) + && WriteTransports != null + && WriteTransports.Count != 0 + ) + { + var objString = jo.ToString(writer.Formatting); + var objId = jo["id"].Value(); + + OnProgressAction?.Invoke("S", 1); + + foreach (var transport in WriteTransports) + { + if (CancellationToken.IsCancellationRequested) + { + continue; // Check for cancellation + } + + transport.SaveObject(objId, objString); + } + } + + // Pop lineage tracker + Lineage.RemoveAt(Lineage.Count - 1); + return; + } + + ///////////////////////////////////// + // Path four: lists/arrays & dicts + ///////////////////////////////////// + + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + var type = value.GetType(); + + // TODO: List handling and dictionary serialisation handling can be sped up significantly if we first check by their inner type. + // This handles a broader case in which we are, essentially, checking only for object[] or List / Dictionary cases. + // A much faster approach is to check for List, where primitive = string, number, etc. and directly serialize it in full. + // Same goes for dictionaries. + if ( + typeof(IEnumerable).IsAssignableFrom(type) + && !typeof(IDictionary).IsAssignableFrom(type) + && type != typeof(string) + ) + { + if (TotalProcessedCount == 0 && FirstEntry) + { + FirstEntry = false; + FirstEntryWasListOrDict = true; + TotalProcessedCount += 1; + DetachLineage.Add(WriteTransports != null && WriteTransports.Count != 0 ? true : false); + } + + JArray arr = new(); + + // Chunking large lists into manageable parts. 
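+ // Illustrative example: with a chunk size of 300, a list of 800 values becomes three
+ // DataChunk objects (300 + 300 + 200 items); each chunk is then serialized as its own
+ // detached object and referenced from the parent when write transports are present.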
+ if (DetachLineage[DetachLineage.Count - 1] && serializer.Context.Context is Chunkable chunkInfo) + { + var maxCount = chunkInfo.MaxObjCountPerChunk; + var i = 0; + var chunkList = new List(); + var currChunk = new DataChunk(); + + foreach (var arrValue in (IEnumerable)value) + { + if (i == maxCount) + { + if (currChunk.data.Count != 0) + { + chunkList.Add(currChunk); + } + + currChunk = new DataChunk(); + i = 0; + } + currChunk.data.Add(arrValue); + i++; + } + + if (currChunk.data.Count != 0) + { + chunkList.Add(currChunk); + } + + value = chunkList; + } + + foreach (var arrValue in (IEnumerable)value) + { + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + if (arrValue == null) + { + continue; + } + + if ( + WriteTransports != null + && WriteTransports.Count != 0 + && arrValue is Base + && DetachLineage[DetachLineage.Count - 1] + ) + { + var what = JToken.FromObject(arrValue, serializer); // Trigger next + + var refHash = ((JObject)what).GetValue("id").ToString(); + + var reference = new ObjectReference { referencedId = refHash }; + TrackReferenceInTree(refHash); + arr.Add(JToken.FromObject(reference)); + } + else + { + arr.Add(JToken.FromObject(arrValue, serializer)); // Default route + } + } + + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + arr.WriteTo(writer); + + if (DetachLineage.Count == 1 && FirstEntryWasListOrDict) // are we in a list entry point case? + { + DetachLineage.RemoveAt(0); + } + + return; + } + + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + if (typeof(IDictionary).IsAssignableFrom(type)) + { + if (TotalProcessedCount == 0 && FirstEntry) + { + FirstEntry = false; + FirstEntryWasListOrDict = true; + TotalProcessedCount += 1; + DetachLineage.Add(WriteTransports != null && WriteTransports.Count != 0 ? true : false); + } + var dict = value as IDictionary; + var dictJo = new JObject(); + foreach (DictionaryEntry kvp in dict) + { + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + if (kvp.Value == null) + { + continue; + } + + JToken jToken; + if ( + WriteTransports != null + && WriteTransports.Count != 0 + && kvp.Value is Base + && DetachLineage[DetachLineage.Count - 1] + ) + { + var what = JToken.FromObject(kvp.Value, serializer); // Trigger next + var refHash = ((JObject)what).GetValue("id").ToString(); + + var reference = new ObjectReference { referencedId = refHash }; + TrackReferenceInTree(refHash); + jToken = JToken.FromObject(reference); + } + else + { + jToken = JToken.FromObject(kvp.Value, serializer); // Default route + } + dictJo.Add(kvp.Key.ToString(), jToken); + } + dictJo.WriteTo(writer); + + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + if (DetachLineage.Count == 1 && FirstEntryWasListOrDict) // are we in a dictionary entry point case? + { + DetachLineage.RemoveAt(0); + } + + return; + } + + ///////////////////////////////////// + // Path five: everything else (enums?) 
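+ // (values not handled above, e.g. enums, fall through to JToken.FromObject below,
+ // which bypasses this converter)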
+ ///////////////////////////////////// + + if (CancellationToken.IsCancellationRequested) + { + return; // Check for cancellation + } + + FirstEntry = false; + var lastCall = JToken.FromObject(value); // bypasses this converter as we do not pass in the serializer + lastCall.WriteTo(writer); + } + + #endregion +} +#pragma warning restore IDE0075, IDE1006, IDE0083, CA1051, CA1502, CA1854 diff --git a/src/Speckle.Core/Serialisation/BaseObjectSerializerV2.cs b/src/Speckle.Core/Serialisation/BaseObjectSerializerV2.cs new file mode 100644 index 00000000..5e63b5ec --- /dev/null +++ b/src/Speckle.Core/Serialisation/BaseObjectSerializerV2.cs @@ -0,0 +1,535 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Diagnostics; +using System.DoubleNumerics; +using System.Drawing; +using System.Globalization; +using System.Linq; +using System.Reflection; +using System.Threading; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Core.Transports; +using Speckle.Newtonsoft.Json; +using Utilities = Speckle.Core.Models.Utilities; + +namespace Speckle.Core.Serialisation; + +public class BaseObjectSerializerV2 +{ + private readonly Stopwatch _stopwatch = new(); + private volatile bool _isBusy; + private List> _parentClosures = new(); + private HashSet _parentObjects = new(); + private readonly Dictionary> _typedPropertiesCache = new(); + private readonly Action? _onProgressAction; + + private readonly bool _trackDetachedChildren; + + /// + /// Keeps track of all detached children created during serialisation that have an applicationId (provided this serializer instance has been told to track detached children). + /// This is currently used to cache previously converted objects and avoid their conversion if they haven't changed. See the DUI3 send bindings in rhino or another host app. + /// + public Dictionary ObjectReferences { get; } = new(); + + /// The sync transport. This transport will be used synchronously. + public IReadOnlyCollection WriteTransports { get; } + + public CancellationToken CancellationToken { get; set; } + + /// The current total elapsed time spent serializing + public TimeSpan Elapsed => _stopwatch.Elapsed; + + public BaseObjectSerializerV2() + : this(Array.Empty()) { } + + /// + /// Creates a new Serializer instance. + /// + /// The transports detached children should be persisted to. + /// Used to track progress. + /// Whether to store all detachable objects while serializing. They can be retrieved via post serialization. + /// + public BaseObjectSerializerV2( + IReadOnlyCollection writeTransports, + Action? onProgressAction = null, + bool trackDetachedChildren = false, + CancellationToken cancellationToken = default + ) + { + WriteTransports = writeTransports; + _onProgressAction = onProgressAction; + CancellationToken = cancellationToken; + _trackDetachedChildren = trackDetachedChildren; + } + + /// The object to serialize + /// The serialized JSON + /// The serializer is busy (already serializing an object) + /// Failed to save object in one or more + /// Failed to extract (pre-serialize) properties from the + /// One or more 's cancellation token requested cancel + public string Serialize(Base baseObj) + { + if (_isBusy) + { + throw new InvalidOperationException( + "A serializer instance can serialize only 1 object at a time. 
Consider creating multiple serializer instances" + ); + } + + try + { + _stopwatch.Start(); + _isBusy = true; + IDictionary converted; + try + { + converted = PreserializeBase(baseObj, true)!; + } + catch (Exception ex) when (!ex.IsFatal()) + { + throw new SpeckleSerializeException($"Failed to extract (pre-serialize) properties from the {baseObj}", ex); + } + string serialized = Dict2Json(converted); + StoreObject((string)converted["id"]!, serialized); + return serialized; + } + finally + { + _parentClosures = new List>(); // cleanup in case of exceptions + _parentObjects = new HashSet(); + _isBusy = false; + _stopwatch.Stop(); + } + } + + // `Preserialize` means transforming all objects into the final form that will appear in json, with basic .net objects + // (primitives, lists and dictionaries with string keys) + public object? PreserializeObject( + object? obj, + bool computeClosures = false, + PropertyAttributeInfo inheritedDetachInfo = default + ) + { + CancellationToken.ThrowIfCancellationRequested(); + + if (obj == null) + { + return null; + } + + if (obj.GetType().IsPrimitive || obj is string) + { + return obj; + } + + switch (obj) + { + // Start with object references so they're not captured by the Base class case below + // Note: this change was needed as we've made the ObjectReference type inherit from Base for + // the purpose of the "do not convert unchanged previously converted objects" POC. + case ObjectReference r: + { + Dictionary ret = + new() + { + ["speckle_type"] = r.speckle_type, + ["referencedId"] = r.referencedId, + ["__closure"] = r.closure + }; + if (r.closure is not null) + { + foreach (var kvp in r.closure) + { + UpdateParentClosures(kvp.Key); + } + } + UpdateParentClosures(r.referencedId); + return ret; + } + // Complex enough to deserve its own function + case Base b: + return PreserializeBase(b, computeClosures, inheritedDetachInfo); + case IDictionary d: + { + Dictionary ret = new(d.Count); + foreach (DictionaryEntry kvp in d) + { + object? converted = PreserializeObject(kvp.Value, inheritedDetachInfo: inheritedDetachInfo); + if (converted != null) + { + ret[kvp.Key.ToString()] = converted; + } + } + return ret; + } + case IEnumerable e: + { + //TODO: handle IReadonlyDictionary + int preSize = (e is IList list) ? list.Count : 0; + + List ret = new(preSize); + + foreach (object? element in e) + { + ret.Add(PreserializeObject(element, inheritedDetachInfo: inheritedDetachInfo)); + } + + return ret; + } + case Enum: + return (int)obj; + // Support for simple types + case Guid g: + return g.ToString(); + case Color c: + return c.ToArgb(); + case DateTime t: + return t.ToString("o", CultureInfo.InvariantCulture); + case Matrix4x4 md: + return new List + { + md.M11, + md.M12, + md.M13, + md.M14, + md.M21, + md.M22, + md.M23, + md.M24, + md.M31, + md.M32, + md.M33, + md.M34, + md.M41, + md.M42, + md.M43, + md.M44 + }; + //BACKWARDS COMPATIBILITY: matrix4x4 changed from System.Numerics float to System.DoubleNumerics double in release 2.16 + case System.Numerics.Matrix4x4 ms: + SpeckleLog.Logger.Warning( + "This kept for backwards compatibility, no one should be using {this}", + "BaseObjectSerializerV2 serialize System.Numerics.Matrix4x4" + ); + return new List + { + ms.M11, + ms.M12, + ms.M13, + ms.M14, + ms.M21, + ms.M22, + ms.M23, + ms.M24, + ms.M31, + ms.M32, + ms.M33, + ms.M34, + ms.M41, + ms.M42, + ms.M43, + ms.M44 + }; + default: + throw new ArgumentException($"Unsupported value in serialization: {obj.GetType()}"); + } + } + + public IDictionary? 
PreserializeBase( + Base baseObj, + bool computeClosures = false, + PropertyAttributeInfo inheritedDetachInfo = default + ) + { + // handle circular references + bool alreadySerialized = !_parentObjects.Add(baseObj); + if (alreadySerialized) + { + return null; + } + + Dictionary convertedBase = new(); + Dictionary closure = new(); + if (computeClosures || inheritedDetachInfo.IsDetachable || baseObj is Blob) + { + _parentClosures.Add(closure); + } + + List<(PropertyInfo, PropertyAttributeInfo)> typedProperties = GetTypedPropertiesWithCache(baseObj); + IEnumerable dynamicProperties = baseObj.GetDynamicMembers(); + + // propertyName -> (originalValue, isDetachable, isChunkable, chunkSize) + Dictionary allProperties = new(); + + // Construct `allProperties`: Add typed properties + foreach ((PropertyInfo propertyInfo, PropertyAttributeInfo detachInfo) in typedProperties) + { + object baseValue = propertyInfo.GetValue(baseObj); + allProperties[propertyInfo.Name] = (baseValue, detachInfo); + } + + // Construct `allProperties`: Add dynamic properties + foreach (string propName in dynamicProperties) + { + if (propName.StartsWith("__")) + { + continue; + } + + object? baseValue = baseObj[propName]; + bool isDetachable = propName.StartsWith("@"); + bool isChunkable = false; + int chunkSize = 1000; + + if (Constants.ChunkPropertyNameRegex.IsMatch(propName)) + { + var match = Constants.ChunkPropertyNameRegex.Match(propName); + isChunkable = int.TryParse(match.Groups[match.Groups.Count - 1].Value, out chunkSize); + } + allProperties[propName] = (baseValue, new PropertyAttributeInfo(isDetachable, isChunkable, chunkSize, null)); + } + + // Convert all properties + foreach (var prop in allProperties) + { + object? convertedValue = PreserializeBasePropertyValue(prop.Value.Item1, prop.Value.Item2); + + if ( + convertedValue == null && prop.Value.Item2.JsonPropertyInfo is { NullValueHandling: NullValueHandling.Ignore } + ) + { + continue; + } + + convertedBase[prop.Key] = convertedValue; + } + + convertedBase["id"] = baseObj is Blob blob ? blob.id : ComputeId(convertedBase); + + if (closure.Count > 0) + { + convertedBase["__closure"] = closure; + } + + if (computeClosures || inheritedDetachInfo.IsDetachable || baseObj is Blob) + { + _parentClosures.RemoveAt(_parentClosures.Count - 1); + } + + _parentObjects.Remove(baseObj); + + if (baseObj is Blob myBlob) + { + StoreBlob(myBlob); + UpdateParentClosures($"blob:{convertedBase["id"]}"); + return convertedBase; + } + + if (inheritedDetachInfo.IsDetachable && WriteTransports.Count > 0) + { + string json = Dict2Json(convertedBase); + string id = (string)convertedBase["id"]!; + StoreObject(id, json); + ObjectReference objRef = new() { referencedId = id }; + var objRefConverted = (IDictionary?)PreserializeObject(objRef); + UpdateParentClosures(id); + _onProgressAction?.Invoke("S", 1); + + // add to obj refs to return + if (baseObj.applicationId != null && _trackDetachedChildren) // && baseObj is not DataChunk && baseObj is not Abstract) // not needed, as data chunks will never have application ids, and abstract objs are not really used. + { + ObjectReferences[baseObj.applicationId] = new ObjectReference() + { + referencedId = id, + applicationId = baseObj.applicationId, + closure = closure + }; + } + + return objRefConverted; + } + return convertedBase; + } + + private object? PreserializeBasePropertyValue(object? baseValue, PropertyAttributeInfo detachInfo) + { + // If there are no WriteTransports, keep everything attached. 
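+ // (with no write transports, e.g. a purely in-memory serialization used only to compute ids,
+ // nothing can be stored separately, so detachment and chunking are skipped and the whole
+ // object tree is inlined into a single JSON document)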
+ if (WriteTransports.Count == 0) + { + return PreserializeObject(baseValue, inheritedDetachInfo: detachInfo); + } + + if (baseValue is IEnumerable chunkableCollection && detachInfo.IsChunkable) + { + List chunks = new(); + DataChunk crtChunk = new() { data = new List(detachInfo.ChunkSize) }; + + foreach (object element in chunkableCollection) + { + crtChunk.data.Add(element); + if (crtChunk.data.Count >= detachInfo.ChunkSize) + { + chunks.Add(crtChunk); + crtChunk = new DataChunk { data = new List(detachInfo.ChunkSize) }; + } + } + + if (crtChunk.data.Count > 0) + { + chunks.Add(crtChunk); + } + + return PreserializeObject(chunks, inheritedDetachInfo: new PropertyAttributeInfo(true, false, 0, null)); + } + + return PreserializeObject(baseValue, inheritedDetachInfo: detachInfo); + } + + private void UpdateParentClosures(string objectId) + { + for (int parentLevel = 0; parentLevel < _parentClosures.Count; parentLevel++) + { + int childDepth = _parentClosures.Count - parentLevel; + if (!_parentClosures[parentLevel].TryGetValue(objectId, out int currentValue)) + { + currentValue = childDepth; + } + + _parentClosures[parentLevel][objectId] = Math.Min(currentValue, childDepth); + } + } + + private static string ComputeId(IDictionary obj) + { + string serialized = JsonConvert.SerializeObject(obj); + string hash = Utilities.HashString(serialized); + return hash; + } + + private static string Dict2Json(IDictionary? obj) + { + string serialized = JsonConvert.SerializeObject(obj); + return serialized; + } + + private void StoreObject(string objectId, string objectJson) + { + _stopwatch.Stop(); + foreach (var transport in WriteTransports) + { + transport.SaveObject(objectId, objectJson); + } + + _stopwatch.Start(); + } + + private void StoreBlob(Blob obj) + { + bool hasBlobTransport = false; + + _stopwatch.Stop(); + + foreach (var transport in WriteTransports) + { + if (transport is IBlobCapableTransport blobTransport) + { + hasBlobTransport = true; + blobTransport.SaveBlob(obj); + } + } + + _stopwatch.Start(); + if (!hasBlobTransport) + { + throw new InvalidOperationException( + "Object tree contains a Blob (file), but the serializer has no blob saving capable transports." + ); + } + } + + // (propertyInfo, isDetachable, isChunkable, chunkSize, JsonPropertyAttribute) + private List<(PropertyInfo, PropertyAttributeInfo)> GetTypedPropertiesWithCache(Base baseObj) + { + Type type = baseObj.GetType(); + IEnumerable typedProperties = baseObj.GetInstanceMembers(); + + if (_typedPropertiesCache.TryGetValue(type.FullName, out List<(PropertyInfo, PropertyAttributeInfo)>? cached)) + { + return cached; + } + + List<(PropertyInfo, PropertyAttributeInfo)> ret = new(); + + foreach (PropertyInfo typedProperty in typedProperties) + { + if (typedProperty.Name.StartsWith("__") || typedProperty.Name == "id") + { + continue; + } + + // Check JsonIgnore like this to cover both Newtonsoft JsonIgnore and System.Text.Json JsonIgnore + // TODO: replace JsonIgnore from newtonsoft with JsonIgnore from Sys, and check this more properly. 
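+ // (the check below is deliberately name-based: any attribute whose type name contains
+ // "JsonIgnore", whether Newtonsoft's or System.Text.Json's, causes the property to be skipped)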
+ bool jsonIgnore = false; + foreach (object attr in typedProperty.GetCustomAttributes(true)) + { + if (attr.GetType().Name.Contains("JsonIgnore")) + { + jsonIgnore = true; + break; + } + } + if (jsonIgnore) + { + continue; + } + + _ = typedProperty.GetValue(baseObj); + + List detachableAttributes = typedProperty.GetCustomAttributes(true).ToList(); + List chunkableAttributes = typedProperty.GetCustomAttributes(true).ToList(); + bool isDetachable = detachableAttributes.Count > 0 && detachableAttributes[0].Detachable; + bool isChunkable = chunkableAttributes.Count > 0; + int chunkSize = isChunkable ? chunkableAttributes[0].MaxObjCountPerChunk : 1000; + JsonPropertyAttribute? jsonPropertyAttribute = typedProperty.GetCustomAttribute(); + ret.Add((typedProperty, new PropertyAttributeInfo(isDetachable, isChunkable, chunkSize, jsonPropertyAttribute))); + } + + _typedPropertiesCache[type.FullName] = ret; + return ret; + } + + public readonly struct PropertyAttributeInfo + { + public PropertyAttributeInfo( + bool isDetachable, + bool isChunkable, + int chunkSize, + JsonPropertyAttribute? jsonPropertyAttribute + ) + { + IsDetachable = isDetachable || isChunkable; + IsChunkable = isChunkable; + ChunkSize = chunkSize; + JsonPropertyInfo = jsonPropertyAttribute; + } + + public readonly bool IsDetachable; + public readonly bool IsChunkable; + public readonly int ChunkSize; + public readonly JsonPropertyAttribute? JsonPropertyInfo; + } + + [Obsolete("OnErrorAction unused, serializer will throw exceptions instead")] + public Action? OnErrorAction { get; set; } + + [Obsolete("Set via constructor instead", true)] + public Action? OnProgressAction + { + get => _onProgressAction; + set => _ = value; + } +} diff --git a/src/Speckle.Core/Serialisation/SerializationUtilities/BaseObjectSerializationUtilities.cs b/src/Speckle.Core/Serialisation/SerializationUtilities/BaseObjectSerializationUtilities.cs new file mode 100644 index 00000000..e8f00cad --- /dev/null +++ b/src/Speckle.Core/Serialisation/SerializationUtilities/BaseObjectSerializationUtilities.cs @@ -0,0 +1,370 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; +using System.Threading; +using Speckle.Core.Kits; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Linq; +using Speckle.Newtonsoft.Json.Serialization; + +namespace Speckle.Core.Serialisation.SerializationUtilities; + +internal static class BaseObjectSerializationUtilities +{ + #region Getting Types + + private static Dictionary s_cachedTypes = new(); + + private static readonly Dictionary> s_typeProperties = new(); + + private static readonly Dictionary> s_onDeserializedCallbacks = new(); + + internal static Type GetType(string objFullType) + { + lock (s_cachedTypes) + { + if (s_cachedTypes.TryGetValue(objFullType, out Type? 
type1)) + { + return type1; + } + + var type = GetAtomicType(objFullType); + s_cachedTypes[objFullType] = type; + return type; + } + } + + internal static Type GetAtomicType(string objFullType) + { + var objectTypes = objFullType.Split(':').Reverse(); + foreach (var typeName in objectTypes) + { + //TODO: rather than getting the type from the first loaded kit that has it, maybe + //we get it from a specific Kit + var type = KitManager.Types.FirstOrDefault(tp => tp.FullName == typeName); + if (type != null) + { + return type; + } + + //To allow for backwards compatibility saving deserialization target types. + //We also check a ".Deprecated" prefixed namespace + string deprecatedTypeName = GetDeprecatedTypeName(typeName); + + var deprecatedType = KitManager.Types.FirstOrDefault(tp => tp.FullName == deprecatedTypeName); + if (deprecatedType != null) + { + return deprecatedType; + } + } + + return typeof(Base); + } + + internal static string GetDeprecatedTypeName(string typeName, string deprecatedSubstring = "Deprecated.") + { + int lastDotIndex = typeName.LastIndexOf('.'); + return typeName.Insert(lastDotIndex + 1, deprecatedSubstring); + } + + internal static Dictionary GetTypeProperties(string objFullType) + { + lock (s_typeProperties) + { + if (s_typeProperties.TryGetValue(objFullType, out Dictionary? value)) + { + return value; + } + + Dictionary ret = new(); + Type type = GetType(objFullType); + PropertyInfo[] properties = type.GetProperties(); + foreach (PropertyInfo prop in properties) + { + ret[prop.Name.ToLower()] = prop; + } + + value = ret; + s_typeProperties[objFullType] = value; + return value; + } + } + + internal static List GetOnDeserializedCallbacks(string objFullType) + { + // return new List(); + lock (s_onDeserializedCallbacks) + { + // System.Runtime.Serialization.Ca + if (s_onDeserializedCallbacks.TryGetValue(objFullType, out List? value)) + { + return value; + } + + List ret = new(); + Type type = GetType(objFullType); + MethodInfo[] methods = type.GetMethods(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + foreach (MethodInfo method in methods) + { + List onDeserializedAttributes = method + .GetCustomAttributes(true) + .ToList(); + if (onDeserializedAttributes.Count > 0) + { + ret.Add(method); + } + } + + value = ret; + s_onDeserializedCallbacks[objFullType] = value; + return value; + } + } + + internal static Type GetSystemOrSpeckleType(string typeName) + { + var systemType = Type.GetType(typeName); + if (systemType != null) + { + return systemType; + } + + return GetAtomicType(typeName); + } + + /// + /// Flushes kit's (discriminator, type) cache. Useful if you're dynamically loading more kits at runtime, that provide better coverage of what you're deserialising, and it's now somehow poisoned because the higher level types were not originally available. + /// + public static void FlushCachedTypes() + { + lock (s_cachedTypes) + { + s_cachedTypes = new Dictionary(); + } + } + + #endregion + + #region Obsolete +#pragma warning disable CS8602, CA1502 + + private static readonly Dictionary s_cachedAbstractTypes = new(); + + [Obsolete("Only Used by Serializer V1")] + internal static object? HandleAbstractOriginalValue(JToken jToken, string assemblyQualifiedName) + { + if (s_cachedAbstractTypes.TryGetValue(assemblyQualifiedName, out Type? 
type)) + { + return jToken.ToObject(type); + } + + var pieces = assemblyQualifiedName.Split(',').Select(s => s.Trim()).ToArray(); + + var myAssembly = AppDomain.CurrentDomain.GetAssemblies().FirstOrDefault(ass => ass.GetName().Name == pieces[1]); + if (myAssembly == null) + { + throw new SpeckleException("Could not load abstract object's assembly."); + } + + var myType = myAssembly.GetType(pieces[0]); + if (myType == null) + { + throw new SpeckleException("Could not load abstract object's assembly."); + } + + s_cachedAbstractTypes[assemblyQualifiedName] = myType; + + return jToken.ToObject(myType); + } + + [Obsolete("Only used by serializer v1")] + internal static object? HandleValue( + JToken? value, + JsonSerializer serializer, + CancellationToken cancellationToken, + JsonProperty? jsonProperty = null, + string typeDiscriminator = "speckle_type" + ) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (jsonProperty is { PropertyType: null }) + { + throw new ArgumentException($"Expected {nameof(JsonProperty.PropertyType)} to be non-null", nameof(jsonProperty)); + } + + switch (value) + { + case JValue jValue when jsonProperty != null: + return jValue.ToObject(jsonProperty.PropertyType); + case JValue jValue: + return jValue.Value; + // Lists + case JArray array when jsonProperty != null && jsonProperty.PropertyType.GetConstructor(Type.EmptyTypes) != null: + { + var arr = Activator.CreateInstance(jsonProperty.PropertyType); + + var addMethod = arr.GetType().GetMethod(nameof(IList.Add))!; + var hasGenericType = jsonProperty.PropertyType.GenericTypeArguments.Length != 0; + + foreach (var val in array) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (val == null) + { + continue; + } + + var item = HandleValue(val, serializer, cancellationToken); + + if (item is DataChunk chunk) + { + foreach (var dataItem in chunk.data) + { + if (hasGenericType && !jsonProperty.PropertyType.GenericTypeArguments[0].IsInterface) + { + if (jsonProperty.PropertyType.GenericTypeArguments[0].IsAssignableFrom(dataItem.GetType())) + { + addMethod.Invoke(arr, new[] { dataItem }); + } + else + { + addMethod.Invoke( + arr, + new[] { Convert.ChangeType(dataItem, jsonProperty.PropertyType.GenericTypeArguments[0]) } + ); + } + } + else + { + addMethod.Invoke(arr, new[] { dataItem }); + } + } + } + else if (hasGenericType && !jsonProperty.PropertyType.GenericTypeArguments[0].IsInterface) + { + if (jsonProperty.PropertyType.GenericTypeArguments[0].IsAssignableFrom(item.GetType())) + { + addMethod.Invoke(arr, new[] { item }); + } + else + { + addMethod.Invoke( + arr, + new[] { Convert.ChangeType(item, jsonProperty.PropertyType.GenericTypeArguments[0]) } + ); + } + } + else + { + addMethod.Invoke(arr, new[] { item }); + } + } + return arr; + } + case JArray array when jsonProperty != null: + { + var arr = (IList) + Activator.CreateInstance(typeof(List<>).MakeGenericType(jsonProperty.PropertyType.GetElementType())); + + foreach (var val in array) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (val == null) + { + continue; + } + + var item = HandleValue(val, serializer, cancellationToken); + if (item is DataChunk chunk) + { + foreach (var dataItem in chunk.data) + { + if (!jsonProperty.PropertyType.GetElementType()!.IsInterface) + { + arr.Add(Convert.ChangeType(dataItem, jsonProperty.PropertyType.GetElementType()!)); + } + else + { + arr.Add(dataItem); + } + } + } + else + { + if (!jsonProperty.PropertyType.GetElementType()!.IsInterface) + { + arr.Add(Convert.ChangeType(item, 
jsonProperty.PropertyType.GetElementType()!)); + } + else + { + arr.Add(item); + } + } + } + var actualArr = Array.CreateInstance(jsonProperty.PropertyType.GetElementType()!, arr.Count); + arr.CopyTo(actualArr, 0); + return actualArr; + } + case JArray array: + { + var arr = new List(); + foreach (var val in array) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (val == null) + { + continue; + } + + var item = HandleValue(val, serializer, cancellationToken); + + if (item is DataChunk chunk) + { + arr.AddRange(chunk.data); + } + else + { + arr.Add(item); + } + } + return arr; + } + case JObject jObject when jObject.Property(typeDiscriminator) != null: + return jObject.ToObject(serializer); + case JObject jObject: + { + var dict = + jsonProperty != null + ? Activator.CreateInstance(jsonProperty.PropertyType) as IDictionary + : new Dictionary(); + foreach (var prop in jObject) + { + cancellationToken.ThrowIfCancellationRequested(); + + object key = prop.Key; + if (jsonProperty != null) + { + key = Convert.ChangeType(prop.Key, jsonProperty.PropertyType.GetGenericArguments()[0]); + } + + dict[key] = HandleValue(prop.Value, serializer, cancellationToken); + } + return dict; + } + default: + return null; + } + } +#pragma warning restore CS8602, CA1502 // Dereference of a possibly null reference. + + #endregion +} diff --git a/src/Speckle.Core/Serialisation/SerializationUtilities/CallsiteCache.cs b/src/Speckle.Core/Serialisation/SerializationUtilities/CallsiteCache.cs new file mode 100644 index 00000000..6d74cf37 --- /dev/null +++ b/src/Speckle.Core/Serialisation/SerializationUtilities/CallsiteCache.cs @@ -0,0 +1,45 @@ +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using Microsoft.CSharp.RuntimeBinder; + +namespace Speckle.Core.Serialisation.SerializationUtilities; + +internal static class CallSiteCache +{ + // Adapted from the answer to + // https://stackoverflow.com/questions/12057516/c-sharp-dynamicobject-dynamic-properties + // by jbtule, https://stackoverflow.com/users/637783/jbtule + // And also + // https://github.com/mgravell/fast-member/blob/master/FastMember/CallSiteCache.cs + // by Marc Gravell, https://github.com/mgravell + private static readonly Dictionary>> s_setters = new(); + + public static void SetValue(string propertyName, object target, object? value) + { + lock (s_setters) + { + CallSite>? 
site; + + lock (s_setters) + { + if (!s_setters.TryGetValue(propertyName, out site)) + { + var binder = Binder.SetMember( + CSharpBinderFlags.None, + propertyName, + typeof(CallSiteCache), + new List + { + CSharpArgumentInfo.Create(CSharpArgumentInfoFlags.None, null), + CSharpArgumentInfo.Create(CSharpArgumentInfoFlags.None, null) + } + ); + s_setters[propertyName] = site = CallSite>.Create(binder); + } + } + + site.Target.Invoke(site, target, value); + } + } +} diff --git a/src/Speckle.Core/Serialisation/SerializationUtilities/DeserializationWorkerThreads.cs b/src/Speckle.Core/Serialisation/SerializationUtilities/DeserializationWorkerThreads.cs new file mode 100644 index 00000000..459350b5 --- /dev/null +++ b/src/Speckle.Core/Serialisation/SerializationUtilities/DeserializationWorkerThreads.cs @@ -0,0 +1,108 @@ +using System; +using System.Threading.Tasks; +using Speckle.Core.Logging; + +namespace Speckle.Core.Serialisation.SerializationUtilities; + +internal enum WorkerThreadTaskType +{ + NoOp = default, + Deserialize, +} + +internal sealed class DeserializationWorkerThreads : ParallelOperationExecutor +{ + private int _freeThreadCount; + + private readonly object _lockFreeThreads = new(); + private readonly BaseObjectDeserializerV2 _serializer; + + public DeserializationWorkerThreads(BaseObjectDeserializerV2 serializer, int threadCount) + { + _serializer = serializer; + NumThreads = threadCount; + } + + public override void Dispose() + { + lock (_lockFreeThreads) + { + _freeThreadCount -= NumThreads; + } + + base.Dispose(); + } + + protected override void ThreadMain() + { + while (true) + { + lock (_lockFreeThreads) + { + _freeThreadCount++; + } + + var (taskType, inputValue, tcs) = Tasks.Take(); + if (taskType == WorkerThreadTaskType.NoOp || tcs == null) + { + return; + } + + try + { + var result = RunOperation(taskType, inputValue!, _serializer); + tcs.SetResult(result); + } + catch (Exception ex) + { + tcs.SetException(ex); + + if (ex.IsFatal()) + { + throw; + } + } + } + } + + private static object? RunOperation( + WorkerThreadTaskType taskType, + object inputValue, + BaseObjectDeserializerV2 serializer + ) + { + switch (taskType) + { + case WorkerThreadTaskType.Deserialize: + var converted = serializer.DeserializeTransportObject((string)inputValue); + return converted; + default: + throw new ArgumentException( + $"No implementation for {nameof(WorkerThreadTaskType)} with value {taskType}", + nameof(taskType) + ); + } + } + + internal Task? TryStartTask(WorkerThreadTaskType taskType, object inputValue) + { + bool canStartTask = false; + lock (_lockFreeThreads) + { + if (_freeThreadCount > 0) + { + canStartTask = true; + _freeThreadCount--; + } + } + + if (!canStartTask) + { + return null; + } + + TaskCompletionSource tcs = new(TaskCreationOptions.RunContinuationsAsynchronously); + Tasks.Add(new(taskType, inputValue, tcs)); + return tcs.Task; + } +} diff --git a/src/Speckle.Core/Serialisation/SerializationUtilities/OperationTask.cs b/src/Speckle.Core/Serialisation/SerializationUtilities/OperationTask.cs new file mode 100644 index 00000000..d5589fdc --- /dev/null +++ b/src/Speckle.Core/Serialisation/SerializationUtilities/OperationTask.cs @@ -0,0 +1,87 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace Speckle.Core.Serialisation.SerializationUtilities; + +internal readonly struct OperationTask + where T : struct +{ + public readonly T OperationType; + public readonly object? 
InputValue; + public readonly TaskCompletionSource? Tcs; + + public OperationTask(T operationType, object? inputValue = null, TaskCompletionSource? tcs = null) + { + OperationType = operationType; + InputValue = inputValue; + Tcs = tcs; + } + + public void Deconstruct(out T operationType, out object? inputValue, out TaskCompletionSource? tcs) + { + operationType = OperationType; + inputValue = InputValue; + tcs = Tcs; + } +} + +internal abstract class ParallelOperationExecutor : IDisposable + where TOperation : struct +{ + protected BlockingCollection> Tasks { get; set; } = new(); + + protected IList Threads { get; set; } = new List(); + + public int NumThreads { get; protected set; } + public bool HasStarted => Threads.Count > 0; + + protected abstract void ThreadMain(); + + protected virtual void Stop() + { + if (!HasStarted) + { + throw new InvalidOperationException($"Unable to {nameof(Stop)} {this} as it has not started!"); + } + + foreach (Thread _ in Threads) + { + Tasks.Add(default); + } + + foreach (Thread t in Threads) + { + t.Join(); + } + + Threads = new List(); + } + + public virtual void Start() + { + if (HasStarted) + { + throw new InvalidOperationException($"{this}: Threads already started"); + } + + for (int i = 0; i < NumThreads; i++) + { + Thread t = new(ThreadMain) { Name = ToString(), IsBackground = true }; + Threads.Add(t); + t.Start(); + } + } + + public virtual void Dispose() + { + if (HasStarted) + { + Stop(); + } + + Tasks.Dispose(); + } +} diff --git a/src/Speckle.Core/Serialisation/SerializationUtilities/ValueConverter.cs b/src/Speckle.Core/Serialisation/SerializationUtilities/ValueConverter.cs new file mode 100644 index 00000000..f648f327 --- /dev/null +++ b/src/Speckle.Core/Serialisation/SerializationUtilities/ValueConverter.cs @@ -0,0 +1,314 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.DoubleNumerics; +using System.Drawing; +using System.Globalization; +using Speckle.Core.Logging; +using Numerics = System.Numerics; + +namespace Speckle.Core.Serialisation.SerializationUtilities; + +internal static class ValueConverter +{ + public static bool ConvertValue(Type type, object? value, out object? convertedValue) + { + // TODO: Document list of supported values in the SDK. 
(and grow it as needed) + + convertedValue = null; + if (value == null) + { + return true; + } + + Type valueType = value.GetType(); + + if (type.IsAssignableFrom(valueType)) + { + convertedValue = value; + return true; + } + + //strings + if (type == typeof(string)) + { + convertedValue = Convert.ToString(value); + return true; + } + + #region Enum + if (type.IsEnum) + { + if (valueType != typeof(long)) + { + return false; + } + + convertedValue = Enum.ToObject(type, (long)value); + return true; + } + #endregion + + switch (type.Name) + { + case "Nullable`1": + return ConvertValue(type.GenericTypeArguments[0], value, out convertedValue); + #region Numbers + case "Int64": + if (valueType == typeof(long)) + { + convertedValue = (long)value; + return true; + } + + return false; + case "Int32": + if (valueType == typeof(long)) + { + convertedValue = (int)(long)value; + return true; + } + + return false; + case "Int16": + if (valueType == typeof(long)) + { + convertedValue = (short)(long)value; + return true; + } + + return false; + case "UInt64": + if (valueType == typeof(long)) + { + convertedValue = (ulong)(long)value; + return true; + } + + return false; + case "UInt32": + if (valueType == typeof(long)) + { + convertedValue = (uint)(long)value; + return true; + } + + return false; + case "UInt16": + if (valueType == typeof(long)) + { + convertedValue = (ushort)(long)value; + return true; + } + + return false; + case "Double": + if (valueType == typeof(double)) + { + convertedValue = (double)value; + return true; + } + if (valueType == typeof(long)) + { + convertedValue = (double)(long)value; + return true; + } + switch (value) + { + case "NaN": + convertedValue = double.NaN; + return true; + case "Infinity": + convertedValue = double.PositiveInfinity; + return true; + case "-Infinity": + convertedValue = double.NegativeInfinity; + return true; + default: + return false; + } + + case "Single": + if (valueType == typeof(double)) + { + convertedValue = (float)(double)value; + return true; + } + if (valueType == typeof(long)) + { + convertedValue = (float)(long)value; + return true; + } + switch (value) + { + case "NaN": + convertedValue = float.NaN; + return true; + case "Infinity": + convertedValue = float.PositiveInfinity; + return true; + case "-Infinity": + convertedValue = float.NegativeInfinity; + return true; + default: + return false; + } + + #endregion + } + + // Handle List + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(List<>)) + { + if (value is not List valueList) + { + return false; + } + + Type listElementType = type.GenericTypeArguments[0]; + IList ret = Activator.CreateInstance(type, valueList.Count) as IList; + foreach (object inputListElement in valueList) + { + if (!ConvertValue(listElementType, inputListElement, out object? convertedListElement)) + { + return false; + } + + ret.Add(convertedListElement); + } + convertedValue = ret; + return true; + } + + // Handle Dictionary + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(Dictionary<,>)) + { + if (value is not Dictionary valueDict) + { + return false; + } + + if (type.GenericTypeArguments[0] != typeof(string)) + { + throw new ArgumentException("Dictionaries with non-string keys are not supported", nameof(type)); + } + + Type dictValueType = type.GenericTypeArguments[1]; + IDictionary ret = Activator.CreateInstance(type) as IDictionary; + + foreach (KeyValuePair kv in valueDict) + { + if (!ConvertValue(dictValueType, kv.Value, out object? 
convertedDictValue)) + { + return false; + } + + ret[kv.Key] = convertedDictValue; + } + convertedValue = ret; + return true; + } + + // Handle arrays + if (type.IsArray) + { + if (value is not List valueList) + { + return false; + } + + Type arrayElementType = + type.GetElementType() ?? throw new ArgumentException("IsArray yet not valid element type", nameof(type)); + + Array ret = Activator.CreateInstance(type, valueList.Count) as Array; + for (int i = 0; i < valueList.Count; i++) + { + object inputListElement = valueList[i]; + if (!ConvertValue(arrayElementType, inputListElement, out object? convertedListElement)) + { + return false; + } + + ret.SetValue(convertedListElement, i); + } + convertedValue = ret; + return true; + } + + // Handle simple classes/structs + if (type == typeof(Guid) && value is string str) + { + convertedValue = Guid.Parse(str); + return true; + } + + if (type == typeof(Color) && value is long integer) + { + convertedValue = Color.FromArgb((int)integer); + return true; + } + + if (type == typeof(DateTime) && value is string s) + { + convertedValue = DateTime.ParseExact(s, "o", CultureInfo.InvariantCulture); + return true; + } + + #region BACKWARDS COMPATIBILITY: matrix4x4 changed from System.Numerics float to System.DoubleNumerics double in release 2.16 + if (type == typeof(Numerics.Matrix4x4) && value is IReadOnlyList lMatrix) + { + SpeckleLog.Logger.Warning( + "This kept for backwards compatibility, no one should be using {this}", + "ValueConverter deserialize to System.Numerics.Matrix4x4" + ); + convertedValue = new Numerics.Matrix4x4( + I(0), + I(1), + I(2), + I(3), + I(4), + I(5), + I(6), + I(7), + I(8), + I(9), + I(10), + I(11), + I(12), + I(13), + I(14), + I(15) + ); + return true; + + float I(int index) => Convert.ToSingle(lMatrix[index]); + } + #endregion + + if (type == typeof(Matrix4x4) && value is IReadOnlyList l) + { + double I(int index) => Convert.ToDouble(l[index]); + convertedValue = new Matrix4x4( + I(0), + I(1), + I(2), + I(3), + I(4), + I(5), + I(6), + I(7), + I(8), + I(9), + I(10), + I(11), + I(12), + I(13), + I(14), + I(15) + ); + return true; + } + + return false; + } +} diff --git a/src/Speckle.Core/Serialisation/SpeckleSerializerException.cs b/src/Speckle.Core/Serialisation/SpeckleSerializerException.cs new file mode 100644 index 00000000..de74b572 --- /dev/null +++ b/src/Speckle.Core/Serialisation/SpeckleSerializerException.cs @@ -0,0 +1,26 @@ +using System; +using Speckle.Core.Logging; + +namespace Speckle.Core.Serialisation; + +public class SpeckleSerializeException : SpeckleException +{ + public SpeckleSerializeException() { } + + public SpeckleSerializeException(string message, Exception? inner = null) + : base(message, inner) { } + + public SpeckleSerializeException(string message) + : base(message) { } +} + +public class SpeckleDeserializeException : SpeckleException +{ + public SpeckleDeserializeException() { } + + public SpeckleDeserializeException(string message, Exception? 
inner = null) + : base(message, inner) { } + + public SpeckleDeserializeException(string message) + : base(message) { } +} diff --git a/src/Speckle.Core/Speckle.Core.csproj b/src/Speckle.Core/Speckle.Core.csproj new file mode 100644 index 00000000..6b1a5b6a --- /dev/null +++ b/src/Speckle.Core/Speckle.Core.csproj @@ -0,0 +1,40 @@ + + + + netstandard2.0 + true + Speckle.Core + Speckle.Core + Speckle.Core + Speckle.Core + Core is the .NET SDK for Speckle + $(PackageTags) core + true + true + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/Speckle.Core/Transports/Exceptions.cs b/src/Speckle.Core/Transports/Exceptions.cs new file mode 100644 index 00000000..d19c760d --- /dev/null +++ b/src/Speckle.Core/Transports/Exceptions.cs @@ -0,0 +1,23 @@ +using System; +using Speckle.Core.Logging; + +namespace Speckle.Core.Transports; + +public class TransportException : SpeckleException +{ + public ITransport? Transport { get; } + + public TransportException(ITransport? transport, string? message, Exception? innerException = null) + : this(message, innerException) + { + Transport = transport; + } + + public TransportException() { } + + public TransportException(string? message) + : base(message) { } + + public TransportException(string? message, Exception? innerException) + : base(message, innerException) { } +} diff --git a/src/Speckle.Core/Transports/ITransport.cs b/src/Speckle.Core/Transports/ITransport.cs new file mode 100644 index 00000000..92e5d25c --- /dev/null +++ b/src/Speckle.Core/Transports/ITransport.cs @@ -0,0 +1,124 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Speckle.Core.Models; + +namespace Speckle.Core.Transports; + +/// +/// Interface defining the contract for transport implementations. +/// +public interface ITransport +{ + /// + /// Human readable name for the transport + /// + public string TransportName { get; set; } + + /// + /// Extra descriptor properties of the given transport. + /// + public Dictionary TransportContext { get; } + + /// + /// Show how much time the transport was busy for. + /// + public TimeSpan Elapsed { get; } + + /// + /// Show how many objects the transport saved. + /// + public int SavedObjectCount { get; } + + /// + /// Should be checked often and gracefully stop all in progress sending if requested. + /// + public CancellationToken CancellationToken { get; set; } + + /// + /// Used to report progress during the transport's longer operations. + /// + public Action? OnProgressAction { get; set; } + + /// + /// Used to report errors during the transport's longer operations. + /// + [Obsolete("Transports will now throw exceptions", true)] + public Action? OnErrorAction { get; set; } + + /// + /// Signals to the transport that writes are about to begin. + /// + public void BeginWrite(); + + /// + /// Signals to the transport that no more items will need to be written. + /// + public void EndWrite(); + + /// + /// Saves an object. + /// + /// The hash of the object. + /// The full string representation of the object + /// Failed to save object + /// requested cancel + public void SaveObject(string id, string serializedObject); + + /// + /// + /// Retrieving its serialised version from the provided transport. + /// + /// + /// The transport from where to retrieve it. + /// Failed to save object + /// requested cancel + public void SaveObject(string id, ITransport sourceTransport); + + /// + /// Awaitable method to figure out whether writing is completed. 
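+ /// Transports that buffer or queue writes may still be flushing when SaveObject returns;
+ /// callers typically await this before treating the data as persisted.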
+ /// + /// + public Task WriteComplete(); + + /// The object's hash. + /// The serialized object data, or if the transport cannot find the object + /// requested cancel + public string? GetObject(string id); + + /// + /// Copies the parent object and all its children to the provided transport. + /// + /// The id of the object you want to copy. + /// The transport you want to copy the object to. + /// (Optional) an that will be invoked once, when the number of object children to be copied over is known. + /// The string representation of the root object. + /// The provided arguments are not valid + /// The transport could not complete the operation + /// requested cancel + public Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action? onTotalChildrenCountKnown = null + ); + + /// + /// Checks if objects are present in the transport + /// + /// List of object ids to check + /// A dictionary with the specified object ids as keys and boolean values, whether each object is present in the transport or not + /// The transport could not complete the operation + /// requested cancel + public Task> HasObjects(IReadOnlyList objectIds); +} + +public interface IBlobCapableTransport +{ + public string BlobStorageFolder { get; } + + public void SaveBlob(Blob obj); + + // NOTE: not needed, should be implemented in "CopyObjectsAndChildren" + //public void GetBlob(Blob obj); +} diff --git a/src/Speckle.Core/Transports/Memory.cs b/src/Speckle.Core/Transports/Memory.cs new file mode 100644 index 00000000..011fdb7d --- /dev/null +++ b/src/Speckle.Core/Transports/Memory.cs @@ -0,0 +1,142 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Speckle.Core.Logging; + +namespace Speckle.Core.Transports; + +/// +/// An in memory storage of speckle objects. +/// +public sealed class MemoryTransport : ITransport, ICloneable +{ + public IDictionary Objects { get; } + + public MemoryTransport() + : this(new Dictionary()) { } + + public MemoryTransport(IDictionary objects) + { + Objects = objects; + SpeckleLog.Logger.Debug("Creating a new Memory Transport"); + } + + public object Clone() + { + return new MemoryTransport(Objects) + { + TransportName = TransportName, + OnProgressAction = OnProgressAction, + CancellationToken = CancellationToken, + SavedObjectCount = SavedObjectCount + }; + } + + public CancellationToken CancellationToken { get; set; } + + public string TransportName { get; set; } = "Memory"; + + public Action? 
OnProgressAction { get; set; } + + public int SavedObjectCount { get; private set; } + + public Dictionary TransportContext => new() { { "name", TransportName }, { "type", GetType().Name } }; + + public TimeSpan Elapsed { get; private set; } = TimeSpan.Zero; + + public void BeginWrite() + { + SavedObjectCount = 0; + } + + public void EndWrite() { } + + public void SaveObject(string id, string serializedObject) + { + CancellationToken.ThrowIfCancellationRequested(); + var stopwatch = Stopwatch.StartNew(); + + Objects[id] = serializedObject; + + SavedObjectCount++; + OnProgressAction?.Invoke(TransportName, 1); + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + } + + public void SaveObject(string id, ITransport sourceTransport) + { + CancellationToken.ThrowIfCancellationRequested(); + + var serializedObject = sourceTransport.GetObject(id); + + if (serializedObject is null) + { + throw new TransportException( + this, + $"Cannot copy {id} from {sourceTransport.TransportName} to {TransportName} as source returned null" + ); + } + + SaveObject(id, serializedObject); + } + + public string? GetObject(string id) + { + var stopwatch = Stopwatch.StartNew(); + var ret = Objects.TryGetValue(id, out string o) ? o : null; + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + return ret; + } + + public Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action? onTotalChildrenCountKnown = null + ) + { + string res = TransportHelpers.CopyObjectAndChildrenSync( + id, + this, + targetTransport, + onTotalChildrenCountKnown, + CancellationToken + ); + return Task.FromResult(res); + } + + public Task WriteComplete() + { + return Task.CompletedTask; + } + + public Task> HasObjects(IReadOnlyList objectIds) + { + Dictionary ret = new(objectIds.Count); + foreach (string objectId in objectIds) + { + ret[objectId] = Objects.ContainsKey(objectId); + } + + return Task.FromResult(ret); + } + + [Obsolete("No replacement required, memory transport is always sync")] + [SuppressMessage("Design", "CA1024:Use properties where appropriate")] + public bool GetWriteCompletionStatus() + { + return true; // can safely assume it's always true, as ops are atomic? + } + + public override string ToString() + { + return $"Memory Transport {TransportName}"; + } + + [Obsolete("Transports will now throw exceptions", true)] + public Action? OnErrorAction { get; set; } +} diff --git a/src/Speckle.Core/Transports/SQLite.cs b/src/Speckle.Core/Transports/SQLite.cs new file mode 100644 index 00000000..06e21b3e --- /dev/null +++ b/src/Speckle.Core/Transports/SQLite.cs @@ -0,0 +1,478 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using System.Timers; +using Microsoft.Data.Sqlite; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Timer = System.Timers.Timer; + +namespace Speckle.Core.Transports; + +public sealed class SQLiteTransport : IDisposable, ICloneable, ITransport, IBlobCapableTransport +{ + private bool _isWriting; + private const int MAX_TRANSACTION_SIZE = 1000; + private const int POLL_INTERVAL = 500; + + private ConcurrentQueue<(string id, string serializedObject, int byteCount)> _queue = new(); + + /// + /// Timer that ensures queue is consumed if less than MAX_TRANSACTION_SIZE objects are being sent. 
+ /// + private readonly Timer _writeTimer; + + /// + /// Connects to an SQLite DB at {}/{}/{}.db + /// Will attempt to create db + directory structure as needed + /// + /// defaults to if + /// defaults to "Speckle" if + /// defaults to "Data" if + /// Failed to initialize a connection to the db + /// Path was invalid or could not be created + public SQLiteTransport(string? basePath = null, string? applicationName = null, string? scope = null) + { + _basePath = basePath ?? SpecklePathProvider.UserApplicationDataPath(); + _applicationName = applicationName ?? "Speckle"; + _scope = scope ?? "Data"; + + try + { + var dir = Path.Combine(_basePath, _applicationName); + _rootPath = Path.Combine(dir, $"{_scope}.db"); + + Directory.CreateDirectory(dir); //ensure dir is there + } + catch (Exception ex) + when (ex is ArgumentException or IOException or UnauthorizedAccessException or NotSupportedException) + { + throw new TransportException($"Path was invalid or could not be created {_rootPath}", ex); + } + + _connectionString = $"Data Source={_rootPath};"; + + Initialize(); + + _writeTimer = new Timer + { + AutoReset = true, + Enabled = false, + Interval = POLL_INTERVAL + }; + _writeTimer.Elapsed += WriteTimerElapsed; + } + + private readonly string _rootPath; + + private readonly string _basePath; + private readonly string _applicationName; + private readonly string _scope; + private readonly string _connectionString; + + private SqliteConnection Connection { get; set; } + private object ConnectionLock { get; set; } + + public string BlobStorageFolder => SpecklePathProvider.BlobStoragePath(Path.Combine(_basePath, _applicationName)); + + public void SaveBlob(Blob obj) + { + var blobPath = obj.originalPath; + var targetPath = obj.GetLocalDestinationPath(BlobStorageFolder); + File.Copy(blobPath, targetPath, true); + } + + public object Clone() + { + return new SQLiteTransport(_basePath, _applicationName, _scope) + { + OnProgressAction = OnProgressAction, + CancellationToken = CancellationToken + }; + } + + public void Dispose() + { + // TODO: Check if it's still writing? + Connection.Close(); + Connection.Dispose(); + _writeTimer.Dispose(); + } + + public string TransportName { get; set; } = "SQLite"; + + public Dictionary TransportContext => + new() + { + { "name", TransportName }, + { "type", GetType().Name }, + { "basePath", _basePath }, + { "applicationName", _applicationName }, + { "scope", _scope }, + { "blobStorageFolder", BlobStorageFolder } + }; + + public CancellationToken CancellationToken { get; set; } + + public Action? OnProgressAction { get; set; } + + [Obsolete("Transports will now throw exceptions")] + public Action? 
OnErrorAction { get; set; } + public int SavedObjectCount { get; private set; } + + public TimeSpan Elapsed { get; private set; } + + public void BeginWrite() + { + _queue = new(); + SavedObjectCount = 0; + } + + public void EndWrite() { } + + public Task> HasObjects(IReadOnlyList objectIds) + { + Dictionary ret = new(objectIds.Count); + // Initialize with false so that canceled queries still return a dictionary item for every object id + foreach (string objectId in objectIds) + { + ret[objectId] = false; + } + + try + { + const string COMMAND_TEXT = "SELECT 1 FROM objects WHERE hash = @hash LIMIT 1 "; + using var command = new SqliteCommand(COMMAND_TEXT, Connection); + + foreach (string objectId in objectIds) + { + CancellationToken.ThrowIfCancellationRequested(); + + command.Parameters.Clear(); + command.Parameters.AddWithValue("@hash", objectId); + + using var reader = command.ExecuteReader(); + bool rowFound = reader.Read(); + ret[objectId] = rowFound; + } + } + catch (SqliteException ex) + { + throw new TransportException("SQLite transport failed", ex); + } + + return Task.FromResult(ret); + } + + /// Failed to initialize connection to the SQLite DB + private void Initialize() + { + // NOTE: used for creating partioned object tables. + //string[] HexChars = new string[] { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f" }; + //var cart = new List(); + //foreach (var str in HexChars) + // foreach (var str2 in HexChars) + // cart.Add(str + str2); + + using (var c = new SqliteConnection(_connectionString)) + { + c.Open(); + const string COMMAND_TEXT = + @" + CREATE TABLE IF NOT EXISTS objects( + hash TEXT PRIMARY KEY, + content TEXT + ) WITHOUT ROWID; + "; + using (var command = new SqliteCommand(COMMAND_TEXT, c)) + { + command.ExecuteNonQuery(); + } + + // Insert Optimisations + + using SqliteCommand cmd0 = new("PRAGMA journal_mode='wal';", c); + cmd0.ExecuteNonQuery(); + + //Note / Hack: This setting has the potential to corrupt the db. + //cmd = new SqliteCommand("PRAGMA synchronous=OFF;", Connection); + //cmd.ExecuteNonQuery(); + + using SqliteCommand cmd1 = new("PRAGMA count_changes=OFF;", c); + cmd1.ExecuteNonQuery(); + + using SqliteCommand cmd2 = new("PRAGMA temp_store=MEMORY;", c); + cmd2.ExecuteNonQuery(); + } + + Connection = new SqliteConnection(_connectionString); + Connection.Open(); + ConnectionLock = new object(); + } + + /// + /// Returns all the objects in the store. Note: do not use for large collections. + /// + /// + /// This function uses a separate so is safe to call concurrently (unlike most other transport functions) + internal IEnumerable GetAllObjects() + { + CancellationToken.ThrowIfCancellationRequested(); + + using SqliteConnection connection = new(_connectionString); + connection.Open(); + + using var command = new SqliteCommand("SELECT * FROM objects", connection); + + using var reader = command.ExecuteReader(); + while (reader.Read()) + { + CancellationToken.ThrowIfCancellationRequested(); + yield return reader.GetString(1); + } + } + + /// + /// Deletes an object. Note: do not use for any speckle object transport, as it will corrupt the database. + /// + /// + public void DeleteObject(string hash) + { + CancellationToken.ThrowIfCancellationRequested(); + + using var command = new SqliteCommand("DELETE FROM objects WHERE hash = @hash", Connection); + command.Parameters.AddWithValue("@hash", hash); + command.ExecuteNonQuery(); + } + + /// + /// Updates an object. 
+ /// + /// + /// + public void UpdateObject(string hash, string serializedObject) + { + CancellationToken.ThrowIfCancellationRequested(); + + using var c = new SqliteConnection(_connectionString); + c.Open(); + const string COMMAND_TEXT = "REPLACE INTO objects(hash, content) VALUES(@hash, @content)"; + using var command = new SqliteCommand(COMMAND_TEXT, c); + command.Parameters.AddWithValue("@hash", hash); + command.Parameters.AddWithValue("@content", serializedObject); + command.ExecuteNonQuery(); + } + + public override string ToString() + { + return $"Sqlite Transport @{_rootPath}"; + } + + #region Writes + + /// + /// Awaits untill write completion (ie, the current queue is fully consumed). + /// + /// + public async Task WriteComplete() + { + await Utilities.WaitUntil(() => WriteCompletionStatus, 500).ConfigureAwait(false); + } + + /// + /// Returns true if the current write queue is empty and comitted. + /// + /// + public bool WriteCompletionStatus => _queue.IsEmpty && !_isWriting; + + private void WriteTimerElapsed(object sender, ElapsedEventArgs e) + { + _writeTimer.Enabled = false; + + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + return; + } + + if (!_isWriting && !_queue.IsEmpty) + { + ConsumeQueue(); + } + } + + private void ConsumeQueue() + { + var stopwatch = Stopwatch.StartNew(); + _isWriting = true; + try + { + CancellationToken.ThrowIfCancellationRequested(); + + var i = 0; //BUG: This never gets incremented! + + var saved = 0; + + using (var c = new SqliteConnection(_connectionString)) + { + c.Open(); + using var t = c.BeginTransaction(); + const string COMMAND_TEXT = "INSERT OR IGNORE INTO objects(hash, content) VALUES(@hash, @content)"; + + while (i < MAX_TRANSACTION_SIZE && _queue.TryPeek(out var result)) + { + using var command = new SqliteCommand(COMMAND_TEXT, c, t); + _queue.TryDequeue(out result); + command.Parameters.AddWithValue("@hash", result.id); + command.Parameters.AddWithValue("@content", result.serializedObject); + command.ExecuteNonQuery(); + + saved++; + } + + t.Commit(); + CancellationToken.ThrowIfCancellationRequested(); + } + + OnProgressAction?.Invoke(TransportName, saved); + + CancellationToken.ThrowIfCancellationRequested(); + + if (!_queue.IsEmpty) + { + ConsumeQueue(); + } + } + catch (SqliteException ex) + { + throw new TransportException(this, "SQLite Command Failed", ex); + } + catch (OperationCanceledException) + { + _queue = new(); + } + finally + { + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + _isWriting = false; + } + } + + /// + /// Adds an object to the saving queue. + /// + /// + /// + public void SaveObject(string id, string serializedObject) + { + CancellationToken.ThrowIfCancellationRequested(); + _queue.Enqueue((id, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + + _writeTimer.Enabled = true; + _writeTimer.Start(); + } + + public void SaveObject(string id, ITransport sourceTransport) + { + CancellationToken.ThrowIfCancellationRequested(); + + var serializedObject = sourceTransport.GetObject(id); + + if (serializedObject is null) + { + throw new TransportException( + this, + $"Cannot copy {id} from {sourceTransport.TransportName} to {TransportName} as source returned null" + ); + } + + //Should this just call SaveObject... do we not want the write timers? + _queue.Enqueue((id, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + } + + /// + /// Directly saves the object in the db. 
+ /// + /// + /// + public void SaveObjectSync(string hash, string serializedObject) + { + const string COMMAND_TEXT = "INSERT OR IGNORE INTO objects(hash, content) VALUES(@hash, @content)"; + + try + { + using var command = new SqliteCommand(COMMAND_TEXT, Connection); + command.Parameters.AddWithValue("@hash", hash); + command.Parameters.AddWithValue("@content", serializedObject); + command.ExecuteNonQuery(); + } + catch (SqliteException ex) + { + throw new TransportException(this, "SQLite Command Failed", ex); + } + } + + #endregion + + #region Reads + + /// + /// Gets an object. + /// + /// + /// + public string? GetObject(string id) + { + CancellationToken.ThrowIfCancellationRequested(); + lock (ConnectionLock) + { + var startTime = Stopwatch.GetTimestamp(); + using (var command = new SqliteCommand("SELECT * FROM objects WHERE hash = @hash LIMIT 1 ", Connection)) + { + command.Parameters.AddWithValue("@hash", id); + using var reader = command.ExecuteReader(); + + while (reader.Read()) + { + return reader.GetString(1); + } + } + Elapsed += LoggingHelpers.GetElapsedTime(startTime, Stopwatch.GetTimestamp()); + } + return null; // pass on the duty of null checks to consumers + } + + public Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action? onTotalChildrenCountKnown = null + ) + { + string res = TransportHelpers.CopyObjectAndChildrenSync( + id, + this, + targetTransport, + onTotalChildrenCountKnown, + CancellationToken + ); + return Task.FromResult(res); + } + + #endregion + + #region Deprecated + + [Obsolete("Use " + nameof(WriteCompletionStatus))] + [SuppressMessage("Design", "CA1024:Use properties where appropriate")] + public bool GetWriteCompletionStatus() => WriteCompletionStatus; + + #endregion +} diff --git a/src/Speckle.Core/Transports/Server.cs b/src/Speckle.Core/Transports/Server.cs new file mode 100644 index 00000000..e92d2383 --- /dev/null +++ b/src/Speckle.Core/Transports/Server.cs @@ -0,0 +1,634 @@ +#nullable disable +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using System.Timers; +using Speckle.Core.Credentials; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Newtonsoft.Json; +using Timer = System.Timers.Timer; + +// ReSharper disable InconsistentNaming +#pragma warning disable IDE1006, IDE0018, CA2000, CA1031, CS1634, CS1570, CS1696, CA1836, CA1854, CA1834, CA2201, CA1725, CA1861, CA1024 + +namespace Speckle.Core.Transports; + +/// +/// Sends data to a speckle server. 
+/// +[Obsolete("Use " + nameof(ServerTransport))] +public sealed class ServerTransportV1 : IDisposable, ICloneable, ITransport +{ + private const int DownloadBatchSize = 1000; + + private bool _isWriting; + + private const int MaxBufferSize = 1_000_000; + + private const int MaxMultipartCount = 50; + + private ConcurrentQueue<(string, string, int)> _queue = new(); + + private int _totalElapsed; + + private const int PollInterval = 100; + + private Timer _writeTimer; + + public ServerTransportV1(Account account, string streamId, int timeoutSeconds = 60) + { + Account = account; + Initialize(account.serverInfo.url, streamId, account.token, timeoutSeconds); + } + + public string BaseUri { get; private set; } + + public string StreamId { get; set; } + + private HttpClient Client { get; set; } + + public bool CompressPayloads { get; set; } = true; + + public int TotalSentBytes { get; set; } + + public Account Account { get; set; } + + public object Clone() + { + return new ServerTransport(Account, StreamId) + { + OnErrorAction = OnErrorAction, + OnProgressAction = OnProgressAction, + CancellationToken = CancellationToken + }; + } + + public void Dispose() + { + // TODO: check if it's writing first? + Client?.Dispose(); + _writeTimer.Dispose(); + } + + public string TransportName { get; set; } = "RemoteTransport"; + + public Dictionary TransportContext => + new() + { + { "name", TransportName }, + { "type", GetType().Name }, + { "streamId", StreamId }, + { "serverUrl", BaseUri } + }; + + public CancellationToken CancellationToken { get; set; } + + public int SavedObjectCount { get; private set; } + + public Action OnProgressAction { get; set; } + + public Action OnErrorAction { get; set; } + + // not implementing this for V1, just a dummy 0 value + public TimeSpan Elapsed => TimeSpan.Zero; + + public void BeginWrite() + { + if (!GetWriteCompletionStatus()) + { + throw new SpeckleException("Transport is still writing."); + } + + TotalSentBytes = 0; + SavedObjectCount = 0; + } + + public void EndWrite() { } + + public async Task> HasObjects(IReadOnlyList objectIds) + { + var payload = new Dictionary { { "objects", JsonConvert.SerializeObject(objectIds) } }; + var uri = new Uri($"/api/diff/{StreamId}", UriKind.Relative); + var response = await Client + .PostAsync( + uri, + new StringContent(JsonConvert.SerializeObject(payload), Encoding.UTF8, "application/json"), + CancellationToken + ) + .ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + + var hasObjectsJson = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + var hasObjects = JsonConvert.DeserializeObject>(hasObjectsJson); + return hasObjects; + } + + private void Initialize(string baseUri, string streamId, string authorizationToken, int timeoutSeconds = 60) + { + SpeckleLog.Logger.Information("Initializing New Remote V1 Transport for {baseUri}", baseUri); + + BaseUri = baseUri; + StreamId = streamId; + + Client = Http.GetHttpProxyClient( + new SpeckleHttpClientHandler { AutomaticDecompression = DecompressionMethods.GZip } + ); + + Client.BaseAddress = new Uri(baseUri); + Client.Timeout = new TimeSpan(0, 0, timeoutSeconds); + Http.AddAuthHeader(Client, authorizationToken); + + _writeTimer = new Timer + { + AutoReset = true, + Enabled = false, + Interval = PollInterval + }; + _writeTimer.Elapsed += WriteTimerElapsed; + } + + public override string ToString() + { + return $"Server Transport @{Account.serverInfo.url}"; + } + + internal class Placeholder + { + public Dictionary __closure { get; set; } = new(); + } + + 
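For orientation, here is a minimal usage sketch of the transports introduced in this patch; it is not part of the original commit. It assumes only the Speckle.Core.Transports types shown above, the class and method names (TransportUsageSketch, RunAsync) and the object id and JSON payload are invented for illustration, and the SQLiteTransport call writes a small Example.db under the local Speckle application data folder.

using System;
using System.Threading.Tasks;
using Speckle.Core.Transports;

public static class TransportUsageSketch
{
  public static async Task RunAsync()
  {
    // Stage a serialized object in memory first; MemoryTransport writes are synchronous.
    var memory = new MemoryTransport();
    memory.SaveObject("abc123", "{\"id\":\"abc123\",\"speckle_type\":\"Base\"}");

    // SQLiteTransport queues writes; a timer flushes the queue into the db in batches.
    using var sqlite = new SQLiteTransport(scope: "Example");
    sqlite.BeginWrite();

    string? json = memory.GetObject("abc123"); // returns null when the id is unknown
    if (json is not null)
    {
      sqlite.SaveObject("abc123", json); // enqueued; committed by the write timer
    }

    await sqlite.WriteComplete(); // waits until the queue is fully consumed
    sqlite.EndWrite();

    // HasObjects reports presence per id; GetObject reads a single object back.
    var present = await sqlite.HasObjects(new[] { "abc123" });
    Console.WriteLine($"stored: {present["abc123"]}, json: {sqlite.GetObject("abc123")}");
  }
}

The same call pattern applies to the server transports below, since they expose the identical ITransport surface; only the backing store changes.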
#region Writing objects + + public async Task WriteComplete() + { + await Utilities + .WaitUntil( + () => + { + return GetWriteCompletionStatus(); + }, + 50 + ) + .ConfigureAwait(false); + } + + public bool GetWriteCompletionStatus() + { + return _queue.Count == 0 && !_isWriting; + } + + private void WriteTimerElapsed(object sender, ElapsedEventArgs e) + { + _totalElapsed += PollInterval; + + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + _isWriting = false; + return; + } + + if (_totalElapsed > 300 && _isWriting == false && _queue.Count != 0) + { + _totalElapsed = 0; + _writeTimer.Enabled = false; +#pragma warning disable CS4014 + ConsumeQueue(); +#pragma warning restore CS4014 + } + } + + /// + /// Consumes a batch of objects from Queue, of MAX_BUFFER_SIZE or until queue is empty, and filters out the objects that already exist on the server + /// + /// + /// Tuple of: + /// - int: the number of objects consumed from the queue (useful to report progress) + /// - List<(string, string, int)>: List of queued objects that are not already on the server + /// + private async Task<(int, List<(string, string, int)>)> ConsumeNewBatch() + { + // Read a batch from the queue + + List<(string, string, int)> queuedBatch = new(); + List queuedBatchIds = new(); + ValueTuple queueElement; + var payloadBufferSize = 0; + while (_queue.TryPeek(out queueElement) && payloadBufferSize < MaxBufferSize) + { + if (CancellationToken.IsCancellationRequested) + { + return (queuedBatch.Count, null); + } + + _queue.TryDequeue(out queueElement); + queuedBatch.Add(queueElement); + queuedBatchIds.Add(queueElement.Item1); + payloadBufferSize += queueElement.Item3; + } + + // Ask the server which objects from the batch it already has + Dictionary hasObjects = null; + try + { + hasObjects = await HasObjects(queuedBatchIds).ConfigureAwait(false); + } + catch (Exception e) + { + OnErrorAction?.Invoke(TransportName, e); + return (queuedBatch.Count, null); + } + + // Filter the queued batch to only return new objects + + List<(string, string, int)> newBatch = new(); + foreach (var queuedItem in queuedBatch) + { + if (!hasObjects.ContainsKey(queuedItem.Item1) || !hasObjects[queuedItem.Item1]) + { + newBatch.Add(queuedItem); + } + } + + return (queuedBatch.Count, newBatch); + } + + private async Task ConsumeQueue() + { + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + _isWriting = false; + return; + } + + if (_queue.Count == 0) + { + return; + } + + _isWriting = true; + using var message = new HttpRequestMessage + { + RequestUri = new Uri($"/objects/{StreamId}", UriKind.Relative), + Method = HttpMethod.Post + }; + + using var multipart = new MultipartFormDataContent("--obj--"); + + SavedObjectCount = 0; + var addedMpCount = 0; + + while (addedMpCount < MaxMultipartCount && _queue.Count != 0) + { + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + _isWriting = false; + return; + } + + (int consumedQueuedObjects, List<(string, string, int)> batch) = await ConsumeNewBatch().ConfigureAwait(false); + if (batch == null) + { + // Canceled or error happened (which was already reported) + _queue = new ConcurrentQueue<(string, string, int)>(); + _isWriting = false; + return; + } + + if (batch.Count == 0) + { + // The server already has all objects from the queued batch + SavedObjectCount += consumedQueuedObjects; + continue; + } + + var _ctBuilder = new 
StringBuilder("["); + for (int i = 0; i < batch.Count; i++) + { + if (i > 0) + { + _ctBuilder.Append(","); + } + + _ctBuilder.Append(batch[i].Item2); + TotalSentBytes += batch[i].Item3; + } + _ctBuilder.Append("]"); + string _ct = _ctBuilder.ToString(); + + if (CompressPayloads) + { + var content = new GzipContent(new StringContent(_ct, Encoding.UTF8)); + content.Headers.ContentType = new MediaTypeHeaderValue("application/gzip"); + multipart.Add(content, $"batch-{addedMpCount}", $"batch-{addedMpCount}"); + } + else + { + multipart.Add(new StringContent(_ct, Encoding.UTF8), $"batch-{addedMpCount}", $"batch-{addedMpCount}"); + } + + addedMpCount++; + SavedObjectCount += consumedQueuedObjects; + } + + message.Content = multipart; + + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + _isWriting = false; + return; + } + + if (addedMpCount > 0) + { + try + { + var response = await Client.SendAsync(message, CancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + } + catch (Exception e) + { + _isWriting = false; + OnErrorAction?.Invoke( + TransportName, + new Exception($"Remote error: {Account.serverInfo.url} is not reachable. \n {e.Message}", e) + ); + + _queue = new ConcurrentQueue<(string, string, int)>(); + return; + } + } + + _isWriting = false; + + OnProgressAction?.Invoke(TransportName, SavedObjectCount); + + if (!_writeTimer.Enabled) + { + _writeTimer.Enabled = true; + _writeTimer.Start(); + } + } + + public void SaveObject(string hash, string serializedObject) + { + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + _isWriting = false; + return; + } + + _queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + + if (!_writeTimer.Enabled && !_isWriting) + { + _writeTimer.Enabled = true; + _writeTimer.Start(); + } + } + + public void SaveObject(string hash, ITransport sourceTransport) + { + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + _isWriting = false; + return; + } + + var serializedObject = sourceTransport.GetObject(hash); + + _queue.Enqueue((hash, serializedObject, Encoding.UTF8.GetByteCount(serializedObject))); + + if (!_writeTimer.Enabled && !_isWriting) + { + _writeTimer.Enabled = true; + _writeTimer.Start(); + } + } + + #endregion + + #region Getting objects + + public string GetObject(string id) + { + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + return null; + } + + using var message = new HttpRequestMessage + { + RequestUri = new Uri($"/objects/{StreamId}/{id}/single", UriKind.Relative), + Method = HttpMethod.Get + }; + + var response = Client + .SendAsync(message, HttpCompletionOption.ResponseContentRead, CancellationToken) + .Result.Content; + return response.ReadAsStringAsync().Result; + } + + public async Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action onTotalChildrenCountKnown + ) + { + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + return null; + } + + // Get root object + using var rootHttpMessage = new HttpRequestMessage + { + RequestUri = new Uri($"/objects/{StreamId}/{id}/single", UriKind.Relative), + Method = HttpMethod.Get + }; + + HttpResponseMessage rootHttpResponse = null; + try + { + rootHttpResponse = await Client + .SendAsync(rootHttpMessage, 
HttpCompletionOption.ResponseContentRead, CancellationToken) + .ConfigureAwait(false); + rootHttpResponse.EnsureSuccessStatusCode(); + } + catch (Exception e) + { + OnErrorAction?.Invoke(TransportName, e); + return null; + } + + string rootObjectStr = await rootHttpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); + List childrenIds = new(); + var rootPartial = JsonConvert.DeserializeObject(rootObjectStr); + if (rootPartial.__closure != null) + { + childrenIds = new List(rootPartial.__closure.Keys); + } + + onTotalChildrenCountKnown?.Invoke(childrenIds.Count); + + var childrenFoundMap = await targetTransport.HasObjects(childrenIds).ConfigureAwait(false); + List newChildrenIds = new(from objId in childrenFoundMap.Keys where !childrenFoundMap[objId] select objId); + + targetTransport.BeginWrite(); + + // Get the children that are not already in the targetTransport + List childrenIdBatch = new(DownloadBatchSize); + bool downloadBatchResult; + foreach (var objectId in newChildrenIds) + { + childrenIdBatch.Add(objectId); + if (childrenIdBatch.Count >= DownloadBatchSize) + { + downloadBatchResult = await CopyObjects(childrenIdBatch, targetTransport).ConfigureAwait(false); + if (!downloadBatchResult) + { + return null; + } + + childrenIdBatch = new List(DownloadBatchSize); + } + } + if (childrenIdBatch.Count > 0) + { + downloadBatchResult = await CopyObjects(childrenIdBatch, targetTransport).ConfigureAwait(false); + if (!downloadBatchResult) + { + return null; + } + } + + targetTransport.SaveObject(id, rootObjectStr); + await targetTransport.WriteComplete().ConfigureAwait(false); + return rootObjectStr; + } + + private async Task CopyObjects(List hashes, ITransport targetTransport) + { + Stream childrenStream = null; + + if (hashes.Count <= 0) + { + childrenStream = new MemoryStream(); + } + else + { + using var childrenHttpMessage = new HttpRequestMessage + { + RequestUri = new Uri($"/api/getobjects/{StreamId}", UriKind.Relative), + Method = HttpMethod.Post + }; + + Dictionary postParameters = new(); + postParameters.Add("objects", JsonConvert.SerializeObject(hashes)); + childrenHttpMessage.Content = new FormUrlEncodedContent(postParameters); + childrenHttpMessage.Headers.Add("Accept", "text/plain"); + + HttpResponseMessage childrenHttpResponse = null; + try + { + childrenHttpResponse = await Client + .SendAsync(childrenHttpMessage, HttpCompletionOption.ResponseHeadersRead, CancellationToken) + .ConfigureAwait(false); + childrenHttpResponse.EnsureSuccessStatusCode(); + } + catch (Exception e) + { + OnErrorAction?.Invoke(TransportName, e); + return false; + } + + childrenStream = await childrenHttpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + + using var stream = childrenStream; + using var reader = new StreamReader(stream, Encoding.UTF8); + + string line; + while ((line = reader.ReadLine()) != null) + { + if (CancellationToken.IsCancellationRequested) + { + _queue = new ConcurrentQueue<(string, string, int)>(); + return false; + } + + var pcs = line.Split(new[] { '\t' }, 2); + targetTransport.SaveObject(pcs[0], pcs[1]); + + OnProgressAction?.Invoke(TransportName, 1); // possibly make this more friendly + } + + return true; + } + + #endregion +} + +/// +/// https://cymbeline.ch/2014/03/16/gzip-encoding-an-http-post-request-body/ +/// +[Obsolete("Use " + nameof(ServerUtils.GzipContent))] +internal sealed class GzipContent : HttpContent +{ + private readonly HttpContent _content; + + public GzipContent(HttpContent content) + { + this._content = content; + + // Keep the 
original content's headers ... + if (content != null) + { + foreach (KeyValuePair> header in content.Headers) + { + Headers.TryAddWithoutValidation(header.Key, header.Value); + } + } + + // ... and let the server know we've Gzip-compressed the body of this request. + Headers.ContentEncoding.Add("gzip"); + } + + protected override async Task SerializeToStreamAsync(Stream stream, TransportContext context) + { + // Open a GZipStream that writes to the specified output stream. + using GZipStream gzip = new(stream, CompressionMode.Compress, true); + if (_content != null) + { + await _content.CopyToAsync(gzip).ConfigureAwait(false); + } + else + { + await new StringContent(string.Empty).CopyToAsync(gzip).ConfigureAwait(false); + } + } + + protected override bool TryComputeLength(out long length) + { + length = -1; + return false; + } +} +#pragma warning restore IDE1006, IDE0018, CA2000, CA1031, CS1634, CS1570, CS1696, CA1836, CA1854, CA1834, CA2201, CA1725, CA1861, CA1024 diff --git a/src/Speckle.Core/Transports/ServerUtils/GzipContent.cs b/src/Speckle.Core/Transports/ServerUtils/GzipContent.cs new file mode 100644 index 00000000..6fca6e89 --- /dev/null +++ b/src/Speckle.Core/Transports/ServerUtils/GzipContent.cs @@ -0,0 +1,62 @@ +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Net; +using System.Net.Http; +using System.Threading.Tasks; + +namespace Speckle.Core.Transports.ServerUtils; + +/// +/// https://cymbeline.ch/2014/03/16/gzip-encoding-an-http-post-request-body/ +/// +internal sealed class GzipContent : HttpContent +{ + private readonly HttpContent? _content; + + public GzipContent(HttpContent? content) + { + _content = content; + + // Keep the original content's headers ... + if (content is not null) + { + foreach (KeyValuePair> header in content.Headers) + { + Headers.TryAddWithoutValidation(header.Key, header.Value); + } + } + + // ... and let the server know we've Gzip-compressed the body of this request. + Headers.ContentEncoding.Add("gzip"); + } + + protected override async Task SerializeToStreamAsync(Stream stream, TransportContext? context) + { + // Open a GZipStream that writes to the specified output stream. + using GZipStream gzip = new(stream, CompressionMode.Compress, true); + // Copy all the input content to the GZip stream. 
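+ // If no inner content was supplied, an empty payload is compressed instead, so the request body is
+ // still a well-formed gzip stream; the FlushAsync call below pushes any buffered compressed bytes through.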
+ if (_content != null) + { + await _content.CopyToAsync(gzip).ConfigureAwait(false); + } + else + { + using var emptyContent = new StringContent(string.Empty); + await emptyContent.CopyToAsync(gzip).ConfigureAwait(false); + } + await gzip.FlushAsync().ConfigureAwait(false); + } + + protected override bool TryComputeLength(out long length) + { + length = -1; + return false; + } + + protected override void Dispose(bool disposing) + { + _content?.Dispose(); + base.Dispose(disposing); + } +} diff --git a/src/Speckle.Core/Transports/ServerUtils/IServerApi.cs b/src/Speckle.Core/Transports/ServerUtils/IServerApi.cs new file mode 100644 index 00000000..17833590 --- /dev/null +++ b/src/Speckle.Core/Transports/ServerUtils/IServerApi.cs @@ -0,0 +1,22 @@ +using System.Collections.Generic; +using System.Threading.Tasks; + +namespace Speckle.Core.Transports.ServerUtils; + +public delegate void CbObjectDownloaded(string id, string json); +public delegate void CbBlobdDownloaded(); + +internal interface IServerApi +{ + public Task DownloadSingleObject(string streamId, string objectId); + + public Task DownloadObjects(string streamId, IReadOnlyList objectIds, CbObjectDownloaded onObjectCallback); + + public Task> HasObjects(string streamId, IReadOnlyList objectIds); + + public Task UploadObjects(string streamId, IReadOnlyList<(string id, string data)> objects); + + public Task UploadBlobs(string streamId, IReadOnlyList<(string id, string data)> objects); + + public Task DownloadBlobs(string streamId, IReadOnlyList blobIds, CbBlobdDownloaded onBlobCallback); +} diff --git a/src/Speckle.Core/Transports/ServerUtils/ParallelServerAPI.cs b/src/Speckle.Core/Transports/ServerUtils/ParallelServerAPI.cs new file mode 100644 index 00000000..0bad69cd --- /dev/null +++ b/src/Speckle.Core/Transports/ServerUtils/ParallelServerAPI.cs @@ -0,0 +1,305 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Speckle.Core.Logging; +using Speckle.Core.Serialisation.SerializationUtilities; + +namespace Speckle.Core.Transports.ServerUtils; + +internal enum ServerApiOperation +{ + NoOp = default, + DownloadSingleObject, + DownloadObjects, + HasObjects, + UploadObjects, + UploadBlobs, + DownloadBlobs, + HasBlobs, +} + +internal class ParallelServerApi : ParallelOperationExecutor, IServerApi +{ + private readonly string _authToken; + + private readonly string _baseUri; + + private readonly object _callbackLock = new(); + + private readonly int _timeoutSeconds; + + public ParallelServerApi( + string baseUri, + string authorizationToken, + string blobStorageFolder, + int timeoutSeconds, + int numThreads = 4, + int numBufferedOperations = 8 + ) + { + _baseUri = baseUri; + _authToken = authorizationToken; + _timeoutSeconds = timeoutSeconds; + NumThreads = numThreads; + + BlobStorageFolder = blobStorageFolder; + + NumThreads = numThreads; + Tasks = new BlockingCollection>(numBufferedOperations); + } + + public CancellationToken CancellationToken { get; set; } + public bool CompressPayloads { get; set; } = true; + public Action OnBatchSent { get; set; } + + public string BlobStorageFolder { get; set; } + + #region Operations + + public async Task> HasObjects(string streamId, IReadOnlyList objectIds) + { + EnsureStarted(); + List> tasks = new(); + IReadOnlyList> splitObjectsIds; + if (objectIds.Count <= 50) + { + splitObjectsIds = new List> { objectIds }; + } + else + { + splitObjectsIds = 
SplitList(objectIds, NumThreads); + } + + for (int i = 0; i < NumThreads; i++) + { + if (splitObjectsIds.Count <= i || splitObjectsIds[i].Count == 0) + { + continue; + } + + var op = QueueOperation(ServerApiOperation.HasObjects, (streamId, splitObjectsIds[i])); + tasks.Add(op); + } + Dictionary ret = new(); + foreach (var task in tasks) + { + var taskResult = (IReadOnlyDictionary)(await task.ConfigureAwait(false))!; + foreach (KeyValuePair kv in taskResult) + { + ret[kv.Key] = kv.Value; + } + } + + return ret; + } + + public async Task DownloadSingleObject(string streamId, string objectId) + { + EnsureStarted(); + Task op = QueueOperation(ServerApiOperation.DownloadSingleObject, (streamId, objectId)); + object? result = await op.ConfigureAwait(false); + return (string)result!; + } + + public async Task DownloadObjects( + string streamId, + IReadOnlyList objectIds, + CbObjectDownloaded onObjectCallback + ) + { + EnsureStarted(); + List> tasks = new(); + IReadOnlyList> splitObjectsIds = SplitList(objectIds, NumThreads); + object callbackLock = new(); + + CbObjectDownloaded callbackWrapper = (id, json) => + { + lock (callbackLock) + { + onObjectCallback(id, json); + } + }; + + for (int i = 0; i < NumThreads; i++) + { + if (splitObjectsIds[i].Count == 0) + { + continue; + } + + Task op = QueueOperation( + ServerApiOperation.DownloadObjects, + (streamId, splitObjectsIds[i], callbackWrapper) + ); + tasks.Add(op); + } + await Task.WhenAll(tasks.ToArray()).ConfigureAwait(false); + } + + public async Task UploadObjects(string streamId, IReadOnlyList<(string, string)> objects) + { + EnsureStarted(); + List> tasks = new(); + IReadOnlyList> splitObjects; + + // request count optimization: if objects are < 500k, send in 1 request + int totalSize = 0; + foreach ((_, string json) in objects) + { + totalSize += json.Length; + if (totalSize >= 500_000) + { + break; + } + } + splitObjects = + totalSize >= 500_000 ? 
SplitList(objects, NumThreads) : new List> { objects }; + + for (int i = 0; i < NumThreads; i++) + { + if (splitObjects.Count <= i || splitObjects[i].Count == 0) + { + continue; + } + + var op = QueueOperation(ServerApiOperation.UploadObjects, (streamId, splitObjects[i])); + tasks.Add(op); + } + await Task.WhenAll(tasks.ToArray()).ConfigureAwait(false); + } + + public async Task UploadBlobs(string streamId, IReadOnlyList<(string, string)> blobs) + { + EnsureStarted(); + var op = QueueOperation(ServerApiOperation.UploadBlobs, (streamId, blobs)); + await op.ConfigureAwait(false); + } + + public async Task DownloadBlobs(string streamId, IReadOnlyList blobIds, CbBlobdDownloaded onBlobDownloaded) + { + EnsureStarted(); + var op = QueueOperation(ServerApiOperation.DownloadBlobs, (streamId, blobIds, onBlobDownloaded)); + await op.ConfigureAwait(false); + } + + public async Task> HasBlobs(string streamId, IReadOnlyList<(string, string)> blobs) + { + EnsureStarted(); + Task op = QueueOperation(ServerApiOperation.HasBlobs, (streamId, blobs)); + var res = (List)await op.ConfigureAwait(false)!; + Debug.Assert(res is not null); + return res!; + } + + #endregion + + public void EnsureStarted() + { + if (Threads.Count == 0) + { + Start(); + } + } + + protected override void ThreadMain() + { + using ServerApi serialApi = new(_baseUri, _authToken, BlobStorageFolder, _timeoutSeconds); + + serialApi.OnBatchSent = (num, size) => + { + lock (_callbackLock) + { + OnBatchSent(num, size); + } + }; + serialApi.CancellationToken = CancellationToken; + serialApi.CompressPayloads = CompressPayloads; + + while (true) + { + var (operation, inputValue, tcs) = Tasks.Take(); + if (operation == ServerApiOperation.NoOp || tcs == null) + { + return; + } + + try + { + var result = RunOperation(operation, inputValue!, serialApi).GetAwaiter().GetResult(); + tcs.SetResult(result); + } + catch (Exception ex) + { + tcs.SetException(ex); + + if (ex.IsFatal()) + { + throw; + } + } + } + } + + private static async Task RunOperation(ServerApiOperation operation, object inputValue, ServerApi serialApi) + { + switch (operation) + { + case ServerApiOperation.DownloadSingleObject: + var (dsoStreamId, dsoObjectId) = ((string, string))inputValue; + return await serialApi.DownloadSingleObject(dsoStreamId, dsoObjectId).ConfigureAwait(false); + case ServerApiOperation.DownloadObjects: + var (doStreamId, doObjectIds, doCallback) = ((string, IReadOnlyList, CbObjectDownloaded))inputValue; + await serialApi.DownloadObjects(doStreamId, doObjectIds, doCallback).ConfigureAwait(false); + return null; + case ServerApiOperation.HasObjects: + var (hoStreamId, hoObjectIds) = ((string, IReadOnlyList))inputValue; + return await serialApi.HasObjects(hoStreamId, hoObjectIds).ConfigureAwait(false); + case ServerApiOperation.UploadObjects: + var (uoStreamId, uoObjects) = ((string, IReadOnlyList<(string, string)>))inputValue; + await serialApi.UploadObjects(uoStreamId, uoObjects).ConfigureAwait(false); + return null; + case ServerApiOperation.UploadBlobs: + var (ubStreamId, ubBlobs) = ((string, IReadOnlyList<(string, string)>))inputValue; + await serialApi.UploadBlobs(ubStreamId, ubBlobs).ConfigureAwait(false); + return null; + case ServerApiOperation.HasBlobs: + var (hbStreamId, hBlobs) = ((string, IReadOnlyList<(string, string)>))inputValue; + return await serialApi + .HasBlobs(hbStreamId, hBlobs.Select(b => b.Item1.Split(':')[1]).ToList()) + .ConfigureAwait(false); + case ServerApiOperation.DownloadBlobs: + var (dbStreamId, blobIds, cb) = ((string, 
IReadOnlyList, CbBlobdDownloaded))inputValue; + await serialApi.DownloadBlobs(dbStreamId, blobIds, cb).ConfigureAwait(false); + return null; + default: + throw new ArgumentOutOfRangeException(nameof(operation), operation, null); + } + } + + private Task QueueOperation(ServerApiOperation operation, object? inputValue) + { + TaskCompletionSource tcs = new(TaskCreationOptions.RunContinuationsAsynchronously); + Tasks.Add(new(operation, inputValue, tcs)); + return tcs.Task; + } + + private static List> SplitList(IReadOnlyList list, int parts) + { + List> ret = new(parts); + for (int i = 0; i < parts; i++) + { + ret.Add(new List(list.Count / parts + 1)); + } + + for (int i = 0; i < list.Count; i++) + { + ret[i % parts].Add(list[i]); + } + + return ret; + } +} diff --git a/src/Speckle.Core/Transports/ServerUtils/ServerAPI.cs b/src/Speckle.Core/Transports/ServerUtils/ServerAPI.cs new file mode 100644 index 00000000..d6a32a54 --- /dev/null +++ b/src/Speckle.Core/Transports/ServerUtils/ServerAPI.cs @@ -0,0 +1,509 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Core.Models; +using Speckle.Newtonsoft.Json; +using Speckle.Newtonsoft.Json.Linq; + +namespace Speckle.Core.Transports.ServerUtils; + +public sealed class ServerApi : IDisposable, IServerApi +{ + private const int BATCH_SIZE_GET_OBJECTS = 10000; + private const int BATCH_SIZE_HAS_OBJECTS = 100000; + + private const int MAX_MULTIPART_COUNT = 5; + private const int MAX_MULTIPART_SIZE = 25_000_000; + private const int MAX_OBJECT_SIZE = 25_000_000; + + private const int MAX_REQUEST_SIZE = 100_000_000; + + private const int RETRY_COUNT = 3; + private static readonly HashSet s_retryCodes = new() { 408, 502, 503, 504 }; + private static readonly char[] s_separator = { '\t' }; + private static readonly string[] s_filenameSeparator = { "filename=" }; + + private readonly HttpClient _client; + + public ServerApi(string baseUri, string? authorizationToken, string blobStorageFolder, int timeoutSeconds = 60) + { + CancellationToken = CancellationToken.None; + + BlobStorageFolder = blobStorageFolder; + + _client = Http.GetHttpProxyClient( + new SpeckleHttpClientHandler { AutomaticDecompression = DecompressionMethods.GZip } + ); + + _client.BaseAddress = new Uri(baseUri); + _client.Timeout = TimeSpan.FromSeconds(timeoutSeconds); + + Http.AddAuthHeader(_client, authorizationToken); + } + + private int RetriedCount { get; set; } + public CancellationToken CancellationToken { get; set; } + public bool CompressPayloads { get; set; } = true; + + public string BlobStorageFolder { get; set; } + + /// + /// Callback when sending batches. 
Parameters: object count, total bytes sent + /// + public Action OnBatchSent { get; set; } + + public void Dispose() + { + _client.Dispose(); + } + + public async Task DownloadSingleObject(string streamId, string objectId) + { + CancellationToken.ThrowIfCancellationRequested(); + + // Get root object + using var rootHttpMessage = new HttpRequestMessage + { + RequestUri = new Uri($"/objects/{streamId}/{objectId}/single", UriKind.Relative), + Method = HttpMethod.Get + }; + + HttpResponseMessage rootHttpResponse; + do + { + rootHttpResponse = await _client + .SendAsync(rootHttpMessage, HttpCompletionOption.ResponseContentRead, CancellationToken) + .ConfigureAwait(false); + } while (ShouldRetry(rootHttpResponse)); + + rootHttpResponse.EnsureSuccessStatusCode(); + + string rootObjectStr = await rootHttpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); + return rootObjectStr; + } + + public async Task DownloadObjects( + string streamId, + IReadOnlyList objectIds, + CbObjectDownloaded onObjectCallback + ) + { + if (objectIds.Count == 0) + { + return; + } + + if (objectIds.Count < BATCH_SIZE_GET_OBJECTS) + { + await DownloadObjectsImpl(streamId, objectIds, onObjectCallback).ConfigureAwait(false); + return; + } + + List crtRequest = new(); + foreach (string id in objectIds) + { + if (crtRequest.Count >= BATCH_SIZE_GET_OBJECTS) + { + await DownloadObjectsImpl(streamId, crtRequest, onObjectCallback).ConfigureAwait(false); + crtRequest = new List(); + } + crtRequest.Add(id); + } + await DownloadObjectsImpl(streamId, crtRequest, onObjectCallback).ConfigureAwait(false); + } + + public async Task> HasObjects(string streamId, IReadOnlyList objectIds) + { + if (objectIds.Count <= BATCH_SIZE_HAS_OBJECTS) + { + return await HasObjectsImpl(streamId, objectIds).ConfigureAwait(false); + } + + Dictionary ret = new(); + List crtBatch = new(BATCH_SIZE_HAS_OBJECTS); + foreach (string objectId in objectIds) + { + crtBatch.Add(objectId); + if (crtBatch.Count >= BATCH_SIZE_HAS_OBJECTS) + { + Dictionary batchResult = await HasObjectsImpl(streamId, crtBatch).ConfigureAwait(false); + foreach (KeyValuePair kv in batchResult) + { + ret[kv.Key] = kv.Value; + } + + crtBatch = new List(BATCH_SIZE_HAS_OBJECTS); + } + } + if (crtBatch.Count > 0) + { + Dictionary batchResult = await HasObjectsImpl(streamId, crtBatch).ConfigureAwait(false); + foreach (KeyValuePair kv in batchResult) + { + ret[kv.Key] = kv.Value; + } + } + return ret; + } + + public async Task UploadObjects(string streamId, IReadOnlyList<(string, string)> objects) + { + if (objects.Count == 0) + { + return; + } + + // 1. Split into parts of MAX_MULTIPART_SIZE size (can be exceptions until a max of MAX_OBJECT_SIZE if a single obj is larger than MAX_MULTIPART_SIZE) + List> multipartedObjects = new(); + List multipartedObjectsSize = new(); + + List<(string, string)> crtMultipart = new(); + int crtMultipartSize = 0; + + foreach ((string id, string json) in objects) + { + int objSize = Encoding.UTF8.GetByteCount(json); + if (objSize > MAX_OBJECT_SIZE) + { + throw new ArgumentException( + $"Object {id} too large (size {objSize}, max size {MAX_OBJECT_SIZE}). 
Consider using detached/chunked properties", + nameof(objects) + ); + } + + if (crtMultipartSize + objSize <= MAX_MULTIPART_SIZE) + { + crtMultipart.Add((id, json)); + crtMultipartSize += objSize; + continue; + } + + // new multipart + if (crtMultipart.Count > 0) + { + multipartedObjects.Add(crtMultipart); + multipartedObjectsSize.Add(crtMultipartSize); + } + crtMultipart = new List<(string, string)> { (id, json) }; + crtMultipartSize = objSize; + } + multipartedObjects.Add(crtMultipart); + multipartedObjectsSize.Add(crtMultipartSize); + + // 2. Split multiparts into individual server requests of max size MAX_REQUEST_SIZE or max length MAX_MULTIPART_COUNT and send them + List> crtRequest = new(); + int crtRequestSize = 0; + int crtObjectCount = 0; + for (int i = 0; i < multipartedObjects.Count; i++) + { + List<(string, string)> multipart = multipartedObjects[i]; + int multipartSize = multipartedObjectsSize[i]; + if (crtRequestSize + multipartSize > MAX_REQUEST_SIZE || crtRequest.Count >= MAX_MULTIPART_COUNT) + { + await UploadObjectsImpl(streamId, crtRequest).ConfigureAwait(false); + OnBatchSent?.Invoke(crtObjectCount, crtRequestSize); + crtRequest = new List>(); + crtRequestSize = 0; + crtObjectCount = 0; + } + crtRequest.Add(multipart); + crtRequestSize += multipartSize; + crtObjectCount += multipart.Count; + } + if (crtRequest.Count > 0) + { + await UploadObjectsImpl(streamId, crtRequest).ConfigureAwait(false); + OnBatchSent?.Invoke(crtObjectCount, crtRequestSize); + } + } + + public async Task UploadBlobs(string streamId, IReadOnlyList<(string, string)> objects) + { + CancellationToken.ThrowIfCancellationRequested(); + if (objects.Count == 0) + { + return; + } + + var multipartFormDataContent = new MultipartFormDataContent(); + var streams = new List(); + foreach (var (id, filePath) in objects) + { + var fileName = Path.GetFileName(filePath); + var stream = File.OpenRead(filePath); + streams.Add(stream); + StreamContent fsc = new(stream); + var hash = id.Split(':')[1]; + + multipartFormDataContent.Add(fsc, $"hash:{hash}", fileName); + } + + using var message = new HttpRequestMessage + { + RequestUri = new Uri($"/api/stream/{streamId}/blob", UriKind.Relative), + Method = HttpMethod.Post, + Content = multipartFormDataContent + }; + + try + { + HttpResponseMessage response; + do + { + response = await _client.SendAsync(message, CancellationToken).ConfigureAwait(false); + } while (ShouldRetry(response)); //TODO: can we get rid of this now we have polly? + + response.EnsureSuccessStatusCode(); + + foreach (var stream in streams) + { + stream.Dispose(); + } + } + finally + { + foreach (var stream in streams) + { + stream.Dispose(); + } + } + } + + public async Task DownloadBlobs(string streamId, IReadOnlyList blobIds, CbBlobdDownloaded onBlobCallback) + { + foreach (var blobId in blobIds) + { + try + { + using var blobMessage = new HttpRequestMessage + { + RequestUri = new Uri($"api/stream/{streamId}/blob/{blobId}", UriKind.Relative), + Method = HttpMethod.Get + }; + + using var response = await _client.SendAsync(blobMessage, CancellationToken).ConfigureAwait(false); + response.Content.Headers.TryGetValues("Content-Disposition", out IEnumerable? 
cdHeaderValues);
+
+        var cdHeader = cdHeaderValues.First();
+        var fileName = cdHeader.Split(s_filenameSeparator, StringSplitOptions.None)[1].TrimStart('"').TrimEnd('"');
+
+        string fileLocation = Path.Combine(
+          BlobStorageFolder,
+          $"{blobId.Substring(0, Blob.LocalHashPrefixLength)}-{fileName}"
+        );
+        using (var fs = new FileStream(fileLocation, FileMode.OpenOrCreate))
+        {
+          await response.Content.CopyToAsync(fs).ConfigureAwait(false);
+        }
+
+        onBlobCallback();
+      }
+      catch (Exception ex) when (!ex.IsFatal())
+      {
+        throw new SpeckleException($"Failed to download blob {blobId}", ex);
+      }
+    }
+  }
+
+  private async Task DownloadObjectsImpl(
+    string streamId,
+    IReadOnlyList<string> objectIds,
+    CbObjectDownloaded onObjectCallback
+  )
+  {
+    // Stopwatch sw = new Stopwatch(); sw.Start();
+
+    CancellationToken.ThrowIfCancellationRequested();
+
+    using var childrenHttpMessage = new HttpRequestMessage
+    {
+      RequestUri = new Uri($"/api/getobjects/{streamId}", UriKind.Relative),
+      Method = HttpMethod.Post
+    };
+
+    Dictionary<string, string> postParameters = new() { { "objects", JsonConvert.SerializeObject(objectIds) } };
+    string serializedPayload = JsonConvert.SerializeObject(postParameters);
+    childrenHttpMessage.Content = new StringContent(serializedPayload, Encoding.UTF8, "application/json");
+    childrenHttpMessage.Headers.Add("Accept", "text/plain");
+
+    HttpResponseMessage childrenHttpResponse;
+    do
+    {
+      childrenHttpResponse = await _client
+        .SendAsync(childrenHttpMessage, HttpCompletionOption.ResponseHeadersRead, CancellationToken)
+        .ConfigureAwait(false);
+    } while (ShouldRetry(childrenHttpResponse));
+
+    childrenHttpResponse.EnsureSuccessStatusCode();
+
+    using Stream childrenStream = await childrenHttpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
+
+    using var reader = new StreamReader(childrenStream, Encoding.UTF8);
+    while (reader.ReadLine() is { } line)
+    {
+      CancellationToken.ThrowIfCancellationRequested();
+
+      var pcs = line.Split(s_separator, 2);
+      onObjectCallback(pcs[0], pcs[1]);
+    }
+
+    // Console.WriteLine($"ServerApi::DownloadObjects({objectIds.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec");
+  }
+
+  private async Task<Dictionary<string, bool>> HasObjectsImpl(string streamId, IReadOnlyList<string> objectIds)
+  {
+    CancellationToken.ThrowIfCancellationRequested();
+
+    // Stopwatch sw = new Stopwatch(); sw.Start();
+
+    string objectsPostParameter = JsonConvert.SerializeObject(objectIds);
+    var payload = new Dictionary<string, string> { { "objects", objectsPostParameter } };
+    string serializedPayload = JsonConvert.SerializeObject(payload);
+    var uri = new Uri($"/api/diff/{streamId}", UriKind.Relative);
+
+    HttpResponseMessage response;
+    using StringContent stringContent = new(serializedPayload, Encoding.UTF8, "application/json");
+    do
+    {
+      response = await _client.PostAsync(uri, stringContent, CancellationToken).ConfigureAwait(false);
+    } while (ShouldRetry(response));
+
+    response.EnsureSuccessStatusCode();
+
+    var hasObjectsJson = await response.Content.ReadAsStringAsync().ConfigureAwait(false);
+    Dictionary<string, bool> hasObjects = new();
+
+    JObject doc = JObject.Parse(hasObjectsJson);
+    foreach (KeyValuePair<string, JToken?> prop in doc)
+    {
+      hasObjects[prop.Key] = (bool)prop!.Value!;
+    }
+
+    // Console.WriteLine($"ServerApi::HasObjects({objectIds.Count}) request in {sw.ElapsedMilliseconds / 1000.0} sec");
+
+    return hasObjects;
+  }
+
+  private async Task UploadObjectsImpl(string streamId, List<List<(string, string)>> multipartedObjects)
+  {
+    // Stopwatch sw = new Stopwatch(); sw.Start();
+
+    CancellationToken.ThrowIfCancellationRequested();
+
+    using HttpRequestMessage message =
+      new() { RequestUri = new Uri($"/objects/{streamId}", UriKind.Relative), Method = HttpMethod.Post };
+
+    MultipartFormDataContent multipart = new();
+
+    int mpId = 0;
+    foreach (List<(string, string)> mpData in multipartedObjects)
+    {
+      mpId++;
+
+      var ctBuilder = new StringBuilder("[");
+      for (int i = 0; i < mpData.Count; i++)
+      {
+        if (i > 0)
+        {
+          ctBuilder.Append(',');
+        }
+
+        ctBuilder.Append(mpData[i].Item2);
+      }
+      ctBuilder.Append(']');
+      string ct = ctBuilder.ToString();
+
+      if (CompressPayloads)
+      {
+        var content = new GzipContent(new StringContent(ct, Encoding.UTF8));
+        content.Headers.ContentType = new MediaTypeHeaderValue("application/gzip");
+        multipart.Add(content, $"batch-{mpId}", $"batch-{mpId}");
+      }
+      else
+      {
+        multipart.Add(new StringContent(ct, Encoding.UTF8), $"batch-{mpId}", $"batch-{mpId}");
+      }
+    }
+    message.Content = multipart;
+    HttpResponseMessage response;
+    do
+    {
+      response = await _client.SendAsync(message, CancellationToken).ConfigureAwait(false);
+    } while (ShouldRetry(response));
+
+    response.EnsureSuccessStatusCode();
+
+    // Console.WriteLine($"ServerApi::UploadObjects({totalObjCount}) request in {sw.ElapsedMilliseconds / 1000.0} sec");
+  }
+
+  public async Task<List<string>> HasBlobs(string streamId, IReadOnlyList<string> blobIds)
+  {
+    CancellationToken.ThrowIfCancellationRequested();
+
+    var payload = JsonConvert.SerializeObject(blobIds);
+    var uri = new Uri($"/api/stream/{streamId}/blob/diff", UriKind.Relative);
+
+    using StringContent stringContent = new(payload, Encoding.UTF8, "application/json");
+
+    //TODO: can we get rid of this now we have polly?
+    HttpResponseMessage response;
+    do
+    {
+      response = await _client.PostAsync(uri, stringContent, CancellationToken).ConfigureAwait(false);
+    } while (ShouldRetry(response));
+
+    response.EnsureSuccessStatusCode();
+
+    var responseString = await response.Content.ReadAsStringAsync().ConfigureAwait(false);
+    var parsed = JsonConvert.DeserializeObject<List<string>>(responseString);
+    if (parsed is null)
+    {
+      throw new SpeckleException($"Failed to deserialize successful response {response.Content}");
+    }
+
+    return parsed;
+  }
+
+  //TODO: can we get rid of this now we have polly?
+  private bool ShouldRetry(HttpResponseMessage? serverResponse)
+  {
+    if (serverResponse == null)
+    {
+      return true;
+    }
+
+    if (!s_retryCodes.Contains((int)serverResponse.StatusCode))
+    {
+      return false;
+    }
+
+    if (RetriedCount >= RETRY_COUNT)
+    {
+      return false;
+    }
+
+    RetriedCount += 1;
+    return true;
+  }
+
+  private sealed class BlobUploadResult
+  {
+    public List<BlobUploadResultItem> uploadResults { get; set; }
+  }
+
+  private sealed class BlobUploadResultItem
+  {
+    public string blobId { get; set; }
+    public string formKey { get; set; }
+    public string fileName { get; set; }
+  }
+}
diff --git a/src/Speckle.Core/Transports/ServerV2.cs b/src/Speckle.Core/Transports/ServerV2.cs
new file mode 100644
index 00000000..ff1baa2e
--- /dev/null
+++ b/src/Speckle.Core/Transports/ServerV2.cs
@@ -0,0 +1,445 @@
+using System;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using Speckle.Core.Credentials;
+using Speckle.Core.Helpers;
+using Speckle.Core.Logging;
+using Speckle.Core.Models;
+using Speckle.Core.Transports.ServerUtils;
+using Speckle.Newtonsoft.Json.Linq;
+
+namespace Speckle.Core.Transports;
+
+public sealed class ServerTransport : IDisposable, ICloneable, ITransport, IBlobCapableTransport
+{
+  // POC: autofac uses this to construct a factory in place if this delegate and this can then be injected and parameters passed
+  // this should, and I think can, come out of this class, because I think autofac does magic based on the return type
+  public delegate ITransport Factory(Account account, string streamId, int timeoutSeconds, string? blobStorageFolder);
+
+  private readonly object _elapsedLock = new();
+
+  private Exception? _exception;
+  private bool IsInErrorState => _exception is not null;
+  private bool _isWriteComplete;
+
+  // TODO: make send buffer more flexible to accept blobs too
+  private List<(string id, string data)> _sendBuffer = new();
+  private readonly object _sendBufferLock = new();
+  private Thread? _sendingThread;
+
+  private volatile bool _shouldSendThreadRun;
+
+  /// <summary>
+  ///
+  /// </summary>
+  /// <param name="blobStorageFolder">Defaults to <see cref="SpecklePathProvider.BlobStoragePath"/></param>
+  /// <exception cref="ArgumentException"><paramref name="streamId"/> was not formatted as valid stream id</exception>
+  public ServerTransport(Account account, string streamId, int timeoutSeconds = 60, string? blobStorageFolder = null)
+  {
+    if (string.IsNullOrWhiteSpace(streamId))
+    {
+      throw new ArgumentException($"{streamId} is not a valid id", streamId);
+    }
+
+    Account = account;
+    BaseUri = account.serverInfo.url;
+    StreamId = streamId;
+    AuthorizationToken = account.token;
+    TimeoutSeconds = timeoutSeconds;
+    BlobStorageFolder = blobStorageFolder ??
SpecklePathProvider.BlobStoragePath(); + Initialize(account.serverInfo.url); + + Directory.CreateDirectory(BlobStorageFolder); + } + + public int TotalSentBytes { get; private set; } + + public Account Account { get; } + public string BaseUri { get; } + public string StreamId { get; internal set; } + + public int TimeoutSeconds { get; set; } + private string AuthorizationToken { get; } + + internal ParallelServerApi Api { get; private set; } + + public string BlobStorageFolder { get; set; } + + public void SaveBlob(Blob obj) + { + var hash = obj.GetFileHash(); + + lock (_sendBufferLock) + { + if (IsInErrorState) + { + return; + } + + _sendBuffer.Add(($"blob:{hash}", obj.filePath)); + } + } + + public object Clone() + { + return new ServerTransport(Account, StreamId, TimeoutSeconds, BlobStorageFolder) + { + OnProgressAction = OnProgressAction, + CancellationToken = CancellationToken, + }; + } + + public void Dispose() + { + if (_sendingThread != null) + { + _shouldSendThreadRun = false; + _sendingThread.Join(); + } + Api.Dispose(); + } + + public string TransportName { get; set; } = "RemoteTransport"; + + public Dictionary TransportContext => + new() + { + { "name", TransportName }, + { "type", GetType().Name }, + { "streamId", StreamId }, + { "serverUrl", BaseUri }, + { "blobStorageFolder", BlobStorageFolder } + }; + + public CancellationToken CancellationToken { get; set; } + public Action? OnProgressAction { get; set; } + public int SavedObjectCount { get; private set; } + public TimeSpan Elapsed { get; private set; } = TimeSpan.Zero; + + public async Task CopyObjectAndChildren( + string id, + ITransport targetTransport, + Action? onTotalChildrenCountKnown = null + ) + { + if (string.IsNullOrEmpty(id)) + { + throw new ArgumentException("Cannot copy object with empty id", nameof(id)); + } + + CancellationToken.ThrowIfCancellationRequested(); + + using ParallelServerApi api = new(BaseUri, AuthorizationToken, BlobStorageFolder, TimeoutSeconds); + + var stopwatch = Stopwatch.StartNew(); + api.CancellationToken = CancellationToken; + + string rootObjectJson = await api.DownloadSingleObject(StreamId, id).ConfigureAwait(false); + IList allIds = ParseChildrenIds(rootObjectJson); + + List childrenIds = allIds.Where(x => !x.Contains("blob:")).ToList(); + List blobIds = allIds.Where(x => x.Contains("blob:")).Select(x => x.Remove(0, 5)).ToList(); + + onTotalChildrenCountKnown?.Invoke(allIds.Count); + + // + // Objects download + // + + // Check which children are not already in the local transport + var childrenFoundMap = await targetTransport.HasObjects(childrenIds).ConfigureAwait(false); + List newChildrenIds = new(from objId in childrenFoundMap.Keys where !childrenFoundMap[objId] select objId); + + targetTransport.BeginWrite(); + + await api.DownloadObjects( + StreamId, + newChildrenIds, + (childId, childData) => + { + stopwatch.Stop(); + targetTransport.SaveObject(childId, childData); + OnProgressAction?.Invoke(TransportName, 1); + stopwatch.Start(); + } + ) + .ConfigureAwait(false); + + // pausing until writing to the target transport + stopwatch.Stop(); + targetTransport.SaveObject(id, rootObjectJson); + + await targetTransport.WriteComplete().ConfigureAwait(false); + targetTransport.EndWrite(); + stopwatch.Start(); + + // + // Blobs download + // + var localBlobTrimmedHashes = Directory + .GetFiles(BlobStorageFolder) + .Select(fileName => fileName.Split(Path.DirectorySeparatorChar).Last()) + .Where(fileName => fileName.Length > 10) + .Select(fileName => fileName.Substring(0, 
Blob.LocalHashPrefixLength)) + .ToList(); + + var newBlobIds = blobIds + .Where(blobId => !localBlobTrimmedHashes.Contains(blobId.Substring(0, Blob.LocalHashPrefixLength))) + .ToList(); + + await api.DownloadBlobs( + StreamId, + newBlobIds, + () => + { + OnProgressAction?.Invoke(TransportName, 1); + } + ) + .ConfigureAwait(false); + + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + return rootObjectJson; + } + + public string GetObject(string id) + { + CancellationToken.ThrowIfCancellationRequested(); + var stopwatch = Stopwatch.StartNew(); + var result = Api.DownloadSingleObject(StreamId, id).Result; + stopwatch.Stop(); + Elapsed += stopwatch.Elapsed; + return result; + } + + public async Task> HasObjects(IReadOnlyList objectIds) + { + return await Api.HasObjects(StreamId, objectIds).ConfigureAwait(false); + } + + public void SaveObject(string id, string serializedObject) + { + lock (_sendBufferLock) + { + if (IsInErrorState) + { + return; + } + + _sendBuffer.Add((id, serializedObject)); + _isWriteComplete = false; + } + } + + public void SaveObject(string id, ITransport sourceTransport) + { + var objectData = sourceTransport.GetObject(id); + + if (objectData is null) + { + throw new TransportException( + this, + $"Cannot copy {id} from {sourceTransport.TransportName} to {TransportName} as source returned null" + ); + } + + SaveObject(id, objectData); + } + + public void BeginWrite() + { + if (_shouldSendThreadRun || _sendingThread != null) + { + throw new InvalidOperationException("ServerTransport already sending"); + } + + TotalSentBytes = 0; + SavedObjectCount = 0; + + _exception = null; + _shouldSendThreadRun = true; + _sendingThread = new Thread(SendingThreadMain) { Name = "ServerTransportSender", IsBackground = true }; + _sendingThread.Start(); + } + + public async Task WriteComplete() + { + while (true) + { + lock (_sendBufferLock) + { + if (_isWriteComplete || IsInErrorState) + { + CancellationToken.ThrowIfCancellationRequested(); + + if (_exception is not null) + { + throw new TransportException(this, $"{TransportName} transport failed", _exception); + } + + return; + } + } + + await Task.Delay(50, CancellationToken).ConfigureAwait(false); + } + } + + public void EndWrite() + { + if (!_shouldSendThreadRun || _sendingThread == null) + { + throw new InvalidOperationException("ServerTransport not sending"); + } + + _shouldSendThreadRun = false; + _sendingThread.Join(); + _sendingThread = null; + } + + private void Initialize(string baseUri) + { + SpeckleLog.Logger.Information("Initializing a new Remote Transport for {baseUri}", baseUri); + + Api = new ParallelServerApi(BaseUri, AuthorizationToken, BlobStorageFolder, TimeoutSeconds) + { + OnBatchSent = (num, size) => + { + OnProgressAction?.Invoke(TransportName, num); + TotalSentBytes += size; + SavedObjectCount += num; + } + }; + } + + public override string ToString() + { + return $"Server Transport @{Account.serverInfo.url}"; + } + + private static IList ParseChildrenIds(string json) + { + List childrenIds = new(); + + JObject doc1 = JObject.Parse(json); + JToken? closures = doc1["__closure"]; + if (closures == null) + { + return Array.Empty(); + } + + foreach (JToken prop in closures) + { + childrenIds.Add(((JProperty)prop).Name); + } + + return childrenIds; + } + + private async void SendingThreadMain() + { + while (true) + { + var stopwatch = Stopwatch.StartNew(); + if (!_shouldSendThreadRun || CancellationToken.IsCancellationRequested) + { + return; + } + + List<(string id, string data)>? 
buffer = null; + lock (_sendBufferLock) + { + if (_sendBuffer.Count > 0) + { + buffer = _sendBuffer; + _sendBuffer = new(); + } + else + { + _isWriteComplete = true; + } + } + + if (buffer is null) + { + Thread.Sleep(100); + continue; + } + try + { + var bufferObjects = buffer.Where(tuple => !tuple.id.Contains("blob")).ToList(); + var bufferBlobs = buffer.Where(tuple => tuple.id.Contains("blob")).ToList(); + + List objectIds = new(bufferObjects.Count); + + foreach ((string id, _) in bufferObjects) + { + if (id != "blob") + { + objectIds.Add(id); + } + } + + Dictionary hasObjects = await Api.HasObjects(StreamId, objectIds).ConfigureAwait(false); + List<(string, string)> newObjects = new(); + foreach ((string id, object json) in bufferObjects) + { + if (!hasObjects[id]) + { + newObjects.Add((id, (string)json)); + } + } + + // Report the objects that are already on the server + OnProgressAction?.Invoke(TransportName, hasObjects.Count - newObjects.Count); + + await Api.UploadObjects(StreamId, newObjects).ConfigureAwait(false); + + if (bufferBlobs.Count != 0) + { + var blobIdsToUpload = await Api.HasBlobs(StreamId, bufferBlobs).ConfigureAwait(false); + var formattedIds = blobIdsToUpload.Select(id => $"blob:{id}").ToList(); + var newBlobs = bufferBlobs.Where(tuple => formattedIds.IndexOf(tuple.id) != -1).ToList(); + if (newBlobs.Count != 0) + { + await Api.UploadBlobs(StreamId, newBlobs).ConfigureAwait(false); + } + } + } + catch (Exception ex) + { + lock (_sendBufferLock) + { + _sendBuffer.Clear(); + _exception = ex; + } + + if (ex.IsFatal()) + { + throw; + } + } + finally + { + stopwatch.Stop(); + lock (_elapsedLock) + { + Elapsed += stopwatch.Elapsed; + } + } + } + } + + [Obsolete("Transport will throw exceptions instead", true)] + public Action? OnErrorAction { get; set; } +} + +[Obsolete("Use " + nameof(ServerTransport), true)] +public sealed class ServerTransportV2 +{ + public ServerTransportV2(params object[] _) { } +} diff --git a/src/Speckle.Core/Transports/TransportHelpers.cs b/src/Speckle.Core/Transports/TransportHelpers.cs new file mode 100644 index 00000000..c0977dcb --- /dev/null +++ b/src/Speckle.Core/Transports/TransportHelpers.cs @@ -0,0 +1,84 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using Speckle.Core.Serialisation; +using Speckle.Newtonsoft.Json; + +namespace Speckle.Core.Transports; + +public static class TransportHelpers +{ + public static string CopyObjectAndChildrenSync( + string id, + ITransport sourceTransport, + ITransport targetTransport, + Action? onTotalChildrenCountKnown, + CancellationToken cancellationToken + ) + { + if (string.IsNullOrEmpty(id)) + { + throw new ArgumentException("Cannot copy object with empty id", nameof(id)); + } + + cancellationToken.ThrowIfCancellationRequested(); + + var parent = sourceTransport.GetObject(id); + if (parent is null) + { + throw new TransportException( + $"Requested id {id} was not found within this transport {sourceTransport.TransportName}" + ); + } + + targetTransport.SaveObject(id, parent); + + var closures = GetClosureTable(parent); + + onTotalChildrenCountKnown?.Invoke(closures?.Count ?? 
0); + + if (closures is not null) + { + int i = 0; + foreach (var kvp in closures) + { + cancellationToken.ThrowIfCancellationRequested(); + + var child = sourceTransport.GetObject(kvp.Key); + if (child is null) + { + throw new TransportException( + $"Closure id {kvp.Key} was not found within this transport {sourceTransport.TransportName}" + ); + } + + targetTransport.SaveObject(kvp.Key, child); + sourceTransport.OnProgressAction?.Invoke($"{sourceTransport}", i++); + } + } + + return parent; + } + + /// The Json object + /// The closure table + /// Failed to deserialize the object into + internal static Dictionary? GetClosureTable(string objString) //TODO: Unit Test + { + var partial = JsonConvert.DeserializeObject(objString); + + if (partial is null) + { + throw new SpeckleDeserializeException($"Failed to deserialize {nameof(objString)} into {nameof(Placeholder)}"); + } + + return partial.__closure; + } + + [SuppressMessage("Style", "IDE1006:Naming Styles", Justification = "Deserialization target for DTO")] + internal sealed class Placeholder + { + public Dictionary? __closure { get; set; } + } +} diff --git a/src/Speckle.Core/Transports/Utilities.cs b/src/Speckle.Core/Transports/Utilities.cs new file mode 100644 index 00000000..c685c10e --- /dev/null +++ b/src/Speckle.Core/Transports/Utilities.cs @@ -0,0 +1,31 @@ +using System; +using System.Threading.Tasks; +using Speckle.Core.Logging; + +namespace Speckle.Core.Transports; + +public static class Utilities +{ + /// + /// Waits until the provided function returns true. + /// + /// + /// + /// + /// + public static async Task WaitUntil(Func condition, int frequency = 25, int timeout = -1) + { + var waitTask = Task.Run(async () => + { + while (!condition()) + { + await Task.Delay(frequency).ConfigureAwait(false); + } + }); + + if (waitTask != await Task.WhenAny(waitTask, Task.Delay(timeout)).ConfigureAwait(false)) + { + throw new SpeckleException("Process timed out", new TimeoutException()); + } + } +} diff --git a/src/Speckle.Core/packages.lock.json b/src/Speckle.Core/packages.lock.json new file mode 100644 index 00000000..c14ce699 --- /dev/null +++ b/src/Speckle.Core/packages.lock.json @@ -0,0 +1,440 @@ +{ + "version": 2, + "dependencies": { + ".NETStandard,Version=v2.0": { + "GraphQL.Client": { + "type": "Direct", + "requested": "[6.0.0, )", + "resolved": "6.0.0", + "contentHash": "8yPNBbuVBpTptivyAlak4GZvbwbUcjeQTL4vN1HKHRuOykZ4r7l5fcLS6vpyPyLn0x8FsL31xbOIKyxbmR9rbA==", + "dependencies": { + "GraphQL.Client.Abstractions": "6.0.0", + "GraphQL.Client.Abstractions.Websocket": "6.0.0", + "System.Reactive": "5.0.0" + } + }, + "Microsoft.CSharp": { + "type": "Direct", + "requested": "[4.7.0, )", + "resolved": "4.7.0", + "contentHash": "pTj+D3uJWyN3My70i2Hqo+OXixq3Os2D1nJ2x92FFo6sk8fYS1m1WLNTs0Dc1uPaViH0YvEEwvzddQ7y4rhXmA==" + }, + "Microsoft.Data.Sqlite": { + "type": "Direct", + "requested": "[7.0.5, )", + "resolved": "7.0.5", + "contentHash": "KGxbPeWsQMnmQy43DSBxAFtHz3l2JX8EWBSGUCvT3CuZ8KsuzbkqMIJMDOxWtG8eZSoCDI04aiVQjWuuV8HmSw==", + "dependencies": { + "Microsoft.Data.Sqlite.Core": "7.0.5", + "SQLitePCLRaw.bundle_e_sqlite3": "2.1.4" + } + }, + "NETStandard.Library": { + "type": "Direct", + "requested": "[2.0.3, )", + "resolved": "2.0.3", + "contentHash": "st47PosZSHrjECdjeIzZQbzivYBJFv6P2nv4cj2ypdI204DO+vZ7l5raGMiX4eXMJ53RfOIg+/s4DHVZ54Nu2A==", + "dependencies": { + "Microsoft.NETCore.Platforms": "1.1.0" + } + }, + "Polly": { + "type": "Direct", + "requested": "[7.2.3, )", + "resolved": "7.2.3", + "contentHash": 
"DeCY0OFbNdNxsjntr1gTXHJ5pKUwYzp04Er2LLeN3g6pWhffsGuKVfMBLe1lw7x76HrPkLxKEFxBlpRxS2nDEQ==" + }, + "Polly.Contrib.WaitAndRetry": { + "type": "Direct", + "requested": "[1.1.1, )", + "resolved": "1.1.1", + "contentHash": "1MUQLiSo4KDkQe6nzQRhIU05lm9jlexX5BVsbuw0SL82ynZ+GzAHQxJVDPVBboxV37Po3SG077aX8DuSy8TkaA==" + }, + "Polly.Extensions.Http": { + "type": "Direct", + "requested": "[3.0.0, )", + "resolved": "3.0.0", + "contentHash": "drrG+hB3pYFY7w1c3BD+lSGYvH2oIclH8GRSehgfyP5kjnFnHKQuuBhuHLv+PWyFuaTDyk/vfRpnxOzd11+J8g==", + "dependencies": { + "Polly": "7.1.0" + } + }, + "PolySharp": { + "type": "Direct", + "requested": "[1.14.1, )", + "resolved": "1.14.1", + "contentHash": "mOOmFYwad3MIOL14VCjj02LljyF1GNw1wP0YVlxtcPvqdxjGGMNdNJJxHptlry3MOd8b40Flm8RPOM8JOlN2sQ==" + }, + "Sentry": { + "type": "Direct", + "requested": "[3.33.0, )", + "resolved": "3.33.0", + "contentHash": "8vbD2o6IR2wrRrkSiRbnodWGWUOqIlwYtzpjvPNOb5raJdOf+zxMwfS8f6nx9bmrTTfDj7KrCB8C/5OuicAc8A==", + "dependencies": { + "System.Reflection.Metadata": "5.0.0", + "System.Text.Json": "5.0.2" + } + }, + "Sentry.Serilog": { + "type": "Direct", + "requested": "[3.33.0, )", + "resolved": "3.33.0", + "contentHash": "V8BU7QGWg2qLYfNPqtuTBhC1opysny5l+Ifp6J6PhOeAxU0FssR7nYfbJVetrnLIoh2rd3DlJ6hHYYQosQYcUQ==", + "dependencies": { + "Sentry": "3.33.0", + "Serilog": "2.7.1" + } + }, + "Serilog": { + "type": "Direct", + "requested": "[2.12.0, )", + "resolved": "2.12.0", + "contentHash": "xaiJLIdu6rYMKfQMYUZgTy8YK7SMZjB4Yk50C/u//Z4OsvxkUfSPJy4nknfvwAC34yr13q7kcyh4grbwhSxyZg==" + }, + "Serilog.Enrichers.ClientInfo": { + "type": "Direct", + "requested": "[1.3.0, )", + "resolved": "1.3.0", + "contentHash": "mTc7PM+wC9Hr7LWSwqt5mmnlAr7RJs+eTb3PGPRhwdOackk95MkhUZognuxXEdlW19HAFNmEBTSBY5DfLwM8jQ==", + "dependencies": { + "Microsoft.AspNetCore.Http": "2.1.1", + "Serilog": "2.7.1" + } + }, + "Serilog.Exceptions": { + "type": "Direct", + "requested": "[8.4.0, )", + "resolved": "8.4.0", + "contentHash": "nc/+hUw3lsdo0zCj0KMIybAu7perMx79vu72w0za9Nsi6mWyNkGXxYxakAjWB7nEmYL6zdmhEQRB4oJ2ALUeug==", + "dependencies": { + "Serilog": "2.8.0", + "System.Reflection.TypeExtensions": "4.7.0" + } + }, + "Serilog.Sinks.Console": { + "type": "Direct", + "requested": "[4.1.0, )", + "resolved": "4.1.0", + "contentHash": "K6N5q+5fetjnJPvCmkWOpJ/V8IEIoMIB1s86OzBrbxwTyHxdx3pmz4H+8+O/Dc/ftUX12DM1aynx/dDowkwzqg==", + "dependencies": { + "Serilog": "2.10.0" + } + }, + "Serilog.Sinks.Seq": { + "type": "Direct", + "requested": "[5.2.2, )", + "resolved": "5.2.2", + "contentHash": "1Csmo5ua7NKUe0yXUx+zsRefjAniPWcXFhUXxXG8pwo0iMiw2gjn9SOkgYnnxbgWqmlGv236w0N/dHc2v5XwMg==", + "dependencies": { + "Serilog": "2.12.0", + "Serilog.Formatting.Compact": "1.1.0", + "Serilog.Sinks.File": "5.0.0", + "Serilog.Sinks.PeriodicBatching": "3.1.0" + } + }, + "SerilogTimings": { + "type": "Direct", + "requested": "[3.0.1, )", + "resolved": "3.0.1", + "contentHash": "Zs28eTgszAMwpIrbBnWHBI50yuxL50p/dmAUWmy75+axdZYK/Sjm5/5m1N/CisR8acJUhTVcjPZrsB1P5iv0Uw==", + "dependencies": { + "Serilog": "2.10.0" + } + }, + "Speckle.Newtonsoft.Json": { + "type": "Direct", + "requested": "[13.0.2, )", + "resolved": "13.0.2", + "contentHash": "g1BejUZwax5PRfL6xHgLEK23sqHWOgOj9hE7RvfRRlN00AGt8GnPYt8HedSK7UB3HiRW8zCA9Pn0iiYxCK24BA==" + }, + "System.DoubleNumerics": { + "type": "Direct", + "requested": "[3.1.3, )", + "resolved": "3.1.3", + "contentHash": "KRKEM/L3KBodjA9VOg3EifFVWUY6EOqaMB05UvPEDm7Zeby/kZW+4kdWUEPzW6xtkwf46p661L9NrbeeQhtLzw==", + "dependencies": { + "NETStandard.Library": "1.6.1" + } + }, + 
"GraphQL.Client.Abstractions": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "h7uzWFORHZ+CCjwr/ThAyXMr0DPpzEANDa4Uo54wqCQ+j7qUKwqYTgOrb1W40sqbvNaZm9v/X7It31SUw0maHA==", + "dependencies": { + "GraphQL.Primitives": "6.0.0" + } + }, + "GraphQL.Client.Abstractions.Websocket": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "Nr9bPf8gIOvLuXpqEpqr9z9jslYFJOvd0feHth3/kPqeR3uMbjF5pjiwh4jxyMcxHdr8Pb6QiXkV3hsSyt0v7A==", + "dependencies": { + "GraphQL.Client.Abstractions": "6.0.0" + } + }, + "GraphQL.Primitives": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "yg72rrYDapfsIUrul7aF6wwNnTJBOFvuA9VdDTQpPa8AlAriHbufeXYLBcodKjfUdkCnaiggX1U/nEP08Zb5GA==" + }, + "Microsoft.AspNetCore.Http": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "pPDcCW8spnyibK3krpxrOpaFHf5fjV6k1Hsl6gfh77N/8gRYlLU7MOQDUnjpEwdlHmtxwJKQJNxZqVQOmJGRUw==", + "dependencies": { + "Microsoft.AspNetCore.Http.Abstractions": "2.1.1", + "Microsoft.AspNetCore.WebUtilities": "2.1.1", + "Microsoft.Extensions.ObjectPool": "2.1.1", + "Microsoft.Extensions.Options": "2.1.1", + "Microsoft.Net.Http.Headers": "2.1.1" + } + }, + "Microsoft.AspNetCore.Http.Abstractions": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "kQUEVOU4loc8CPSb2WoHFTESqwIa8Ik7ysCBfTwzHAd0moWovc9JQLmhDIHlYLjHbyexqZAlkq/FPRUZqokebw==", + "dependencies": { + "Microsoft.AspNetCore.Http.Features": "2.1.1", + "System.Text.Encodings.Web": "4.5.0" + } + }, + "Microsoft.AspNetCore.Http.Features": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "VklZ7hWgSvHBcDtwYYkdMdI/adlf7ebxTZ9kdzAhX+gUs5jSHE9mZlTamdgf9miSsxc1QjNazHXTDJdVPZKKTw==", + "dependencies": { + "Microsoft.Extensions.Primitives": "2.1.1" + } + }, + "Microsoft.AspNetCore.WebUtilities": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "PGKIZt4+412Z/XPoSjvYu/QIbTxcAQuEFNoA1Pw8a9mgmO0ZhNBmfaNyhgXFf7Rq62kP0tT/2WXpxdcQhkFUPA==", + "dependencies": { + "Microsoft.Net.Http.Headers": "2.1.1", + "System.Text.Encodings.Web": "4.5.0" + } + }, + "Microsoft.Bcl.AsyncInterfaces": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "W8DPQjkMScOMTtJbPwmPyj9c3zYSFGawDW3jwlBOOsnY+EzZFLgNQ/UMkK35JmkNOVPdCyPr2Tw7Vv9N+KA3ZQ==", + "dependencies": { + "System.Threading.Tasks.Extensions": "4.5.4" + } + }, + "Microsoft.Data.Sqlite.Core": { + "type": "Transitive", + "resolved": "7.0.5", + "contentHash": "FTerRmQPqHrCrnoUzhBu+E+1DNGwyrAMLqHkAqOOOu5pGfyMOj8qQUBxI/gDtWtG11p49UxSfWmBzRNlwZqfUg==", + "dependencies": { + "SQLitePCLRaw.core": "2.1.4" + } + }, + "Microsoft.Extensions.DependencyInjection.Abstractions": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "MgYpU5cwZohUMKKg3sbPhvGG+eAZ/59E9UwPwlrUkyXU+PGzqwZg9yyQNjhxuAWmoNoFReoemeCku50prYSGzA==" + }, + "Microsoft.Extensions.ObjectPool": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "SErON45qh4ogDp6lr6UvVmFYW0FERihW+IQ+2JyFv1PUyWktcJytFaWH5zarufJvZwhci7Rf1IyGXr9pVEadTw==" + }, + "Microsoft.Extensions.Options": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "V7lXCU78lAbzaulCGFKojcCyG8RTJicEbiBkPJjFqiqXwndEBBIehdXRMWEVU3UtzQ1yDvphiWUL9th6/4gJ7w==", + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "2.1.1", + "Microsoft.Extensions.Primitives": "2.1.1" + } + }, + "Microsoft.Extensions.Primitives": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "scJ1GZNIxMmjpENh0UZ8XCQ6vzr/LzeF9WvEA51Ix2OQGAs9WPgPu8ABVUdvpKPLuor/t05gm6menJK3PwqOXg==", + "dependencies": { + 
"System.Memory": "4.5.1", + "System.Runtime.CompilerServices.Unsafe": "4.5.1" + } + }, + "Microsoft.Net.Http.Headers": { + "type": "Transitive", + "resolved": "2.1.1", + "contentHash": "lPNIphl8b2EuhOE9dMH6EZDmu7pS882O+HMi5BJNsigxHaWlBrYxZHFZgE18cyaPp6SSZcTkKkuzfjV/RRQKlA==", + "dependencies": { + "Microsoft.Extensions.Primitives": "2.1.1", + "System.Buffers": "4.5.0" + } + }, + "Microsoft.NETCore.Platforms": { + "type": "Transitive", + "resolved": "1.1.0", + "contentHash": "kz0PEW2lhqygehI/d6XsPCQzD7ff7gUJaVGPVETX611eadGsA3A877GdSlU0LRVMCTH/+P3o2iDTak+S08V2+A==" + }, + "Microsoft.NETCore.Targets": { + "type": "Transitive", + "resolved": "1.1.0", + "contentHash": "aOZA3BWfz9RXjpzt0sRJJMjAscAUm3Hoa4UWAfceV9UTYxgwZ1lZt5nO2myFf+/jetYQo4uTP7zS8sJY67BBxg==" + }, + "Serilog.Formatting.Compact": { + "type": "Transitive", + "resolved": "1.1.0", + "contentHash": "pNroKVjo+rDqlxNG5PXkRLpfSCuDOBY0ri6jp9PLe505ljqwhwZz8ospy2vWhQlFu5GkIesh3FcDs4n7sWZODA==", + "dependencies": { + "Serilog": "2.8.0" + } + }, + "Serilog.Sinks.File": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "uwV5hdhWPwUH1szhO8PJpFiahqXmzPzJT/sOijH/kFgUx+cyoDTMM8MHD0adw9+Iem6itoibbUXHYslzXsLEAg==", + "dependencies": { + "Serilog": "2.10.0" + } + }, + "Serilog.Sinks.PeriodicBatching": { + "type": "Transitive", + "resolved": "3.1.0", + "contentHash": "NDWR7m3PalVlGEq3rzoktrXikjFMLmpwF0HI4sowo8YDdU+gqPlTHlDQiOGxHfB0sTfjPA9JjA7ctKG9zqjGkw==", + "dependencies": { + "Serilog": "2.0.0" + } + }, + "SQLitePCLRaw.bundle_e_sqlite3": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "EWI1olKDjFEBMJu0+3wuxwziIAdWDVMYLhuZ3Qs84rrz+DHwD00RzWPZCa+bLnHCf3oJwuFZIRsHT5p236QXww==", + "dependencies": { + "SQLitePCLRaw.lib.e_sqlite3": "2.1.4", + "SQLitePCLRaw.provider.e_sqlite3": "2.1.4" + } + }, + "SQLitePCLRaw.core": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "inBjvSHo9UDKneGNzfUfDjK08JzlcIhn1+SP5Y3m6cgXpCxXKCJDy6Mka7LpgSV+UZmKSnC8rTwB0SQ0xKu5pA==", + "dependencies": { + "System.Memory": "4.5.3" + } + }, + "SQLitePCLRaw.lib.e_sqlite3": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "2C9Q9eX7CPLveJA0rIhf9RXAvu+7nWZu1A2MdG6SD/NOu26TakGgL1nsbc0JAspGijFOo3HoN79xrx8a368fBg==" + }, + "SQLitePCLRaw.provider.e_sqlite3": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "CSlb5dUp1FMIkez9Iv5EXzpeq7rHryVNqwJMWnpq87j9zWZexaEMdisDktMsnnrzKM6ahNrsTkjqNodTBPBxtQ==", + "dependencies": { + "SQLitePCLRaw.core": "2.1.4" + } + }, + "System.Buffers": { + "type": "Transitive", + "resolved": "4.5.1", + "contentHash": "Rw7ijyl1qqRS0YQD/WycNst8hUUMgrMH4FCn1nNm27M4VxchZ1js3fVjQaANHO5f3sN4isvP4a+Met9Y4YomAg==" + }, + "System.Collections.Immutable": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "FXkLXiK0sVVewcso0imKQoOxjoPAj42R8HtjjbSjVPAzwDfzoyoznWxgA3c38LDbN9SJux1xXoXYAhz98j7r2g==", + "dependencies": { + "System.Memory": "4.5.4" + } + }, + "System.Memory": { + "type": "Transitive", + "resolved": "4.5.4", + "contentHash": "1MbJTHS1lZ4bS4FmsJjnuGJOu88ZzTT2rLvrhW7Ygic+pC0NWA+3hgAen0HRdsocuQXCkUTdFn9yHJJhsijDXw==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Numerics.Vectors": "4.4.0", + "System.Runtime.CompilerServices.Unsafe": "4.5.3" + } + }, + "System.Numerics.Vectors": { + "type": "Transitive", + "resolved": "4.5.0", + "contentHash": "QQTlPTl06J/iiDbJCiepZ4H//BVraReU4O4EoRw1U02H5TLUIT7xn3GnDp9AXPSlJUDyFs4uWjWafNX6WrAojQ==" + }, + "System.Reactive": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": 
"erBZjkQHWL9jpasCE/0qKAryzVBJFxGHVBAvgRN1bzM0q2s1S4oYREEEL0Vb+1kA/6BKb5FjUZMp5VXmy+gzkQ==", + "dependencies": { + "System.Runtime.InteropServices.WindowsRuntime": "4.3.0", + "System.Threading.Tasks.Extensions": "4.5.4" + } + }, + "System.Reflection.Metadata": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "5NecZgXktdGg34rh1OenY1rFNDCI8xSjFr+Z4OU4cU06AQHUdRnIIEeWENu3Wl4YowbzkymAIMvi3WyK9U53pQ==", + "dependencies": { + "System.Collections.Immutable": "5.0.0" + } + }, + "System.Reflection.TypeExtensions": { + "type": "Transitive", + "resolved": "4.7.0", + "contentHash": "VybpaOQQhqE6siHppMktjfGBw1GCwvCqiufqmP8F1nj7fTUNtW35LOEt3UZTEsECfo+ELAl/9o9nJx3U91i7vA==" + }, + "System.Runtime": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "JufQi0vPQ0xGnAczR13AUFglDyVYt4Kqnz1AZaiKZ5+GICq0/1MH/mO/eAJHt/mHW1zjKBJd7kV26SrxddAhiw==", + "dependencies": { + "Microsoft.NETCore.Platforms": "1.1.0", + "Microsoft.NETCore.Targets": "1.1.0" + } + }, + "System.Runtime.CompilerServices.Unsafe": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "ZD9TMpsmYJLrxbbmdvhwt9YEgG5WntEnZ/d1eH8JBX9LBp+Ju8BSBhUGbZMNVHHomWo2KVImJhTDl2hIgw/6MA==" + }, + "System.Runtime.InteropServices.WindowsRuntime": { + "type": "Transitive", + "resolved": "4.3.0", + "contentHash": "J4GUi3xZQLUBasNwZnjrffN8i5wpHrBtZoLG+OhRyGo/+YunMRWWtwoMDlUAIdmX0uRfpHIBDSV6zyr3yf00TA==", + "dependencies": { + "System.Runtime": "4.3.0" + } + }, + "System.Text.Encodings.Web": { + "type": "Transitive", + "resolved": "5.0.1", + "contentHash": "KmJ+CJXizDofbq6mpqDoRRLcxgOd2z9X3XoFNULSbvbqVRZkFX3istvr+MUjL6Zw1RT+RNdoI4GYidIINtgvqQ==", + "dependencies": { + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4" + } + }, + "System.Text.Json": { + "type": "Transitive", + "resolved": "5.0.2", + "contentHash": "I47dVIGiV6SfAyppphxqupertT/5oZkYLDCX6vC3HpOI4ZLjyoKAreUoem2ie6G0RbRuFrlqz/PcTQjfb2DOfQ==", + "dependencies": { + "Microsoft.Bcl.AsyncInterfaces": "5.0.0", + "System.Buffers": "4.5.1", + "System.Memory": "4.5.4", + "System.Numerics.Vectors": "4.5.0", + "System.Runtime.CompilerServices.Unsafe": "5.0.0", + "System.Text.Encodings.Web": "5.0.1", + "System.Threading.Tasks.Extensions": "4.5.4" + } + }, + "System.Threading.Tasks.Extensions": { + "type": "Transitive", + "resolved": "4.5.4", + "contentHash": "zteT+G8xuGu6mS+mzDzYXbzS7rd3K6Fjb9RiZlYlJPam2/hU7JCBZBVEcywNuR+oZ1ncTvc/cq0faRr3P01OVg==", + "dependencies": { + "System.Runtime.CompilerServices.Unsafe": "4.5.3" + } + } + } + } +} \ No newline at end of file diff --git a/tests/Speckle.Core.Tests.Integration/Api.cs b/tests/Speckle.Core.Tests.Integration/Api.cs new file mode 100644 index 00000000..bc3b574f --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/Api.cs @@ -0,0 +1,498 @@ +using Speckle.Core.Api; +using Speckle.Core.Credentials; +using Speckle.Core.Models; +using Speckle.Core.Tests.Unit.Kits; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Integration; + +public class Api : IDisposable +{ + private string _branchId = ""; + private string _branchName = ""; + private string _commitId = ""; + + private Account _firstUserAccount, + _secondUserAccount; + + private Client _myClient, + _secondClient; + + private ServerTransport _myServerTransport, + _otherServerTransport; + + private string _objectId = ""; + + private string _streamId = ""; + + [OneTimeSetUp] + public async Task Setup() + { + _firstUserAccount = await Fixtures.SeedUser(); + _secondUserAccount = await Fixtures.SeedUser(); + + _myClient = new Client(_firstUserAccount); + 
_secondClient = new Client(_secondUserAccount); + } + + private void InitServerTransport() + { + _myServerTransport = new ServerTransport(_firstUserAccount, _streamId); + _myServerTransport.Api.CompressPayloads = false; + _otherServerTransport = new ServerTransport(_firstUserAccount, _streamId); + _otherServerTransport.Api.CompressPayloads = false; + } + + [Test] + public async Task ActiveUserGet() + { + var res = await _myClient.ActiveUserGet(); + Assert.That(res.id, Is.EqualTo(_myClient.Account.userInfo.id)); + } + + [Test] + public async Task OtherUserGet() + { + var res = await _myClient.OtherUserGet(_secondUserAccount.userInfo.id); + Assert.That(res!.name, Is.EqualTo(_secondUserAccount.userInfo.name)); + } + + [Test] + public async Task UserSearch() + { + var res = await _myClient.UserSearch(_firstUserAccount.userInfo.email); + Assert.That(res, Has.Count.EqualTo(1)); + Assert.That(res[0].id, Is.EqualTo(_firstUserAccount.userInfo.id)); + } + + [Test] + public async Task ServerVersion() + { + var res = await _myClient.GetServerVersion(); + + Assert.That(res, Is.Not.Null); + } + + [Test, Order(0)] + public async Task StreamCreate() + { + var res = await _myClient.StreamCreate( + new StreamCreateInput { description = "Hello World", name = "Super Stream 01" } + ); + + Assert.That(res, Is.Not.Null); + _streamId = res; + InitServerTransport(); + } + + [Test, Order(10)] + public async Task StreamsGet() + { + var res = await _myClient.StreamsGet(); + + Assert.That(res, Is.Not.Null); + } + + [Test, Order(11)] + public async Task StreamGet() + { + var res = await _myClient.StreamGet(_streamId); + + Assert.That(res, Is.Not.Null); + Assert.That(res.branches.items[0].name, Is.EqualTo("main")); + Assert.That(res.collaborators, Is.Not.Empty); + } + + [Test, Order(12)] + public async Task IsStreamAccessible() + { + var res = await _myClient.IsStreamAccessible(_streamId); + + Assert.That(res, Is.True); + } + + [Test, Order(13)] + public async Task StreamSearch() + { + var res = await _myClient.StreamSearch(_streamId); + + Assert.That(res, Is.Not.Null); + } + + [Test, Order(20)] + public async Task StreamUpdate() + { + var res = await _myClient.StreamUpdate( + new StreamUpdateInput + { + id = _streamId, + description = "Hello World", + name = "Super Stream 01 EDITED" + } + ); + + Assert.That(res, Is.True); + } + + [Test, Order(31)] + public async Task StreamInviteCreate() + { + var res = await _myClient.StreamInviteCreate( + new StreamInviteCreateInput + { + streamId = _streamId, + email = _secondUserAccount.userInfo.email, + message = "Whasssup!" 
+ } + ); + + Assert.That(res, Is.True); + + Assert.ThrowsAsync( + async () => await _myClient.StreamInviteCreate(new StreamInviteCreateInput { streamId = _streamId }) + ); + } + + [Test, Order(32)] + public async Task StreamInviteGet() + { + var invites = await _secondClient.GetAllPendingInvites(); + + Assert.That(invites, Is.Not.Null); + } + + [Test, Order(33)] + public async Task StreamInviteUse() + { + var invites = await _secondClient.GetAllPendingInvites(); + + var res = await _secondClient.StreamInviteUse(invites[0].streamId, invites[0].token); + + Assert.That(res, Is.True); + } + + [Test, Order(34)] + public async Task StreamUpdatePermission() + { + var res = await _myClient.StreamUpdatePermission( + new StreamPermissionInput + { + role = "stream:reviewer", + streamId = _streamId, + userId = _secondUserAccount.userInfo.id + } + ); + + Assert.That(res, Is.True); + } + + [Test, Order(40)] + public async Task StreamRevokePermission() + { + var res = await _myClient.StreamRevokePermission( + new StreamRevokePermissionInput { streamId = _streamId, userId = _secondUserAccount.userInfo.id } + ); + + Assert.That(res, Is.True); + } + + #region activity + + [Test, Order(51)] + public async Task StreamGetActivity() + { + var res = await _myClient.StreamGetActivity(_streamId); + + Assert.That(res, Is.Not.Null); + //Assert.AreEqual(commitId, res[0].); + } + + #endregion + + #region comments + + [Test, Order(52)] + public async Task StreamGetComments() + { + var res = await _myClient.StreamGetActivity(_streamId); + + Assert.That(res, Is.Not.Null); + //Assert.AreEqual(commitId, res[0].); + } + + #endregion + + [Test, Order(60)] + public async Task StreamDelete() + { + var res = await _myClient.StreamDelete(_streamId); + Assert.That(res, Is.True); + } + + #region branches + + [Test, Order(41)] + public async Task BranchCreate() + { + var res = await _myClient.BranchCreate( + new BranchCreateInput + { + streamId = _streamId, + description = "this is a sample branch", + name = "sample-branch" + } + ); + Assert.That(res, Is.Not.Null); + _branchId = res; + _branchName = "sample-branch"; + } + + [Test, Order(42)] + public async Task BranchGet() + { + var res = await _myClient.BranchGet(_streamId, _branchName); + + Assert.That(res, Is.Not.Null); + Assert.That(res.description, Is.EqualTo("this is a sample branch")); + } + + [Test, Order(43)] + public async Task StreamGetBranches() + { + var res = await _myClient.StreamGetBranches(_streamId); + + Assert.That(res, Is.Not.Null); + // Branches are now returned in order of creation so 'main' should always go first. 
+ Assert.That(res[0].name, Is.EqualTo("main")); + } + + [Test, Order(51)] + public async Task StreamGetBranches_Throws_WhenRequestingOverLimit() + { + Assert.ThrowsAsync>( + async () => await _myClient.StreamGetBranches(_streamId, ServerLimits.BRANCH_GET_LIMIT + 1) + ); + var res = await _myClient.StreamGetBranches(_streamId, ServerLimits.BRANCH_GET_LIMIT); + + Assert.That(res, Is.Not.Null); + } + + [Test, Order(52)] + public async Task StreamGetBranches_WithManyBranches() + { + var newStreamId = await _myClient.StreamCreate(new StreamCreateInput { name = "Many branches stream" }); + + await CreateEmptyBranches(_myClient, newStreamId, ServerLimits.BRANCH_GET_LIMIT); + + var res = await _myClient.StreamGetBranches(newStreamId, ServerLimits.BRANCH_GET_LIMIT); + + Assert.That(res, Is.Not.Null); + Assert.That(res, Has.Count.EqualTo(ServerLimits.BRANCH_GET_LIMIT)); + } + + private async Task CreateEmptyBranches( + Client client, + string streamId, + int branchCount, + string branchPrefix = "Test branch" + ) + { + // now let's send HTTP requests to each of these URLs in parallel + var options = new ParallelOptions { MaxDegreeOfParallelism = 2 }; + + // now let's send HTTP requests to each of these URLs in parallel + await Parallel.ForEachAsync( + Enumerable.Range(0, branchCount), + options, + async (i, cancellationToken) => + { + await client.BranchCreate( + new BranchCreateInput { name = $"{branchPrefix} {i}", streamId = streamId }, + cancellationToken + ); + } + ); + } + + #region commit + + [Test, Order(43)] + public async Task CommitCreate() + { + var myObject = new Base(); + var ptsList = new List(); + for (int i = 0; i < 100; i++) + { + ptsList.Add(new Point(i, i, i)); + } + + myObject["@Points"] = ptsList; + + _objectId = await Operations.Send(myObject, new List { _myServerTransport }); + + Assert.That(_objectId, Is.Not.Null); + + var res = await _myClient.CommitCreate( + new CommitCreateInput + { + streamId = _streamId, + branchName = _branchName, + objectId = _objectId, + message = "Fibber Fibbo", + sourceApplication = "Tests", + totalChildrenCount = 100 + } + ); + + Assert.That(res, Is.Not.Null); + _commitId = res; + + var res2 = await _myClient.CommitCreate( + new CommitCreateInput + { + streamId = _streamId, + branchName = _branchName, + objectId = _objectId, + message = "Fabber Fabbo", + sourceApplication = "Tests", + totalChildrenCount = 100, + parents = new List { _commitId } + } + ); + + Assert.That(res2, Is.Not.Null); + _commitId = res2; + } + + [Test, Order(44)] + public async Task CommitGet() + { + var res = await _myClient.CommitGet(_streamId, _commitId); + + Assert.That(res, Is.Not.Null); + Assert.That(res.message, Is.EqualTo("Fabber Fabbo")); + } + + [Test, Order(45)] + public async Task StreamGetCommits() + { + var res = await _myClient.StreamGetCommits(_streamId); + + Assert.That(res, Is.Not.Null); + Assert.That(res[0].id, Is.EqualTo(_commitId)); + } + + #region object + + [Test, Order(45)] + public async Task ObjectGet() + { + var res = await _myClient.ObjectGet(_streamId, _objectId); + + Assert.That(res, Is.Not.Null); + Assert.That(res.totalChildrenCount, Is.EqualTo(100)); + } + + #endregion + + [Test, Order(46)] + public async Task CommitUpdate() + { + var res = await _myClient.CommitUpdate( + new CommitUpdateInput + { + streamId = _streamId, + id = _commitId, + message = "DIM IS DA BEST" + } + ); + + Assert.That(res, Is.True); + } + + [Test, Order(47)] + public async Task CommitReceived() + { + var res = await _myClient.CommitReceived( + new CommitReceivedInput + { + 
commitId = _commitId, + streamId = _streamId, + sourceApplication = "sharp-tests", + message = "The test message" + } + ); + + Assert.That(res, Is.True); + } + + [Test, Order(48)] + public async Task CommitDelete() + { + var res = await _myClient.CommitDelete(new CommitDeleteInput { id = _commitId, streamId = _streamId }); + Assert.That(res, Is.True); + } + + #endregion + + + [Test, Order(49)] + public async Task BranchUpdate() + { + var res = await _myClient.BranchUpdate( + new BranchUpdateInput + { + streamId = _streamId, + id = _branchId, + name = "sample-branch EDITED" + } + ); + + Assert.That(res, Is.True); + } + + [Test, Order(50)] + public async Task BranchDelete() + { + var res = await _myClient.BranchDelete(new BranchDeleteInput { id = _branchId, streamId = _streamId }); + Assert.That(res, Is.True); + } + + #endregion + + #region send/receive bare + + //[Test, Order(60)] + //public async Task SendDetached() + //{ + // var myObject = new Base(); + // var ptsList = new List(); + // for (int i = 0; i < 100; i++) + // ptsList.Add(new Point(i, i, i)); + + // myObject["@Points"] = ptsList; + + // var otherTransport = new ServerTransport(firstUserAccount, null); + // otherTransport.StreamId = + + // objectId = await Operations.Send(myObject, new List() { myServerTransport }, disposeTransports: true); + //} + + //[Test, Order(61)] + //public async Task ReceiveAndCompose() + //{ + // var myObject = await Operations.Receive(objectId, myServerTransport); + // Assert.NotNull(myObject); + // Assert.AreEqual(100, ((List)myObject["@Points"]).Count); + //} + + #endregion + + public void Dispose() + { + _myClient?.Dispose(); + _secondClient?.Dispose(); + _myServerTransport?.Dispose(); + _otherServerTransport?.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Integration/Credentials/UserServerInfoTests.cs b/tests/Speckle.Core.Tests.Integration/Credentials/UserServerInfoTests.cs new file mode 100644 index 00000000..0929c909 --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/Credentials/UserServerInfoTests.cs @@ -0,0 +1,87 @@ +using GraphQL.Client.Http; +using Speckle.Core.Api; +using Speckle.Core.Credentials; + +namespace Speckle.Core.Tests.Integration.Credentials; + +public class UserServerInfoTests +{ + private Account acc; + + [OneTimeSetUp] + public async Task Setup() + { + acc = await Fixtures.SeedUser(); + } + + [Test] + public async Task IsFrontEnd2True() + { + ServerInfo result = await AccountManager.GetServerInfo("https://app.speckle.systems/"); + + Assert.That(result, Is.Not.Null); + Assert.That(result.frontend2, Is.True); + } + + [Test] + public async Task IsFrontEnd2False() + { + ServerInfo result = await AccountManager.GetServerInfo("https://speckle.xyz/"); + + Assert.That(result, Is.Not.Null); + Assert.That(result.frontend2, Is.False); + } + + /// + /// We get ServerInfo from "http://localhost:3000/graphql", + /// Then we mutate the `frontend2` property of ServerInfo by trying to fetch header from "http://localhost:3000/", + /// This is not doable in local server because there is no end-point on this to ping. + /// This is a bad sign for mutation. 
+ /// + [Test] + public void GetServerInfo_ExpectFail_CantPing() + { + Uri serverUrl = new(acc.serverInfo.url); + + Assert.ThrowsAsync(async () => await AccountManager.GetServerInfo(serverUrl)); + } + + [Test] + public void GetServerInfo_ExpectFail_NoServer() + { + Uri serverUrl = new("http://invalidserver.local"); + + Assert.ThrowsAsync(async () => await AccountManager.GetServerInfo(serverUrl)); + } + + [Test] + public async Task GetUserInfo() + { + Uri serverUrl = new(acc.serverInfo.url); + UserInfo result = await AccountManager.GetUserInfo(acc.token, serverUrl); + + Assert.That(result.id, Is.EqualTo(acc.userInfo.id)); + Assert.That(result.name, Is.EqualTo(acc.userInfo.name)); + Assert.That(result.email, Is.EqualTo(acc.userInfo.email)); + Assert.That(result.company, Is.EqualTo(acc.userInfo.company)); + Assert.That(result.avatar, Is.EqualTo(acc.userInfo.avatar)); + } + + [Test] + public void GetUserInfo_ExpectFail_NoServer() + { + Uri serverUrl = new("http://invalidserver.local"); + + Assert.ThrowsAsync(async () => await AccountManager.GetUserInfo("", serverUrl)); + } + + [Test] + public void GetUserInfo_ExpectFail_NoUser() + { + Uri serverUrl = new(acc.serverInfo.url); + + Assert.ThrowsAsync( + async () => await AccountManager.GetUserInfo("Bearer 08913c3c1e7ac65d779d1e1f11b942a44ad9672ca9", serverUrl) + ); + } +} diff --git a/tests/Speckle.Core.Tests.Integration/Fixtures.cs b/tests/Speckle.Core.Tests.Integration/Fixtures.cs new file mode 100644 index 00000000..0e046f9b --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/Fixtures.cs @@ -0,0 +1,141 @@ +using System.Globalization; +using System.Net.Mime; +using System.Text; +using System.Web; +using Newtonsoft.Json; +using Speckle.Core.Api; +using Speckle.Core.Credentials; +using Speckle.Core.Logging; +using Speckle.Core.Models; + +namespace Speckle.Core.Tests.Integration; + +[SetUpFixture] +public class SetUp +{ + [OneTimeSetUp] + public void BeforeAll() + { + SpeckleLog.Initialize("Core", "Testing", new SpeckleLogConfiguration(logToFile: false, logToSeq: false)); + SpeckleLog.Logger.Information("Initialized logger for testing"); + } +} + +public static class Fixtures +{ + private static readonly ServerInfo s_server = new() { url = "http://localhost:3000", name = "Docker Server" }; + + public static async Task SeedUser() + { + var seed = Guid.NewGuid().ToString().ToLower(); + Dictionary user = + new() + { + ["email"] = $"{seed.Substring(0, 7)}@acme.com", + ["password"] = "12ABC3456789DEF0GHO", + ["name"] = $"{seed.Substring(0, 5)} Name" + }; + + using var httpClient = new HttpClient( + new HttpClientHandler { AllowAutoRedirect = false, CheckCertificateRevocationList = true } + ); + + httpClient.BaseAddress = new Uri(s_server.url); + + string redirectUrl; + try + { + var response = await httpClient.PostAsync( + "/auth/local/register?challenge=challengingchallenge", + // $"{Server.url}/auth/local/register?challenge=challengingchallenge", + new StringContent(JsonConvert.SerializeObject(user), Encoding.UTF8, MediaTypeNames.Application.Json) + ); + redirectUrl = response.Headers.Location!.AbsoluteUri; + } + catch (Exception e) + { + throw new Exception($"Cannot seed user on the server {s_server.url}", e); + } + + Uri uri = new(redirectUrl); + var query = HttpUtility.ParseQueryString(uri.Query); + + string accessCode = query["access_code"] ?? 
throw new Exception("Redirect Uri has no 'access_code'."); + Dictionary tokenBody = + new() + { + ["accessCode"] = accessCode, + ["appId"] = "spklwebapp", + ["appSecret"] = "spklwebapp", + ["challenge"] = "challengingchallenge" + }; + + var tokenResponse = await httpClient.PostAsync( + "/auth/token", + new StringContent(JsonConvert.SerializeObject(tokenBody), Encoding.UTF8, MediaTypeNames.Application.Json) + ); + var deserialised = JsonConvert.DeserializeObject>( + await tokenResponse.Content.ReadAsStringAsync() + ); + + var acc = new Account + { + token = deserialised["token"]!, + userInfo = new UserInfo + { + id = user["name"], + email = user["email"], + name = user["name"] + }, + serverInfo = s_server + }; + using var client = new Client(acc); + + var user1 = await client.ActiveUserGet(); + acc.userInfo.id = user1.id; + return acc; + } + + public static Base GenerateSimpleObject() + { + var @base = new Base + { + ["foo"] = "foo", + ["bar"] = "bar", + ["baz"] = "baz", + ["now"] = DateTime.Now.ToString(CultureInfo.InvariantCulture) + }; + + return @base; + } + + public static Base GenerateNestedObject() + { + var @base = new Base + { + ["foo"] = "foo", + ["bar"] = "bar", + ["@baz"] = new Base() { ["mux"] = "mux", ["qux"] = "qux" } + }; + + return @base; + } + + public static Blob[] GenerateThreeBlobs() + { + return new[] { GenerateBlob("blob 1 data"), GenerateBlob("blob 2 data"), GenerateBlob("blob 3 data") }; + } + + private static Blob GenerateBlob(string content) + { + var filePath = Path.GetTempFileName(); + File.WriteAllText(filePath, content); + return new Blob(filePath); + } +} + +public class UserIdResponse +{ + public string userId { get; set; } + public string apiToken { get; set; } +} diff --git a/tests/Speckle.Core.Tests.Integration/GraphQLCLient.cs b/tests/Speckle.Core.Tests.Integration/GraphQLCLient.cs new file mode 100644 index 00000000..afdb06ca --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/GraphQLCLient.cs @@ -0,0 +1,64 @@ +using GraphQL; +using Speckle.Core.Api; +using Speckle.Core.Credentials; + +namespace Speckle.Core.Tests.Integration; + +public class GraphQLClientTests : IDisposable +{ + private Account _account; + private Client _client; + + [OneTimeSetUp] + public async Task Setup() + { + _account = await Fixtures.SeedUser(); + _client = new Client(_account); + } + + [Test] + public void ThrowsForbiddenException() + { + Assert.ThrowsAsync>>( + async () => + await _client.ExecuteGraphQLRequest>( + new GraphQLRequest + { + Query = + @"query { + adminStreams{ + totalCount + } + }" + } + ) + ); + } + + [Test] + public void Cancellation() + { + using CancellationTokenSource tokenSource = new(); + tokenSource.Cancel(); + Assert.CatchAsync( + async () => + await _client.ExecuteGraphQLRequest>( + new GraphQLRequest + { + Query = + @"query { + adminStreams{ + totalCount + } + }" + }, + tokenSource.Token + ) + ); + } + + public void Dispose() + { + _client?.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Integration/ServerTransportTests.cs b/tests/Speckle.Core.Tests.Integration/ServerTransportTests.cs new file mode 100644 index 00000000..0baa04ac --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/ServerTransportTests.cs @@ -0,0 +1,167 @@ +using System.Collections; +using Speckle.Core.Api; +using Speckle.Core.Credentials; +using Speckle.Core.Helpers; +using Speckle.Core.Models; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Integration; + +public class ServerTransportTests : IDisposable +{ + private string _basePath; + private Account 
_account; + private Client _client; + private string _streamId; + private ServerTransport _transport; + + [OneTimeSetUp] + public async Task InitialSetup() + { + _basePath = Path.Join(Path.GetTempPath(), "speckleTest"); + + CleanData(); + Directory.CreateDirectory(_basePath); + SpecklePathProvider.OverrideApplicationDataPath(_basePath); + + _account = await Fixtures.SeedUser(); + _client = new Client(_account); + _streamId = await _client.StreamCreate(new StreamCreateInput { description = "Flobber", name = "Blobber" }); + } + + [SetUp] + public void Setup() + { + CleanData(); + // need to recreate the server transport object for each test + // to make sure all folders are properly initialized + _transport = new ServerTransport(_account, _streamId); + } + + [TearDown] + public void TearDown() + { + CleanData(); + } + + private void CleanData() + { + if (Directory.Exists(_basePath)) + { + Directory.Delete(_basePath, true); + } + } + + [Test] + public async Task SendObject() + { + var myObject = Fixtures.GenerateNestedObject(); + + var objectId = await Operations.Send(myObject, _transport, false); + + Assert.That(objectId, Is.Not.Null); + } + + [Test] + public async Task SendAndReceiveObjectWithBlobs() + { + var myObject = Fixtures.GenerateSimpleObject(); + myObject["blobs"] = Fixtures.GenerateThreeBlobs(); + + var sentObjectId = await Operations.Send(myObject, _transport, false); + + // NOTE: used to debug diffing + // await Operations.Send(myObject, new List { transport }); + + var receivedObject = await Operations.Receive(sentObjectId, _transport); + + var allFiles = Directory + .GetFiles(_transport.BlobStorageFolder) + .Select(fp => fp.Split(Path.DirectorySeparatorChar).Last()) + .ToList(); + var blobPaths = allFiles + .Where(fp => fp.Length > Blob.LocalHashPrefixLength) // excludes things like .DS_store + .ToList(); + + // Check that there are three downloaded blobs! + Assert.That(blobPaths, Has.Count.EqualTo(3)); + + var blobs = ((IList)receivedObject["blobs"]!).Cast().ToList(); + // Check that we have three blobs + Assert.That(blobs, Has.Count.EqualTo(3)); + // Check that received blobs point to local path (where they were received) + Assert.That(blobs[0].filePath, Contains.Substring(_transport.BlobStorageFolder)); + Assert.That(blobs[1].filePath, Contains.Substring(_transport.BlobStorageFolder)); + Assert.That(blobs[2].filePath, Contains.Substring(_transport.BlobStorageFolder)); + } + + [Test] + public async Task SendWithBlobsWithoutSQLiteSendCache() + { + var myObject = Fixtures.GenerateSimpleObject(); + myObject["blobs"] = Fixtures.GenerateThreeBlobs(); + + var memTransport = new MemoryTransport(); + var sentObjectId = await Operations.Send(myObject, new List { _transport, memTransport }); + + var receivedObject = await Operations.Receive(sentObjectId, _transport); + + var allFiles = Directory + .GetFiles(_transport.BlobStorageFolder) + .Select(fp => fp.Split(Path.DirectorySeparatorChar).Last()) + .ToList(); + var blobPaths = allFiles + .Where(fp => fp.Length > Blob.LocalHashPrefixLength) // excludes things like .DS_store + .ToList(); + + // Check that there are three downloaded blobs! 
+ Assert.That(blobPaths, Has.Count.EqualTo(3)); + + var blobs = ((IList)receivedObject["blobs"]!).Cast().ToList(); + // Check that we have three blobs + Assert.That(blobs, Has.Count.EqualTo(3)); + // Check that received blobs point to local path (where they were received) + Assert.That(blobs[0].filePath, Contains.Substring(_transport.BlobStorageFolder)); + Assert.That(blobs[1].filePath, Contains.Substring(_transport.BlobStorageFolder)); + Assert.That(blobs[2].filePath, Contains.Substring(_transport.BlobStorageFolder)); + } + + [Test] + public async Task SendReceiveWithCleanedMemoryCache() + { + var myObject = Fixtures.GenerateSimpleObject(); + myObject["blobs"] = Fixtures.GenerateThreeBlobs(); + + var memTransport = new MemoryTransport(); + var sentObjectId = await Operations.Send(myObject, new ITransport[] { _transport, memTransport }); + + memTransport = new MemoryTransport(); + Base receivedObject = await Operations.Receive(sentObjectId, _transport, memTransport); + Assert.That(receivedObject, Is.Not.Null); + + var allFiles = Directory + .GetFiles(_transport.BlobStorageFolder) + .Select(fp => fp.Split(Path.DirectorySeparatorChar).Last()) + .ToList(); + var blobPaths = allFiles + .Where(fp => fp.Length > Blob.LocalHashPrefixLength) // excludes things like .DS_store + .ToList(); + + // Check that there are three downloaded blobs! + Assert.That(blobPaths.Count, Is.EqualTo(3)); + + var blobs = ((IList)receivedObject!["blobs"]!).Cast().ToList(); + // Check that we have three blobs + Assert.That(blobs, Has.Count.EqualTo(3)); + // Check that received blobs point to local path (where they were received) + Assert.That(blobs[0].filePath, Contains.Substring(_transport.BlobStorageFolder)); + Assert.That(blobs[1].filePath, Contains.Substring(_transport.BlobStorageFolder)); + Assert.That(blobs[2].filePath, Contains.Substring(_transport.BlobStorageFolder)); + } + + public void Dispose() + { + _client?.Dispose(); + _transport?.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Integration/Speckle.Core.Tests.Integration.csproj b/tests/Speckle.Core.Tests.Integration/Speckle.Core.Tests.Integration.csproj new file mode 100644 index 00000000..2f33bde0 --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/Speckle.Core.Tests.Integration.csproj @@ -0,0 +1,43 @@ + + + + net7.0 + enable + disable + false + true + + + + true + + + + + + + + + + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + diff --git a/tests/Speckle.Core.Tests.Integration/Subscriptions/Branches.cs b/tests/Speckle.Core.Tests.Integration/Subscriptions/Branches.cs new file mode 100644 index 00000000..dce80673 --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/Subscriptions/Branches.cs @@ -0,0 +1,122 @@ +using Speckle.Core.Api; +using Speckle.Core.Api.SubscriptionModels; +using Speckle.Core.Credentials; + +namespace Speckle.Core.Tests.Integration.Subscriptions; + +public class Branches : IDisposable +{ + private BranchInfo _branchCreatedInfo; + private BranchInfo _branchDeletedInfo; + private string _branchId; + private BranchInfo _branchUpdatedInfo; + private Client _client; + private string _streamId; + private Account _testUserAccount; + + [OneTimeSetUp] + public async Task Setup() + { + _testUserAccount = await Fixtures.SeedUser(); + _client = new Client(_testUserAccount); + } + + [Test, Order(0)] + public async Task SubscribeBranchCreated() + { + var streamInput = new StreamCreateInput { description 
= "Hello World", name = "Super Stream 01" }; + + _streamId = await _client.StreamCreate(streamInput); + Assert.That(_streamId, Is.Not.Null); + + _client.SubscribeBranchCreated(_streamId); + _client.OnBranchCreated += Client_OnBranchCreated; + + Thread.Sleep(5000); //let server catch-up + + var branchInput = new BranchCreateInput + { + description = "Just testing branch create...", + name = "awesome-features", + streamId = _streamId + }; + + _branchId = await _client.BranchCreate(branchInput); + Assert.That(_branchId, Is.Not.Null); + + await Task.Run(() => + { + Thread.Sleep(1000); //let client catch-up + Assert.That(_branchCreatedInfo, Is.Not.Null); + Assert.That(_branchCreatedInfo.name, Is.EqualTo(branchInput.name)); + }); + } + + private void Client_OnBranchCreated(object sender, BranchInfo e) + { + _branchCreatedInfo = e; + } + + [Test, Order(1)] + public async Task SubscribeBranchUpdated() + { + _client.SubscribeBranchUpdated(_streamId); + _client.OnBranchUpdated += Client_OnBranchUpdated; + + Thread.Sleep(1000); //let server catch-up + + var branchInput = new BranchUpdateInput + { + description = "Just testing branch bpdate...", + name = "cool-features", + streamId = _streamId, + id = _branchId + }; + + var res = await _client.BranchUpdate(branchInput); + Assert.That(res, Is.True); + + await Task.Run(() => + { + Thread.Sleep(1000); //let client catch-up + Assert.That(_branchUpdatedInfo, Is.Not.Null); + Assert.That(_branchUpdatedInfo.name, Is.EqualTo(branchInput.name)); + }); + } + + private void Client_OnBranchUpdated(object sender, BranchInfo e) + { + _branchUpdatedInfo = e; + } + + [Test, Order(3)] + public async Task SubscribeBranchDeleted() + { + _client.SubscribeBranchDeleted(_streamId); + _client.OnBranchDeleted += Client_OnBranchDeleted; + + Thread.Sleep(1000); //let server catch-up + + var branchInput = new BranchDeleteInput { streamId = _streamId, id = _branchId }; + + var res = await _client.BranchDelete(branchInput); + Assert.That(res, Is.True); + + await Task.Run(() => + { + Thread.Sleep(1000); //let client catch-up + Assert.That(_branchDeletedInfo, Is.Not.Null); + Assert.That(_branchDeletedInfo.id, Is.EqualTo(_branchId)); + }); + } + + private void Client_OnBranchDeleted(object sender, BranchInfo e) + { + _branchDeletedInfo = e; + } + + public void Dispose() + { + _client?.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Integration/Subscriptions/Commits.cs b/tests/Speckle.Core.Tests.Integration/Subscriptions/Commits.cs new file mode 100644 index 00000000..ed10de7b --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/Subscriptions/Commits.cs @@ -0,0 +1,161 @@ +using Speckle.Core.Api; +using Speckle.Core.Api.SubscriptionModels; +using Speckle.Core.Credentials; +using Speckle.Core.Models; +using Speckle.Core.Tests.Unit.Kits; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Integration.Subscriptions; + +public class Commits : IDisposable +{ + private Client _client; + private CommitInfo _commitCreatedInfo; + private CommitInfo _commitDeletedInfo; + private string _commitId; + private CommitInfo _commitUpdatedInfo; + private ServerTransport _myServerTransport; + private string _streamId; + private Account _testUserAccount; + + [OneTimeSetUp] + public async Task Setup() + { + _testUserAccount = await Fixtures.SeedUser(); + _client = new Client(_testUserAccount); + } + + private void InitServerTransport() + { + _myServerTransport = new ServerTransport(_testUserAccount, _streamId); + _myServerTransport.Api.CompressPayloads = false; + } + + [Test, 
Order(0)] + //[Ignore("Ironically, it fails.")] + public async Task SubscribeCommitCreated() + { + var streamInput = new StreamCreateInput { description = "Hello World", name = "Super Stream 01" }; + + _streamId = await _client.StreamCreate(streamInput); + Assert.That(_streamId, Is.Not.Null); + + InitServerTransport(); + + var branchInput = new BranchCreateInput + { + description = "Just testing branch create...", + name = "awesome-features", + streamId = _streamId + }; + + var branchId = await _client.BranchCreate(branchInput); + Assert.That(branchId, Is.Not.Null); + + _client.SubscribeCommitCreated(_streamId); + _client.OnCommitCreated += Client_OnCommitCreated; + + Thread.Sleep(1000); //let server catch-up + + var myObject = new Base(); + var ptsList = new List(); + for (int i = 0; i < 100; i++) + { + ptsList.Add(new Point(i, i, i)); + } + + myObject["Points"] = ptsList; + + var objectId = await Operations.Send(myObject, _myServerTransport, false); + + var commitInput = new CommitCreateInput + { + streamId = _streamId, + branchName = "awesome-features", + objectId = objectId, + message = "sending some test points", + sourceApplication = "Tests", + totalChildrenCount = 20 + }; + + _commitId = await _client.CommitCreate(commitInput); + Assert.That(_commitId, Is.Not.Null); + + await Task.Run(() => + { + Thread.Sleep(2000); //let client catch-up + Assert.That(_commitCreatedInfo, Is.Not.Null); + Assert.That(_commitCreatedInfo.message, Is.EqualTo(commitInput.message)); + }); + } + + private void Client_OnCommitCreated(object sender, CommitInfo e) + { + _commitCreatedInfo = e; + } + + [Test, Order(1)] + //[Ignore("Ironically, it fails.")] + public async Task SubscribeCommitUpdated() + { + _client.SubscribeCommitUpdated(_streamId); + _client.OnCommitUpdated += Client_OnCommitUpdated; + + Thread.Sleep(1000); //let server catch-up + + var commitInput = new CommitUpdateInput + { + message = "Just testing commit update...", + streamId = _streamId, + id = _commitId + }; + + var res = await _client.CommitUpdate(commitInput); + Assert.That(res, Is.True); + + await Task.Run(() => + { + Thread.Sleep(2000); //let client catch-up + Assert.That(_commitUpdatedInfo, Is.Not.Null); + Assert.That(_commitUpdatedInfo.message, Is.EqualTo(commitInput.message)); + }); + } + + private void Client_OnCommitUpdated(object sender, CommitInfo e) + { + _commitUpdatedInfo = e; + } + + [Test, Order(3)] + //[Ignore("Ironically, it fails.")] + public async Task SubscribeCommitDeleted() + { + _client.SubscribeCommitDeleted(_streamId); + _client.OnCommitDeleted += Client_OnCommitDeleted; + + Thread.Sleep(1000); //let server catch-up + + var commitInput = new CommitDeleteInput { streamId = _streamId, id = _commitId }; + + var res = await _client.CommitDelete(commitInput); + Assert.That(res, Is.True); + + await Task.Run(() => + { + Thread.Sleep(2000); //let client catch-up + Assert.That(_commitDeletedInfo, Is.Not.Null); + Assert.That(_commitDeletedInfo.id, Is.EqualTo(_commitId)); + }); + } + + private void Client_OnCommitDeleted(object sender, CommitInfo e) + { + _commitDeletedInfo = e; + } + + public void Dispose() + { + _client?.Dispose(); + _myServerTransport?.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Integration/Subscriptions/Streams.cs b/tests/Speckle.Core.Tests.Integration/Subscriptions/Streams.cs new file mode 100644 index 00000000..5acd4d71 --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/Subscriptions/Streams.cs @@ -0,0 +1,113 @@ +using Speckle.Core.Api; +using Speckle.Core.Api.SubscriptionModels; 
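+// Note (descriptive only): these stream-subscription tests follow the same pattern as the
+// branch and commit subscription tests above — subscribe, give the server a moment to
+// register the subscription, perform the mutation, then wait briefly and assert on the
+// event payload captured by the handler.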
+using Speckle.Core.Credentials; + +namespace Speckle.Core.Tests.Integration.Subscriptions; + +public class Streams : IDisposable +{ + private Client _client; + + private StreamInfo _streamAddedInfo; + private string _streamId; + private StreamInfo _streamRemovedInfo; + private StreamInfo _streamUpdatedInfo; + private Account _testUserAccount; + + [OneTimeSetUp] + public async Task Setup() + { + _testUserAccount = await Fixtures.SeedUser(); + _client = new Client(_testUserAccount); + } + + [Test, Order(0)] + public async Task SubscribeStreamAdded() + { + _client.SubscribeUserStreamAdded(); + _client.OnUserStreamAdded += Client_OnUserStreamAdded; + + Thread.Sleep(1000); //let server catch-up + + var streamInput = new StreamCreateInput { description = "Hello World", name = "Super Stream 01" }; + + var res = await _client.StreamCreate(streamInput); + _streamId = res; + Assert.That(res, Is.Not.Null); + + await Task.Run(() => + { + Thread.Sleep(1000); //let client catch-up + Assert.That(_streamAddedInfo, Is.Not.Null); + Assert.That(_streamAddedInfo.name, Is.EqualTo(streamInput.name)); + }); + } + + private void Client_OnUserStreamAdded(object sender, StreamInfo e) + { + _streamAddedInfo = e; + } + + [Test, Order(1)] + public async Task SubscribeStreamUpdated() + { + _client.SubscribeStreamUpdated(_streamId); + _client.OnStreamUpdated += Client_OnStreamUpdated; + + Thread.Sleep(100); //let server catch-up + + var streamInput = new StreamUpdateInput + { + id = _streamId, + description = "Hello World", + name = "Super Stream 01 EDITED" + }; + + var res = await _client.StreamUpdate(streamInput); + + Assert.That(res, Is.True); + + await Task.Run(() => + { + Thread.Sleep(100); //let client catch-up + Assert.That(_streamUpdatedInfo, Is.Not.Null); + Assert.That(_streamUpdatedInfo.name, Is.EqualTo(streamInput.name)); + }); + } + + private void Client_OnStreamUpdated(object sender, StreamInfo e) + { + _streamUpdatedInfo = e; + } + + [Test, Order(2)] + public async Task SubscribeUserStreamRemoved() + { + _client.SubscribeUserStreamRemoved(); + _client.OnUserStreamRemoved += Client_OnStreamRemoved; + ; + + Thread.Sleep(100); //let server catch-up + + var res = await _client.StreamDelete(_streamId); + + Assert.That(res, Is.True); + + await Task.Run(() => + { + Thread.Sleep(100); //let client catch-up + Assert.That(_streamRemovedInfo, Is.Not.Null); + Assert.That(_streamRemovedInfo.id, Is.EqualTo(_streamId)); + }); + } + + private void Client_OnStreamRemoved(object sender, StreamInfo e) + { + _streamRemovedInfo = e; + } + + public void Dispose() + { + _client?.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Integration/Usings.cs b/tests/Speckle.Core.Tests.Integration/Usings.cs new file mode 100644 index 00000000..32445676 --- /dev/null +++ b/tests/Speckle.Core.Tests.Integration/Usings.cs @@ -0,0 +1 @@ +global using NUnit.Framework; diff --git a/tests/Speckle.Core.Tests.Performance/Api/Operations/ReceiveFromSQLite.cs b/tests/Speckle.Core.Tests.Performance/Api/Operations/ReceiveFromSQLite.cs new file mode 100644 index 00000000..311bb71e --- /dev/null +++ b/tests/Speckle.Core.Tests.Performance/Api/Operations/ReceiveFromSQLite.cs @@ -0,0 +1,44 @@ +using System.Diagnostics; +using BenchmarkDotNet.Attributes; +using Speckle.Core.Models; + +namespace Speckle.Core.Tests.Performance.Api.Operations; + +[MemoryDiagnoser] +[RegressionTestConfig(1, 1, 8, nugetVersions: "2.15.2")] +public class ReceiveFromSQLite : IDisposable +{ + [Params(0, 4, 9, 19)] + public int DataComplexity { get; set; } + + private 
TestDataHelper _dataSource; + + [GlobalSetup] + public async Task Setup() + { + _dataSource = new TestDataHelper(); + await _dataSource.SeedTransport(DataComplexity).ConfigureAwait(false); + } + + [Benchmark] + public async Task Receive_FromSQLite() + { + Base b = await Speckle.Core.Api.Operations + .Receive(_dataSource.ObjectId, null, _dataSource.Transport) + .ConfigureAwait(false); + + Trace.Assert(b is not null); + return b; + } + + [GlobalCleanup] + public virtual void Cleanup() + { + Dispose(); + } + + public void Dispose() + { + _dataSource.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Performance/Api/Operations/TraverseCommit.cs b/tests/Speckle.Core.Tests.Performance/Api/Operations/TraverseCommit.cs new file mode 100644 index 00000000..df3a8434 --- /dev/null +++ b/tests/Speckle.Core.Tests.Performance/Api/Operations/TraverseCommit.cs @@ -0,0 +1,38 @@ +using BenchmarkDotNet.Attributes; +using Speckle.Core.Models; +using Speckle.Core.Models.GraphTraversal; + +namespace Speckle.Core.Tests.Performance.Api.Operations; + +[MemoryDiagnoser] +[RegressionTestConfig(1, 1, 20, nugetVersions: "2.15.2")] +public class TraverseCommit +{ + [Params(0, 4, 9, 19)] + public int DataComplexity { get; set; } + + private Base _testData; + private GraphTraversal _sut; + + [GlobalSetup] + public async Task Setup() + { + using var dataSource = new TestDataHelper(); + await dataSource.SeedTransport(DataComplexity).ConfigureAwait(false); + _testData = await dataSource.DeserializeBase().ConfigureAwait(false); + + var convertableRule = TraversalRule + .NewTraversalRule() + .When(b => b.speckle_type.Contains("Geometry")) + .When(DefaultTraversal.HasDisplayValue) + .ContinueTraversing(_ => DefaultTraversal.elementsPropAliases); + + _sut = new GraphTraversal(convertableRule, DefaultTraversal.DefaultRule); + } + + [Benchmark] + public List Traverse() + { + return _sut.Traverse(_testData).ToList(); + } +} diff --git a/tests/Speckle.Core.Tests.Performance/Program.cs b/tests/Speckle.Core.Tests.Performance/Program.cs new file mode 100644 index 00000000..815d61f4 --- /dev/null +++ b/tests/Speckle.Core.Tests.Performance/Program.cs @@ -0,0 +1,11 @@ +using BenchmarkDotNet.Running; + +namespace Speckle.Core.Tests.Performance; + +public static class Program +{ + public static void Main(string[] args) + { + BenchmarkSwitcher.FromAssemblies(new[] { typeof(Program).Assembly }).Run(args); + } +} diff --git a/tests/Speckle.Core.Tests.Performance/RegressionTestConfig.cs b/tests/Speckle.Core.Tests.Performance/RegressionTestConfig.cs new file mode 100644 index 00000000..ac3a03d5 --- /dev/null +++ b/tests/Speckle.Core.Tests.Performance/RegressionTestConfig.cs @@ -0,0 +1,61 @@ +using System.Diagnostics.CodeAnalysis; +using BenchmarkDotNet.Configs; +using BenchmarkDotNet.Engines; +using BenchmarkDotNet.Environments; +using BenchmarkDotNet.Jobs; + +namespace Speckle.Core.Tests.Performance; + +[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class, AllowMultiple = true)] +[SuppressMessage( + "Design", + "CA1019:Define accessors for attribute arguments", + Justification = "Suggestion does not fit with IConfigSource pattern" +)] +public sealed class RegressionTestConfigAttribute : Attribute, IConfigSource +{ + public IConfig Config { get; private set; } + + public RegressionTestConfigAttribute( + int launchCount = 1, + int warmupCount = 0, + int iterationCount = 10, + RunStrategy strategy = RunStrategy.Monitoring, + bool includeHead = true, + params string[] nugetVersions + ) + { + List jobs = new(); + + if 
(includeHead) + { + jobs.Add( + new Job("Head") + .WithRuntime(ClrRuntime.Net481) + .WithStrategy(strategy) + .WithLaunchCount(launchCount) + .WithWarmupCount(warmupCount) + .WithIterationCount(iterationCount) + ); + } + + bool isBaseline = true; + foreach (var version in nugetVersions) + { + jobs.Add( + new Job(version) + .WithRuntime(ClrRuntime.Net481) + .WithStrategy(strategy) + .WithLaunchCount(launchCount) + .WithWarmupCount(warmupCount) + .WithIterationCount(iterationCount) + .WithNuGet("Speckle.Objects", version) + .WithBaseline(isBaseline) + ); + + isBaseline = false; + } + + Config = ManualConfig.CreateEmpty().AddJob(jobs.ToArray()); + } +} diff --git a/tests/Speckle.Core.Tests.Performance/Serialisation/DeserializationWorkerThreads.cs b/tests/Speckle.Core.Tests.Performance/Serialisation/DeserializationWorkerThreads.cs new file mode 100644 index 00000000..de50926d --- /dev/null +++ b/tests/Speckle.Core.Tests.Performance/Serialisation/DeserializationWorkerThreads.cs @@ -0,0 +1,43 @@ +using BenchmarkDotNet.Attributes; +using Speckle.Core.Models; +using Speckle.Core.Serialisation; + +namespace Speckle.Core.Tests.Performance.Serialisation; + +[MemoryDiagnoser] +[RegressionTestConfig(1, 1, 6)] +public class DeserializationWorkerThreads : IDisposable +{ + public static IEnumerable NumThreadsToTest => Enumerable.Range(0, Environment.ProcessorCount + 1); + + [Params(0, 9)] + public int DataComplexity { get; set; } + + private TestDataHelper _dataSource; + + [GlobalSetup] + public async Task Setup() + { + _dataSource = new TestDataHelper(); + await _dataSource.SeedTransport(DataComplexity).ConfigureAwait(false); + } + + [Benchmark] + [ArgumentsSource(nameof(NumThreadsToTest))] + public Base RunTest(int numThreads) + { + BaseObjectDeserializerV2 sut = new() { WorkerThreadCount = numThreads, ReadTransport = _dataSource.Transport }; + return sut.Deserialize(_dataSource.Transport.GetObject(_dataSource.ObjectId)!); + } + + [GlobalCleanup] + public virtual void Cleanup() + { + Dispose(); + } + + public void Dispose() + { + _dataSource.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Performance/Speckle.Core.Tests.Performance.csproj b/tests/Speckle.Core.Tests.Performance/Speckle.Core.Tests.Performance.csproj new file mode 100644 index 00000000..625d087f --- /dev/null +++ b/tests/Speckle.Core.Tests.Performance/Speckle.Core.Tests.Performance.csproj @@ -0,0 +1,23 @@ + + + + net48 + enable + disable + exe + true + + + + true + + + + + + + + + + + diff --git a/tests/Speckle.Core.Tests.Performance/TestDataHelper.cs b/tests/Speckle.Core.Tests.Performance/TestDataHelper.cs new file mode 100644 index 00000000..f95c872b --- /dev/null +++ b/tests/Speckle.Core.Tests.Performance/TestDataHelper.cs @@ -0,0 +1,54 @@ +using Microsoft.Data.Sqlite; +using Speckle.Core.Api; +using Speckle.Core.Credentials; +using Speckle.Core.Models; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Performance; + +public sealed class TestDataHelper : IDisposable +{ + private static readonly string s_basePath = $"./temp {Guid.NewGuid()}"; + private const string APPLICATION_NAME = "Speckle Performance Tests"; + + public SQLiteTransport Transport { get; private set; } + public string ObjectId { get; private set; } + + public async Task SeedTransport(int dataComplexity) + { + Transport = new SQLiteTransport(s_basePath, APPLICATION_NAME); + + //seed SQLite transport with test data + ObjectId = await SeedTransport(dataComplexity, Transport).ConfigureAwait(false); + } + + public static async Task SeedTransport(int 
dataComplexity, ITransport transport) + { + //seed SQLite transport with test data + StreamWrapper sw = new($"https://latest.speckle.dev/streams/efd2c6a31d/branches/{dataComplexity}"); + var acc = await sw.GetAccount().ConfigureAwait(false); + using var client = new Client(acc); + var branch = await client.BranchGet(sw.StreamId, sw.BranchName!, 1).ConfigureAwait(false); + var objectId = branch.commits.items[0].referencedObject; + + using ServerTransport remoteTransport = new(acc, sw.StreamId); + transport.BeginWrite(); + await remoteTransport.CopyObjectAndChildren(objectId, transport).ConfigureAwait(false); + transport.EndWrite(); + await transport.WriteComplete().ConfigureAwait(false); + + return objectId; + } + + public async Task DeserializeBase() + { + return await Operations.Receive(ObjectId, null, Transport).ConfigureAwait(false); + } + + public void Dispose() + { + Transport.Dispose(); + SqliteConnection.ClearAllPools(); + Directory.Delete(s_basePath, true); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Api/GraphQLClient.cs b/tests/Speckle.Core.Tests.Unit/Api/GraphQLClient.cs new file mode 100644 index 00000000..0733e39a --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Api/GraphQLClient.cs @@ -0,0 +1,124 @@ +using System.Diagnostics; +using GraphQL; +using NUnit.Framework; +using Speckle.Core.Api; +using Speckle.Core.Credentials; + +namespace Speckle.Core.Tests.Unit.Api; + +[TestOf(typeof(Client))] +public sealed class GraphQLClientTests : IDisposable +{ + private Client _client; + + [OneTimeSetUp] + public void Setup() + { + _client = new Client( + new Account + { + token = "this is a scam", + serverInfo = new ServerInfo { url = "http://goto.testing" } + } + ); + } + + public void Dispose() + { + _client?.Dispose(); + } + + private static IEnumerable ErrorCases() + { + yield return new TestCaseData( + typeof(SpeckleGraphQLForbiddenException), + new Map { { "code", "FORBIDDEN" } } + ); + yield return new TestCaseData( + typeof(SpeckleGraphQLForbiddenException), + new Map { { "code", "UNAUTHENTICATED" } } + ); + yield return new TestCaseData( + typeof(SpeckleGraphQLInternalErrorException), + new Map { { "code", "INTERNAL_SERVER_ERROR" } } + ); + yield return new TestCaseData(typeof(SpeckleGraphQLException), new Map { { "foo", "bar" } }); + } + + [Test, TestCaseSource(nameof(ErrorCases))] + public void TestExceptionThrowingFromGraphQLErrors(Type exType, Map extensions) + { + Assert.Throws( + exType, + () => + _client.MaybeThrowFromGraphQLErrors( + new GraphQLRequest(), + new GraphQLResponse + { + Errors = new GraphQLError[] { new() { Extensions = extensions } } + } + ) + ); + } + + [Test] + public void TestMaybeThrowsDoesntThrowForNoErrors() + { + Assert.DoesNotThrow(() => _client.MaybeThrowFromGraphQLErrors(new GraphQLRequest(), new GraphQLResponse())); + } + + [Test] + public void TestExecuteWithResiliencePoliciesDoesntRetryTaskCancellation() + { + var timer = new Stopwatch(); + timer.Start(); + Assert.ThrowsAsync(async () => + { + var tokenSource = new CancellationTokenSource(); + tokenSource.Cancel(); + await _client.ExecuteWithResiliencePolicies( + async () => + await Task.Run( + async () => + { + await Task.Delay(1000); + return "foo"; + }, + tokenSource.Token + ) + ); + }); + timer.Stop(); + var elapsed = timer.ElapsedMilliseconds; + + // the default retry policy would retry 5 times with 1 second jitter backoff each + // if the elapsed is less than a second, this was def not retried + Assert.That(elapsed, Is.LessThan(1000)); + } + + [Test] + public async Task 
TestExecuteWithResiliencePoliciesRetry() + { + var counter = 0; + var maxRetryCount = 5; + var expectedResult = "finally it finishes"; + var timer = new Stopwatch(); + timer.Start(); + var result = await _client.ExecuteWithResiliencePolicies(() => + { + counter++; + if (counter < maxRetryCount) + { + throw new SpeckleGraphQLInternalErrorException(new GraphQLRequest(), new GraphQLResponse()); + } + + return Task.FromResult(expectedResult); + }); + timer.Stop(); + // The baseline for wait is 1 seconds between the jittered retry + Assert.That(timer.ElapsedMilliseconds, Is.GreaterThanOrEqualTo(5000)); + Assert.That(counter, Is.EqualTo(maxRetryCount)); + } + + public class FakeGqlResponseModel { } +} diff --git a/tests/Speckle.Core.Tests.Unit/Api/HelpersTests.cs b/tests/Speckle.Core.Tests.Unit/Api/HelpersTests.cs new file mode 100644 index 00000000..78c44cda --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Api/HelpersTests.cs @@ -0,0 +1,35 @@ +using NUnit.Framework; + +namespace Speckle.Core.Tests.Unit.Api; + +[TestFixture] +[TestOf(typeof(Speckle.Core.Api.Helpers))] +public class HelpersTests +{ + [Test] + [TestCase(30, "just now")] + [TestCase(60, "1 minute ago")] + [TestCase(60 * 2, "2 minutes ago")] + [TestCase(60 * 60 * 1, "1 hour ago")] + [TestCase(60 * 60 * 2, "2 hours ago")] + [TestCase(60 * 60 * 24 * 1, "1 day ago")] + [TestCase(60 * 60 * 24 * 2, "2 days ago")] + [TestCase(60 * 60 * 24 * 7 * 1, "1 week ago")] + [TestCase(60 * 60 * 24 * 7 * 2, "2 weeks ago")] + [TestCase(60 * 60 * 24 * 31 * 1, "1 month ago")] + [TestCase(60 * 60 * 24 * 31 * 2, "2 months ago")] + [TestCase(60 * 60 * 24 * 365 * 1, "1 year ago")] + [TestCase(60 * 60 * 24 * 365 * 2, "2 years ago")] + public void TimeAgo_DisplaysTextCorrectly(int secondsAgo, string expectedText) + { + // Get current time and subtract the input amount + var dateTime = DateTime.UtcNow; + + dateTime = dateTime.Subtract(new TimeSpan(0, 0, secondsAgo)); + + // Get the timeAgo text representation + var actual = Speckle.Core.Api.Helpers.TimeAgo(dateTime); + + Assert.That(actual, Is.EqualTo(expectedText)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Api/Operations/ClosureTests.cs b/tests/Speckle.Core.Tests.Unit/Api/Operations/ClosureTests.cs new file mode 100644 index 00000000..6d6f3d57 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Api/Operations/ClosureTests.cs @@ -0,0 +1,120 @@ +using Newtonsoft.Json; +using NUnit.Framework; +using Speckle.Core.Models; +using Speckle.Core.Tests.Unit.Kits; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Unit.Api.Operations; + +[TestFixture] +[TestOf(typeof(Core.Api.Operations))] +public class Closures +{ + [Test(Description = "Checks whether closures are generated correctly by the serialiser.")] + public void CorrectDecompositionTracking() + { + var d5 = new Base(); + ((dynamic)d5).name = "depth five"; // end v + + var d4 = new Base(); + ((dynamic)d4).name = "depth four"; + ((dynamic)d4)["@detach"] = d5; + + var d3 = new Base(); + ((dynamic)d3).name = "depth three"; + ((dynamic)d3)["@detach"] = d4; + + var d2 = new Base(); + ((dynamic)d2).name = "depth two"; + ((dynamic)d2)["@detach"] = d3; + ((dynamic)d2)["@joker"] = new object[] { d5 }; + + var d1 = new Base(); + ((dynamic)d1).name = "depth one"; + ((dynamic)d1)["@detach"] = d2; + ((dynamic)d1)["@joker"] = d5; // consequently, d5 depth in d1 should be 1 + + var transport = new MemoryTransport(); + + var result = Core.Api.Operations.Send(d1, transport, false).Result; + + var test = Core.Api.Operations.Receive(result, localTransport: 
transport).Result; + + Assert.IsNotNull(test.id); + Assert.That(d1.GetId(true), Is.EqualTo(test.id)); + + var d1_ = JsonConvert.DeserializeObject(transport.Objects[d1.GetId(true)]); + var d2_ = JsonConvert.DeserializeObject(transport.Objects[d2.GetId(true)]); + var d3_ = JsonConvert.DeserializeObject(transport.Objects[d3.GetId(true)]); + var d4_ = JsonConvert.DeserializeObject(transport.Objects[d4.GetId(true)]); + var d5_ = JsonConvert.DeserializeObject(transport.Objects[d5.GetId(true)]); + + var depthOf_d5_in_d1 = int.Parse((string)d1_.__closure[d5.GetId(true)]); + Assert.That(depthOf_d5_in_d1, Is.EqualTo(1)); + + var depthOf_d4_in_d1 = int.Parse((string)d1_.__closure[d4.GetId(true)]); + Assert.That(depthOf_d4_in_d1, Is.EqualTo(3)); + + var depthOf_d5_in_d3 = int.Parse((string)d3_.__closure[d5.GetId(true)]); + Assert.That(depthOf_d5_in_d3, Is.EqualTo(2)); + + var depthOf_d4_in_d3 = int.Parse((string)d3_.__closure[d4.GetId(true)]); + Assert.That(depthOf_d4_in_d3, Is.EqualTo(1)); + + var depthOf_d5_in_d2 = int.Parse((string)d2_.__closure[d5.GetId(true)]); + Assert.That(depthOf_d5_in_d2, Is.EqualTo(1)); + } + + [Test] + public void DescendantsCounting() + { + Base myBase = new(); + + var myList = new List(); + // These should be counted! + for (int i = 0; i < 100; i++) + { + var smolBase = new Base(); + smolBase["test"] = i; + myList.Add(smolBase); + } + + // Primitives should not be counted! + for (int i = 0; i < 10; i++) + { + myList.Add(i); + } + + myList.Add("Hello"); + myList.Add(new { hai = "bai" }); + + myBase["@detachTheList"] = myList; + + var dictionary = new Dictionary(); + for (int i = 0; i < 10; i++) + { + var smolBase = new Base { applicationId = i.ToString() }; + dictionary[$"key {i}"] = smolBase; + } + + dictionary["string value"] = "bol"; + dictionary["int value"] = 42; + dictionary["THIS IS RECURSIVE SURPRISE"] = myBase; + + myBase["@detachTheDictionary"] = dictionary; + + var count = myBase.GetTotalChildrenCount(); + Assert.That(count, Is.EqualTo(112)); + + var tableTest = new DiningTable(); + var tableKidsCount = tableTest.GetTotalChildrenCount(); + Assert.That(tableKidsCount, Is.EqualTo(10)); + + // Explicitely test for recurisve references! 
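+    // A detachable property that points back at its parent must not send the count into
+    // infinite recursion: the expected total below is 2, i.e. the object itself plus the
+    // self-referenced child counted once (the same self-inclusive convention as the
+    // DiningTable count above).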
+ var recursiveRef = new Base { applicationId = "random" }; + recursiveRef["@recursive"] = recursiveRef; + + var supriseCount = recursiveRef.GetTotalChildrenCount(); + Assert.That(supriseCount, Is.EqualTo(2)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Api/Operations/OperationsReceiveTests.Exceptional.cs b/tests/Speckle.Core.Tests.Unit/Api/Operations/OperationsReceiveTests.Exceptional.cs new file mode 100644 index 00000000..844d20c2 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Api/Operations/OperationsReceiveTests.Exceptional.cs @@ -0,0 +1,41 @@ +using NUnit.Framework; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Unit.Api.Operations; + +public partial class OperationsReceiveTests +{ + [Test, TestCaseSource(nameof(TestCases))] + public void Receive_ObjectsDontExist_ExceptionThrown(string id) + { + MemoryTransport emptyTransport1 = new(); + MemoryTransport emptyTransport2 = new(); + Assert.ThrowsAsync(async () => + { + await Core.Api.Operations.Receive(id, emptyTransport1, emptyTransport2); + }); + } + + [Test, TestCaseSource(nameof(TestCases))] + public void Receive_ObjectsDontExistNullRemote_ExceptionThrown(string id) + { + MemoryTransport emptyTransport = new(); + Assert.ThrowsAsync(async () => + { + await Core.Api.Operations.Receive(id, null, emptyTransport); + }); + } + + [Test, TestCaseSource(nameof(TestCases))] + public void Receive_OperationCanceled_ExceptionThrown(string id) + { + using CancellationTokenSource ctc = new(); + ctc.Cancel(); + + MemoryTransport emptyTransport2 = new(); + Assert.CatchAsync(async () => + { + await Core.Api.Operations.Receive(id, _testCaseTransport, emptyTransport2, cancellationToken: ctc.Token); + }); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Api/Operations/OperationsReceiveTests.cs b/tests/Speckle.Core.Tests.Unit/Api/Operations/OperationsReceiveTests.cs new file mode 100644 index 00000000..858a8cca --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Api/Operations/OperationsReceiveTests.cs @@ -0,0 +1,84 @@ +using NUnit.Framework; +using Speckle.Core.Models; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Unit.Api.Operations; + +[TestFixture, TestOf(nameof(Core.Api.Operations.Receive))] +public sealed partial class OperationsReceiveTests +{ + public static IEnumerable TestCases => s_testObjects.Select(x => x.GetId(true)); + + private static readonly Base[] s_testObjects = + { + new Base { ["string prop"] = "simple test case", ["numerical prop"] = 123, }, + new Base { ["@detachedProp"] = new Base() { ["the best prop"] = "1234!" 
} }, + new Base + { + ["@detachedList"] = new List { new Base { ["the worst prop"] = null } }, + ["dictionaryProp"] = new Dictionary { ["dict"] = new Base { ["the best prop"] = "" } }, + }, + }; + + private MemoryTransport _testCaseTransport; + + [OneTimeSetUp] + public async Task GlobalSetup() + { + _testCaseTransport = new MemoryTransport(); + foreach (var b in s_testObjects) + { + await Core.Api.Operations.Send(b, _testCaseTransport, false); + } + } + + [Test, TestCaseSource(nameof(TestCases))] + public async Task Receive_FromLocal_ExistingObjects(string id) + { + Base result = await Core.Api.Operations.Receive(id, null, _testCaseTransport); + + Assert.That(result.id, Is.EqualTo(id)); + } + + [Test, TestCaseSource(nameof(TestCases))] + public async Task Receive_FromRemote_ExistingObjects(string id) + { + MemoryTransport localTransport = new(); + Base result = await Core.Api.Operations.Receive(id, _testCaseTransport, localTransport); + + Assert.That(result.id, Is.EqualTo(id)); + } + + [Test, TestCaseSource(nameof(TestCases))] + public async Task Receive_FromLocal_OnProgressActionCalled(string id) + { + bool wasCalled = false; + _ = await Core.Api.Operations.Receive(id, null, _testCaseTransport, onProgressAction: _ => wasCalled = true); + + Assert.That(wasCalled, Is.True); + } + + [Test, TestCaseSource(nameof(TestCases))] + public async Task Receive_FromLocal_OnTotalChildrenCountKnownCalled(string id) + { + bool wasCalled = false; + int children = 0; + var result = await Core.Api.Operations.Receive( + id, + null, + _testCaseTransport, + onTotalChildrenCountKnown: c => + { + wasCalled = true; + children = c; + } + ); + + Assert.That(result.id, Is.EqualTo(id)); + + var expectedChildren = result.GetTotalChildrenCount() - 1; + + Assert.That(wasCalled, Is.True); + Assert.That(children, Is.EqualTo(expectedChildren)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Api/Operations/SendReceiveLocal.cs b/tests/Speckle.Core.Tests.Unit/Api/Operations/SendReceiveLocal.cs new file mode 100644 index 00000000..1800fbc1 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Api/Operations/SendReceiveLocal.cs @@ -0,0 +1,278 @@ +using System.Collections.Concurrent; +using NUnit.Framework; +using Speckle.Core.Models; +using Speckle.Core.Tests.Unit.Kits; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Unit.Api.Operations; + +[TestFixture] +public sealed class SendReceiveLocal : IDisposable +{ + private string _objId01, + _commitId02; + + private const int NUM_OBJECTS = 3001; + + private readonly SQLiteTransport _sut = new(); + + [Test(Description = "Pushing a commit locally"), Order(1)] + public void LocalUpload() + { + var myObject = new Base(); + var rand = new Random(); + + myObject["@items"] = new List(); + + for (int i = 0; i < NUM_OBJECTS; i++) + { + ((List)myObject["@items"]).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-___/---" }); + } + + using SQLiteTransport localTransport = new(); + _objId01 = Core.Api.Operations.Send(myObject, localTransport, false).Result; + + Assert.That(_objId01, Is.Not.Null); + TestContext.Out.WriteLine($"Written {NUM_OBJECTS + 1} objects. 
Commit id is {_objId01}"); + } + + [Test(Description = "Pulling a commit locally"), Order(2)] + public void LocalDownload() + { + var commitPulled = Core.Api.Operations.Receive(_objId01).Result; + + Assert.That(((List)commitPulled["@items"])[0], Is.TypeOf()); + Assert.That(((List)commitPulled["@items"]), Has.Count.EqualTo(NUM_OBJECTS)); + } + + [Test(Description = "Pushing and Pulling a commit locally")] + public void LocalUploadDownload() + { + var myObject = new Base(); + myObject["@items"] = new List(); + + var rand = new Random(); + + for (int i = 0; i < NUM_OBJECTS; i++) + { + ((List)myObject["@items"]).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-___/---" }); + } + + _objId01 = Core.Api.Operations.Send(myObject, _sut, false).Result; + + var commitPulled = Core.Api.Operations.Receive(_objId01).Result; + List items = (List)commitPulled["@items"]; + + Assert.That(items, Has.All.TypeOf()); + Assert.That(items, Has.Count.EqualTo(NUM_OBJECTS)); + } + + [Test(Description = "Pushing and pulling a commit locally"), Order(3)] + public async Task LocalUploadDownloadSmall() + { + var myObject = new Base(); + myObject["@items"] = new List(); + + var rand = new Random(); + + for (int i = 0; i < 30; i++) + { + ((List)myObject["@items"]).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-ugh/---" }); + } + + _objId01 = await Core.Api.Operations.Send(myObject, _sut, false); + + Assert.That(_objId01, Is.Not.Null); + TestContext.Out.WriteLine($"Written {NUM_OBJECTS + 1} objects. Commit id is {_objId01}"); + + var objsPulled = await Core.Api.Operations.Receive(_objId01); + Assert.That(((List)objsPulled["@items"]), Has.Count.EqualTo(30)); + } + + [Test(Description = "Pushing and pulling a commit locally"), Order(3)] + public async Task LocalUploadDownloadListDic() + { + var myList = new List { 1, 2, 3, "ciao" }; + var myDic = new Dictionary + { + { "a", myList }, + { "b", 2 }, + { "c", "ciao" } + }; + + var myObject = new Base(); + myObject["@dictionary"] = myDic; + myObject["@list"] = myList; + + _objId01 = await Core.Api.Operations.Send(myObject, _sut, false); + + Assert.That(_objId01, Is.Not.Null); + + var objsPulled = await Core.Api.Operations.Receive(_objId01); + Assert.That(((List)((Dictionary)objsPulled["@dictionary"])["a"]).First(), Is.EqualTo(1)); + Assert.That(((List)objsPulled["@list"]).Last(), Is.EqualTo("ciao")); + } + + [Test(Description = "Pushing and pulling a random object, with our without detachment"), Order(3)] + public async Task UploadDownloadNonCommitObject() + { + var obj = new Base(); + // Here we are creating a "non-standard" object to act as a base for our multiple objects. + ((dynamic)obj).LayerA = new List(); // Layer a and b will be stored "in" the parent object, + ((dynamic)obj).LayerB = new List(); + ((dynamic)obj)["@LayerC"] = new List(); // whereas this "layer" will be stored as references only. 
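+    // In the serialised parent, each detached ('@'-prefixed) member is replaced by a
+    // reference placeholder (roughly { "speckle_type": "reference", "referencedId": "<id>" })
+    // while the referenced objects are written to the transport separately; '@LayerD' below
+    // checks that a plain array of Base-derived values is detached the same way as a list.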
+ ((dynamic)obj)["@LayerD"] = new Point[] { new(), new(12, 3, 4) }; + var rand = new Random(); + + for (int i = 0; i < 30; i++) + { + ((List)((dynamic)obj).LayerA).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "foo" }); + } + + for (int i = 0; i < 30; i++) + { + ((List)((dynamic)obj).LayerB).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "bar" }); + } + + for (int i = 0; i < 30; i++) + { + ((List)((dynamic)obj)["@LayerC"]).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "baz" }); + } + + _objId01 = await Core.Api.Operations.Send(obj, _sut, false); + + Assert.That(_objId01, Is.Not.Null); + TestContext.Out.WriteLine($"Written {NUM_OBJECTS + 1} objects. Commit id is {_objId01}"); + + var objPulled = await Core.Api.Operations.Receive(_objId01); + + Assert.That(objPulled, Is.TypeOf()); + + // Note: even if the layers were originally declared as lists of "Base" objects, on deserialisation we cannot know that, + // as it's a dynamic property. Dynamic properties, if their content value is ambigous, will default to a common-sense standard. + // This specifically manifests in the case of lists and dictionaries: List will become List, and + // Dictionary will deserialize to Dictionary. + var layerA = ((dynamic)objPulled)["LayerA"] as List; + Assert.That(layerA, Has.Count.EqualTo(30)); + + var layerC = ((dynamic)objPulled)["@LayerC"] as List; + Assert.That(layerC, Has.Count.EqualTo(30)); + Assert.That(layerC[0], Is.TypeOf()); + + var layerD = ((dynamic)objPulled)["@LayerD"] as List; + Assert.That(layerD, Has.Count.EqualTo(2)); + } + + [Test(Description = "Should show progress!"), Order(4)] + public async Task UploadProgressReports() + { + Base myObject = new() { ["items"] = new List() }; + var rand = new Random(); + + for (int i = 0; i < 30; i++) + { + ((List)myObject["items"]).Add(new Point(i, i, i + rand.NextDouble()) { applicationId = i + "-fab/---" }); + } + + ConcurrentDictionary progress = null; + _commitId02 = await Core.Api.Operations.Send( + myObject, + _sut, + false, + onProgressAction: dict => + { + progress = dict; + } + ); + + Assert.That(progress, Is.Not.Null); + Assert.That(progress!.Keys, Has.Count.GreaterThanOrEqualTo(1)); + } + + [Test(Description = "Should show progress!"), Order(5)] + public async Task DownloadProgressReports() + { + ConcurrentDictionary progress = null; + var pulledCommit = await Core.Api.Operations.Receive( + _commitId02, + onProgressAction: dict => + { + progress = dict; + } + ); + Assert.That(progress, Is.Not.Null); + Assert.That(progress.Keys, Has.Count.GreaterThanOrEqualTo(1)); + } + + [Test(Description = "Should dispose of transports after a send or receive operation if so specified.")] + [Obsolete("Send overloads that perform disposal are deprecated")] + public async Task ShouldDisposeTransports() + { + var @base = new Base(); + @base["test"] = "the best"; + + var myLocalTransport = new SQLiteTransport(); + var id = await Core.Api.Operations.Send( + @base, + new List { myLocalTransport }, + false, + disposeTransports: true + ); + + // Send + Assert.ThrowsAsync( + async () => + await Core.Api.Operations.Send(@base, new List { myLocalTransport }, false, disposeTransports: true) + ); + + myLocalTransport = myLocalTransport.Clone() as SQLiteTransport; + _ = await Core.Api.Operations.Receive(id, null, myLocalTransport, disposeTransports: true); + + Assert.ThrowsAsync( + async () => await Core.Api.Operations.Receive(id, null, myLocalTransport) + ); + } + + [Test(Description = "Should not dispose of transports 
if so specified.")] + public async Task ShouldNotDisposeTransports() + { + var @base = new Base(); + @base["test"] = "the best"; + + SQLiteTransport myLocalTransport = new(); + var id = await Core.Api.Operations.Send(@base, myLocalTransport, false); + await Core.Api.Operations.Send(@base, myLocalTransport, false); + + _ = await Core.Api.Operations.Receive(id, null, myLocalTransport); + await Core.Api.Operations.Receive(id, null, myLocalTransport); + } + + //[Test] + //public async Task DiskTransportTest() + //{ + // var myObject = new Base(); + // myObject["@items"] = new List(); + // myObject["test"] = "random"; + + // var rand = new Random(); + + // for (int i = 0; i < 100; i++) + // { + // ((List)myObject["@items"]).Add(new Point(i, i, i) { applicationId = i + "-___/---" }); + // } + + // var dt = new Speckle.Speckle.Core.Transports.Speckle.Speckle.Core.Transports(); + // var id = await Operations.Send(myObject, new List() { dt }, false); + + // Assert.IsNotNull(id); + + // var rebase = await Operations.Receive(id, dt); + + // Assert.AreEqual(rebase.GetId(true), id); + //} + + public void Dispose() + { + _sut.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Api/Operations/SerializationTests.cs b/tests/Speckle.Core.Tests.Unit/Api/Operations/SerializationTests.cs new file mode 100644 index 00000000..5c29d76e --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Api/Operations/SerializationTests.cs @@ -0,0 +1,279 @@ +using System.Drawing; +using NUnit.Framework; +using Speckle.Core.Models; +using Speckle.Core.Tests.Unit.Kits; +using Point = Speckle.Core.Tests.Unit.Kits.Point; + +namespace Speckle.Core.Tests.Unit.Api.Operations; + +[TestFixture] +[TestOf(typeof(Core.Api.Operations))] +public class ObjectSerialization +{ + [Test] + public void SimpleSerialization() + { + var table = new DiningTable(); + ((dynamic)table)["@strangeVariable_NAme3"] = new TableLegFixture(); + + var result = Core.Api.Operations.Serialize(table); + var test = Core.Api.Operations.Deserialize(result); + + Assert.That(table.GetId(), Is.EqualTo(test.GetId())); + + var polyline = new Polyline(); + for (int i = 0; i < 100; i++) + { + polyline.Points.Add(new Point { X = i * 2, Y = i % 2 }); + } + + var strPoly = Core.Api.Operations.Serialize(polyline); + var dePoly = Core.Api.Operations.Deserialize(strPoly); + + Assert.That(dePoly.GetId(), Is.EqualTo(polyline.GetId())); + } + + [Test] + public void IgnoreCircularReferences() + { + var pt = new Point(1, 2, 3); + pt["circle"] = pt; + + var test = Core.Api.Operations.Serialize(pt); + + var result = Core.Api.Operations.Deserialize(test); + var circle = result["circle"]; + Assert.That(circle, Is.Null); + } + + [Test] + public void InterfacePropHandling() + { + Line tail = new() { Start = new Point(0, 0, 0), End = new Point(42, 42, 42) }; + PolygonalFeline cat = new() { Tail = tail }; + + for (int i = 0; i < 10; i++) + { + cat.Claws[$"Claw number {i}"] = new Line + { + Start = new Point(i, i, i), + End = new Point(i + 3.14, i + 3.14, i + 3.14) + }; + + if (i % 2 == 0) + { + cat.Whiskers.Add( + new Line { Start = new Point(i / 2, i / 2, i / 2), End = new Point(i + 3.14, i + 3.14, i + 3.14) } + ); + } + else + { + var brokenWhisker = new Polyline(); + brokenWhisker.Points.Add(new Point(-i, 0, 0)); + brokenWhisker.Points.Add(new Point(0, 0, 0)); + brokenWhisker.Points.Add(new Point(i, 0, 0)); + cat.Whiskers.Add(brokenWhisker); + } + + cat.Fur[i] = new Line { Start = new Point(i, i, i), End = new Point(i + 3.14, i + 3.14, i + 3.14) }; + } + + var result = 
Core.Api.Operations.Serialize(cat); + + var deserialisedFeline = Core.Api.Operations.Deserialize(result); + + Assert.That(deserialisedFeline.GetId(), Is.EqualTo(cat.GetId())); // If we're getting the same hash... we're probably fine! + } + + [Test] + public void InheritanceTests() + { + var superPoint = new SuperPoint + { + X = 10, + Y = 10, + Z = 10, + W = 42 + }; + + var str = Core.Api.Operations.Serialize(superPoint); + var sstr = Core.Api.Operations.Deserialize(str); + + Assert.That(sstr.speckle_type, Is.EqualTo(superPoint.speckle_type)); + } + + [Test] + public void ListDynamicProp() + { + var point = new Point(); + var test = new List(); + + for (var i = 0; i < 100; i++) + { + test.Add(new SuperPoint { W = i }); + } + + point["test"] = test; + + var str = Core.Api.Operations.Serialize(point); + var dsrls = Core.Api.Operations.Deserialize(str); + + var list = dsrls["test"] as List; // NOTE: on dynamically added lists, we cannot infer the inner type and we always fall back to a generic list. + Assert.That(list, Has.Count.EqualTo(100)); + } + + [Test] + public void ChunkSerialisation() + { + var baseBasedChunk = new DataChunk(); + for (var i = 0; i < 200; i++) + { + baseBasedChunk.data.Add(new SuperPoint { W = i }); + } + + var stringBasedChunk = new DataChunk(); + for (var i = 0; i < 200; i++) + { + stringBasedChunk.data.Add(i + "_hai"); + } + + var doubleBasedChunk = new DataChunk(); + for (var i = 0; i < 200; i++) + { + doubleBasedChunk.data.Add(i + 0.33); + } + + var baseChunkString = Core.Api.Operations.Serialize(baseBasedChunk); + var stringChunkString = Core.Api.Operations.Serialize(stringBasedChunk); + var doubleChunkString = Core.Api.Operations.Serialize(doubleBasedChunk); + + var baseChunkDeserialised = (DataChunk)Core.Api.Operations.Deserialize(baseChunkString); + var stringChunkDeserialised = (DataChunk)Core.Api.Operations.Deserialize(stringChunkString); + var doubleChunkDeserialised = (DataChunk)Core.Api.Operations.Deserialize(doubleChunkString); + + Assert.That(baseChunkDeserialised.data, Has.Count.EqualTo(baseBasedChunk.data.Count)); + Assert.That(stringChunkDeserialised.data, Has.Count.EqualTo(stringBasedChunk.data.Count)); + Assert.That(doubleChunkDeserialised.data, Has.Count.EqualTo(doubleBasedChunk.data.Count)); + } + + [Test] + public void ObjectWithChunksSerialisation() + { + const int MAX_NUM = 2020; + var mesh = new FakeMesh { ArrayOfDoubles = new double[MAX_NUM], ArrayOfLegs = new TableLeg[MAX_NUM] }; + + var customChunk = new List(); + var defaultChunk = new List(); + + for (int i = 0; i < MAX_NUM; i++) + { + mesh.Vertices.Add(i / 2); + customChunk.Add(i / 2); + defaultChunk.Add(i / 2); + mesh.Tables.Add(new Tabletop { length = 2000 }); + mesh.ArrayOfDoubles[i] = i * 3.3; + mesh.ArrayOfLegs[i] = new TableLeg { height = 2 + i }; + } + + mesh["@(800)CustomChunk"] = customChunk; + mesh["@()DefaultChunk"] = defaultChunk; + + var serialised = Core.Api.Operations.Serialize(mesh); + var deserialised = Core.Api.Operations.Deserialize(serialised); + + Assert.That(mesh.GetId(), Is.EqualTo(deserialised.GetId())); + } + + [Test] + public void EmptyListSerialisationTests() + { + // NOTE: expected behaviour is that empty lists should serialize as empty lists. Don't ask why, it's complicated. + // Regarding chunkable empty lists, to prevent empty chunks, the expected behaviour is to have an empty lists, with no chunks inside. 
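+    // For context: a "@(N)" prefix normally splits a list into DataChunk objects of at most
+    // N items each (see ChunkSerialisation above), so an empty chunkable list is expected to
+    // serialise as a plain empty array with no chunk references at all.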
+ var test = new Base(); + + test["@(5)emptyChunks"] = new List(); + test["emptyList"] = new List(); + test["@emptyDetachableList"] = new List(); + + // Note: nested empty lists should be preserved. + test["nestedList"] = new List { new List { new List() } }; + test["@nestedDetachableList"] = new List { new List { new List() } }; + + var serialised = Core.Api.Operations.Serialize(test); + var isCorrect = + serialised.Contains("\"@(5)emptyChunks\":[]") + && serialised.Contains("\"emptyList\":[]") + && serialised.Contains("\"@emptyDetachableList\":[]") + && serialised.Contains("\"nestedList\":[[[]]]") + && serialised.Contains("\"@nestedDetachableList\":[[[]]]"); + + Assert.That(isCorrect, Is.EqualTo(true)); + } + + private class DateMock : Base + { + public DateTime TestField { get; set; } + } + + [Test] + public void DateSerialisation() + { + var date = new DateTime(2020, 1, 14); + var mockBase = new DateMock { TestField = date }; + + var result = Core.Api.Operations.Serialize(mockBase); + var test = (DateMock)Core.Api.Operations.Deserialize(result); + + Assert.That(test.TestField, Is.EqualTo(date)); + } + + private class GUIDMock : Base + { + public Guid TestField { get; set; } + } + + [Test] + public void GuidSerialisation() + { + var guid = Guid.NewGuid(); + var mockBase = new GUIDMock { TestField = guid }; + + var result = Core.Api.Operations.Serialize(mockBase); + var test = (GUIDMock)Core.Api.Operations.Deserialize(result); + + Assert.That(test.TestField, Is.EqualTo(guid)); + } + + private class ColorMock : Base + { + public Color TestField { get; set; } + } + + [Test] + public void ColorSerialisation() + { + var color = Color.FromArgb(255, 4, 126, 251); + var mockBase = new ColorMock { TestField = color }; + + var result = Core.Api.Operations.Serialize(mockBase); + var test = (ColorMock)Core.Api.Operations.Deserialize(result); + + Assert.That(test.TestField, Is.EqualTo(color)); + } + + private class StringDateTimeRegressionMock : Base + { + public string TestField { get; set; } + } + + [Test] + public void StringDateTimeRegression() + { + var mockBase = new StringDateTimeRegressionMock { TestField = "2021-11-12T11:32:01" }; + + var result = Core.Api.Operations.Serialize(mockBase); + var test = (StringDateTimeRegressionMock)Core.Api.Operations.Deserialize(result); + + Assert.That(test.TestField, Is.EqualTo(mockBase.TestField)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Credentials/AccountServerMigrationTests.cs b/tests/Speckle.Core.Tests.Unit/Credentials/AccountServerMigrationTests.cs new file mode 100644 index 00000000..35c21adf --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Credentials/AccountServerMigrationTests.cs @@ -0,0 +1,90 @@ +using NUnit.Framework; +using Speckle.Core.Api; +using Speckle.Core.Credentials; + +namespace Speckle.Core.Tests.Unit.Credentials; + +public class AccountServerMigrationTests +{ + private readonly List _accountsToCleanUp = new(); + + public static IEnumerable MigrationTestCase() + { + const string OLD_URL = "https://old.example.com"; + const string NEW_URL = "https://new.example.com"; + const string OTHER_URL = "https://other.example.com"; + Account oldAccount = CreateTestAccount(OLD_URL, null, new(NEW_URL)); + Account newAccount = CreateTestAccount(NEW_URL, new(OLD_URL), null); + Account otherAccount = CreateTestAccount(OTHER_URL, null, null); + + // new account user must match old account user id + newAccount.userInfo.id = oldAccount.userInfo.id; + + List givenAccounts = new() { oldAccount, newAccount, otherAccount }; + + yield return new 
TestCaseData(givenAccounts, NEW_URL, new[] { newAccount }) + .SetName("Get New") + .SetDescription("When requesting for new account, ensure only this account is returned"); + + yield return new TestCaseData(givenAccounts, OLD_URL, new[] { newAccount }) + .SetName("Get New via Old") + .SetDescription("When requesting for old account, ensure migrated account is returned first"); + + var reversed = Enumerable.Reverse(givenAccounts).ToList(); + + yield return new TestCaseData(reversed, OLD_URL, new[] { newAccount }) + .SetName("Get New via Old (Reversed order)") + .SetDescription("Account order shouldn't matter"); + } + + [Test] + [TestCaseSource(nameof(MigrationTestCase))] + public void TestServerMigration(IList accounts, string requestedUrl, IList expectedSequence) + { + AddAccounts(accounts); + + var result = AccountManager.GetAccounts(requestedUrl).ToList(); + + Assert.That(result, Is.EquivalentTo(expectedSequence)); + } + + [TearDown] + public void TearDown() + { + //Clean up any of the test accounts we made + foreach (var acc in _accountsToCleanUp) + { + Fixtures.DeleteLocalAccount(acc.id); + } + _accountsToCleanUp.Clear(); + } + + private static Account CreateTestAccount(string url, Uri movedFrom, Uri movedTo) + { + return new Account + { + token = "myToken", + serverInfo = new ServerInfo + { + url = url, + name = "myServer", + migration = new ServerMigration { movedTo = movedTo, movedFrom = movedFrom } + }, + userInfo = new UserInfo + { + id = Guid.NewGuid().ToString(), + email = "user@example.com", + name = "user" + } + }; + } + + private void AddAccounts(IEnumerable accounts) + { + foreach (Account account in accounts) + { + _accountsToCleanUp.Add(account); + Fixtures.UpdateOrSaveAccount(account); + } + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Credentials/Accounts.cs b/tests/Speckle.Core.Tests.Unit/Credentials/Accounts.cs new file mode 100644 index 00000000..7c86a66f --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Credentials/Accounts.cs @@ -0,0 +1,121 @@ +using NUnit.Framework; +using Speckle.Core.Api; +using Speckle.Core.Credentials; + +namespace Speckle.Core.Tests.Unit.Credentials; + +[TestFixture] +public class CredentialInfrastructure +{ + [OneTimeSetUp] + public static void SetUp() + { + s_testAccount1 = new Account + { + refreshToken = "bla", + token = "bla", + serverInfo = new ServerInfo { url = "https://bla.example.com", company = "bla" }, + userInfo = new UserInfo { email = "one@two.com" } + }; + + s_testAccount2 = new Account + { + refreshToken = "foo", + token = "bar", + serverInfo = new ServerInfo { url = "https://baz.example.com", company = "qux" }, + userInfo = new UserInfo { email = "three@four.com" } + }; + + s_testAccount3 = new Account + { + token = "secret", + serverInfo = new ServerInfo { url = "https://example.com", name = "qux" }, + userInfo = new UserInfo + { + email = "six@five.com", + id = "123345", + name = "Test Account 3" + } + }; + + Fixtures.UpdateOrSaveAccount(s_testAccount1); + Fixtures.UpdateOrSaveAccount(s_testAccount2); + Fixtures.SaveLocalAccount(s_testAccount3); + } + + [OneTimeTearDown] + public static void TearDown() + { + Fixtures.DeleteLocalAccount(s_testAccount1.id); + Fixtures.DeleteLocalAccount(s_testAccount2.id); + Fixtures.DeleteLocalAccountFile(); + } + + private static Account s_testAccount1, + s_testAccount2, + s_testAccount3; + + [Test] + public void GetAllAccounts() + { + var accs = AccountManager.GetAccounts().ToList(); + Assert.That(accs, Has.Count.GreaterThanOrEqualTo(3)); // Tests are adding three accounts, you might 
have extra accounts on your machine when testing :D + } + + [Test] + public void GetAccount_ById() + { + var result = AccountManager.GetAccount(s_testAccount1.id); + + Assert.That(result, Is.EqualTo(s_testAccount1)); + } + + [Test] + public void GetAccount_ById_ThrowsWhenNotFound() + { + Assert.Throws(() => AccountManager.GetAccount("Non_existent_id")); + } + + public static IEnumerable TestCases() + { + SetUp(); + return new[] { s_testAccount1, s_testAccount2, s_testAccount3 }; + } + + [Test] + [TestCaseSource(nameof(TestCases))] + public void GetAccountsForServer(Account target) + { + var accs = AccountManager.GetAccounts(target.serverInfo.url).ToList(); + + Assert.That(accs, Has.Count.EqualTo(1)); + + var acc = accs[0]; + + Assert.That(acc, Is.Not.SameAs(target), "We expect new objects (no reference equality)"); + Assert.That(acc.serverInfo.company, Is.EqualTo(target.serverInfo.company)); + Assert.That(acc.serverInfo.url, Is.EqualTo(target.serverInfo.url)); + Assert.That(acc.refreshToken, Is.EqualTo(target.refreshToken)); + Assert.That(acc.token, Is.EqualTo(target.token)); + } + + [Test] + public void EnsureLocalIdentifiers_AreUniqueAcrossServers() + { + // Accounts with the same user ID in different servers should always result in different local identifiers. + string id = "12345"; + var acc1 = new Account + { + serverInfo = new ServerInfo { url = "https://speckle.xyz" }, + userInfo = new UserInfo { id = id } + }.GetLocalIdentifier(); + + var acc2 = new Account + { + serverInfo = new ServerInfo { url = "https://app.speckle.systems" }, + userInfo = new UserInfo { id = id } + }.GetLocalIdentifier(); + + Assert.That(acc1, Is.Not.EqualTo(acc2)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Credentials/FE2WrapperTests.cs b/tests/Speckle.Core.Tests.Unit/Credentials/FE2WrapperTests.cs new file mode 100644 index 00000000..843a737e --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Credentials/FE2WrapperTests.cs @@ -0,0 +1,68 @@ +using NUnit.Framework; +using Speckle.Core.Credentials; +using Speckle.Core.Logging; + +namespace Speckle.Core.Tests.Unit.Credentials; + +[TestFixture] +[TestOf(typeof(StreamWrapper))] +public class Fe2WrapperTests +{ + [TestCase( + "https://latest.speckle.systems/projects/92b620fb17/models/76fd8a01c8", + StreamWrapperType.Branch, + "92b620fb17", + "76fd8a01c8" + )] + [TestCase( + "https://latest.speckle.systems/projects/92b620fb17/models/76fd8a01c8@7dc324e4bb", + StreamWrapperType.Commit, + "92b620fb17", + "76fd8a01c8", + "7dc324e4bb" + )] + [TestCase( + "https://latest.speckle.systems/projects/92b620fb17/models/bdd52d7fd174328a080770e2a7fef98a", + StreamWrapperType.Object, + "92b620fb17", + null, + null, + "bdd52d7fd174328a080770e2a7fef98a" + )] + public void ParseFe2Links( + string url, + StreamWrapperType expectedType, + string expectedProjectId, + string expectedBranchId = null, + string expectedCommitId = null, + string expectedObjectId = null + ) + { + var streamWrapper = new StreamWrapper(url); + Assert.That(streamWrapper, Is.Not.Null); + Assert.That(streamWrapper.Type, Is.EqualTo(expectedType)); + Assert.That(streamWrapper.StreamId, Is.EqualTo(expectedProjectId)); + Assert.That(streamWrapper.BranchName, Is.EqualTo(expectedBranchId)); + Assert.That(streamWrapper.CommitId, Is.EqualTo(expectedCommitId)); + Assert.That(streamWrapper.ObjectId, Is.EqualTo(expectedObjectId)); + } + + [TestCase("https://latest.speckle.systems/projects/92b620fb17/models/all")] + [TestCase("https://latest.speckle.systems/projects/92b620fb17/models/0fe8ca21c0,76fd8a01c8")] + 
[TestCase("https://latest.speckle.systems/projects/92b620fb17/models/A,76fd8a01c8@7dc324e4bb,B@C,D@E,F")] + public void ParseFe2NotSupportedLinks(string url) + { + Assert.Throws(() => new StreamWrapper(url)); + } + + [TestCase("https://latest.speckle.systems/")] + [TestCase("https://latest.speckle.systems/projects")] + [TestCase("https://latest.speckle.systems/projects/")] + [TestCase("https://latest.speckle.systems/projects/92b620fb17")] + [TestCase("https://latest.speckle.systems/projects/92b620fb17/")] + [TestCase("https://latest.speckle.systems/projects/92b620fb17/models/")] + public void ParseFe2InvalidLinks(string url) + { + Assert.Throws(() => new StreamWrapper(url)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Credentials/StreamWrapperTests.cs b/tests/Speckle.Core.Tests.Unit/Credentials/StreamWrapperTests.cs new file mode 100644 index 00000000..e2f5f232 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Credentials/StreamWrapperTests.cs @@ -0,0 +1,82 @@ +using NUnit.Framework; +using Speckle.Core.Credentials; + +namespace Speckle.Core.Tests.Unit.Credentials; + +[TestFixture] +[TestOf(typeof(StreamWrapper))] +public class StreamWrapperTests +{ + [Test] + public void ParseStream() + { + var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/a75ab4f10f"); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Stream)); + } + + [Test] + public void ParseBranch() + { + var wrapperCrazy = new StreamWrapper( + "https://testing.speckle.dev/streams/4c3ce1459c/branches/%F0%9F%8D%95%E2%AC%85%F0%9F%8C%9F%20you%20wat%3F" + ); + Assert.That(wrapperCrazy.BranchName, Is.EqualTo("🍕⬅🌟 you wat?")); + Assert.That(wrapperCrazy.Type, Is.EqualTo(StreamWrapperType.Branch)); + + wrapperCrazy = new StreamWrapper("https://testing.speckle.dev/streams/4c3ce1459c/branches/next%20level"); + Assert.That(wrapperCrazy.BranchName, Is.EqualTo("next level")); + Assert.That(wrapperCrazy.Type, Is.EqualTo(StreamWrapperType.Branch)); + } + + [Test] + public void ParseObject() + { + var wrapper = new StreamWrapper( + "https://testing.speckle.dev/streams/a75ab4f10f/objects/5530363e6d51c904903dafc3ea1d2ec6" + ); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Object)); + } + + [Test] + public void ParseCommit() + { + var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/4c3ce1459c/commits/8b9b831792"); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Commit)); + } + + [Test] + public void ParseGlobalAsBranch() + { + var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/0c6ad366c4/globals/"); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Branch)); + } + + [Test] + public void ParseGlobalAsCommit() + { + var wrapper = new StreamWrapper("https://testing.speckle.dev/streams/0c6ad366c4/globals/abd3787893"); + Assert.That(wrapper.Type, Is.EqualTo(StreamWrapperType.Commit)); + } + + [TestCase("https://testing.speckle.dev/projects/0c6ad366c4/models/abd3787893", StreamWrapperType.Branch)] + [TestCase("https://testing.speckle.dev/projects/28dd9ad7ba/models/117eb16f2c@b1b8579d93", StreamWrapperType.Commit)] + [TestCase( + "https://testing.speckle.dev/projects/28dd9ad7ba/models/6ae9712d6a8bad80a3efd4a29a21c31a", + StreamWrapperType.Object + )] + public void ParseFe2Urls(string speckleUrl, StreamWrapperType expectedType) + { + var wrapper = new StreamWrapper(speckleUrl); + Assert.That(wrapper.Type, Is.EqualTo(expectedType)); + Assert.That(wrapper.ToString(), Is.EqualTo(speckleUrl)); + } + + [TestCase( + 
"https://testing.speckle.dev/projects/28dd9ad7ba/models/117eb16f2c@b1b8579d93,abd3787893,6ae9712d6a8bad80a3efd4a29a21c31a", + StreamWrapperType.Object + )] + public void ParseFe2MultiModelUrls_IsNotSupported(string speckleUrl, StreamWrapperType expectedType) + { + Assert.Throws(() => new StreamWrapper(speckleUrl)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Fixtures.cs b/tests/Speckle.Core.Tests.Unit/Fixtures.cs new file mode 100644 index 00000000..71457823 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Fixtures.cs @@ -0,0 +1,55 @@ +using Newtonsoft.Json; +using NUnit.Framework; +using Speckle.Core.Credentials; +using Speckle.Core.Helpers; +using Speckle.Core.Logging; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Unit; + +[SetUpFixture] +public class SetUp +{ + public static SpeckleLogConfiguration TestLogConfig { get; } = + new(logToFile: false, logToSeq: false, logToSentry: false); + + [OneTimeSetUp] + public void BeforeAll() + { + SpeckleLog.Initialize("Speckle.Core", "Testing", TestLogConfig); + SpeckleLog.Logger.Information("Initialized logger for testing"); + } +} + +public abstract class Fixtures +{ + private static readonly SQLiteTransport s_accountStorage = new(scope: "Accounts"); + + private static readonly string s_accountPath = Path.Combine( + SpecklePathProvider.AccountsFolderPath, + "TestAccount.json" + ); + + public static void UpdateOrSaveAccount(Account account) + { + DeleteLocalAccount(account.id); + string serializedObject = JsonConvert.SerializeObject(account); + s_accountStorage.SaveObjectSync(account.id, serializedObject); + } + + public static void SaveLocalAccount(Account account) + { + var json = JsonConvert.SerializeObject(account); + File.WriteAllText(s_accountPath, json); + } + + public static void DeleteLocalAccount(string id) + { + s_accountStorage.DeleteObject(id); + } + + public static void DeleteLocalAccountFile() + { + File.Delete(s_accountPath); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Helpers/Path.cs b/tests/Speckle.Core.Tests.Unit/Helpers/Path.cs new file mode 100644 index 00000000..7878ab51 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Helpers/Path.cs @@ -0,0 +1,92 @@ +using System.Runtime.InteropServices; +using NUnit.Framework; +using Speckle.Core.Helpers; + +namespace Speckle.Core.Tests.Unit.Helpers; + +[TestFixture] +[TestOf(nameof(SpecklePathProvider))] +public class SpecklePathTests +{ + [Test] + public void TestUserApplicationDataPath() + { + var userPath = SpecklePathProvider.UserApplicationDataPath(); + string pattern; + + if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + pattern = @"C:\\Users\\.*\\AppData\\Roaming"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + pattern = @"\/Users\/.*\/\.config"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + // if running under root user, the .config folder is in another location... 
+ if (userPath.StartsWith("/root")) + { + pattern = @"\/root/\.config"; + } + else + { + pattern = @"\/home/.*/\.config"; + } + } + else + { + throw new NotImplementedException("Your OS platform is not supported"); + } + + Assert.That(userPath, Does.Match(pattern)); + } + + [Test] + public void TestUserApplicationDataPathOverride() + { + var newPath = Path.GetTempPath(); + SpecklePathProvider.OverrideApplicationDataPath(newPath); + Assert.That(SpecklePathProvider.UserApplicationDataPath(), Is.EqualTo(newPath)); + SpecklePathProvider.OverrideApplicationDataPath(null); + } + + [Test] + public void TestInstallApplicationDataPath() + { + var installPath = SpecklePathProvider.InstallApplicationDataPath; + string pattern; + + if (string.IsNullOrEmpty(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData))) + { + pattern = @"\/root"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + // this will prob fail on windows + pattern = @"C:\\Users\\.*\\AppData\\Roaming"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + pattern = @"\/Users\/.*\/\.config"; + } + else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + // if running under root user, the .config folder is in another location... + if (installPath.StartsWith("/root")) + { + pattern = @"\/root/\.config"; + } + else + { + pattern = @"\/home/.*/\.config"; + } + } + else + { + throw new NotImplementedException("Your OS platform is not supported"); + } + + Assert.That(installPath, Does.Match(pattern)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Kits/KitManagerTests.cs b/tests/Speckle.Core.Tests.Unit/Kits/KitManagerTests.cs new file mode 100644 index 00000000..96a5313d --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Kits/KitManagerTests.cs @@ -0,0 +1,19 @@ +using NUnit.Framework; +using Speckle.Core.Kits; + +namespace Speckle.Core.Tests.Unit.Kits; + +[TestFixture] +[TestOf(typeof(KitManager))] +public class KitManagerTests +{ + [Test] + public void KitsExist() + { + var kits = KitManager.Kits.ToArray(); + Assert.That(kits, Has.Length.GreaterThan(0)); + + var types = KitManager.Types.ToArray(); + Assert.That(types, Has.Length.GreaterThan(0)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Kits/TestKit.cs b/tests/Speckle.Core.Tests.Unit/Kits/TestKit.cs new file mode 100644 index 00000000..c0c6b84d --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Kits/TestKit.cs @@ -0,0 +1,185 @@ +using Newtonsoft.Json; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Speckle.Core.Tests.Unit.Kits; + +/// +/// Simple speckle kit (no conversions) used in tests. 
+/// +public class TestKit : ISpeckleKit +{ + public IEnumerable<Type> Types => GetType().Assembly.GetTypes().Where(type => type.IsSubclassOf(typeof(Base))); + + public string Description => "Simple object model with some types for tests."; + + public string Name => nameof(TestKit); + + public string Author => "Dimitrie"; + + public string WebsiteOrEmail => "hello@Speckle.Speckle.Core.works"; + + public IEnumerable<string> Converters => new List<string>(); + + public ISpeckleConverter LoadConverter(string app) + { + throw new KitException("This is the test kit"); + } +} + +public class FakeMesh : Base +{ + [DetachProperty, Chunkable] + public List<double> Vertices { get; set; } = new(); + + [DetachProperty, Chunkable] + public double[] ArrayOfDoubles { get; set; } + + [DetachProperty, Chunkable] + public TableLeg[] ArrayOfLegs { get; set; } + + [DetachProperty, Chunkable(2500)] + public List Tables { get; set; } = new(); +} + +public class DiningTable : Base +{ + public DiningTable() + { + LegOne = new TableLeg { height = 2 * 3, radius = 10 }; + LegTwo = new TableLeg { height = 1, radius = 5 }; + + MoreLegs.Add(new TableLeg { height = 4 }); + MoreLegs.Add(new TableLeg { height = 10 }); + + Tabletop = new Tabletop + { + length = 200, + width = 12, + thickness = 3 + }; + } + + [DetachProperty] + public TableLeg LegOne { get; set; } + + [DetachProperty] + public TableLeg LegTwo { get; set; } + + [DetachProperty] + public List<TableLeg> MoreLegs { get; set; } = new(); + + [DetachProperty] + public Tabletop Tabletop { get; set; } + + public string TableModel { get; set; } = "Sample Table"; +} + +public class Tabletop : Base +{ + public double length { get; set; } + public double width { get; set; } + public double thickness { get; set; } +} + +public class TableLeg : Base +{ + public double height { get; set; } + public double radius { get; set; } + + [DetachProperty] + public TableLegFixture fixture { get; set; } = new(); +} + +public class TableLegFixture : Base +{ + public string nails { get; set; } = "MANY NAILS WOW "; +} + +public class Point : Base +{ + public Point() { } + + public Point(double x, double y, double z) + { + X = x; + Y = y; + Z = z; + } + + public double X { get; set; } + public double Y { get; set; } + public double Z { get; set; } +} + +public class SuperPoint : Point +{ + public double W { get; set; } +} + +public class Mesh : Base +{ + public List<int> Faces = new(); + + [JsonIgnore] + public List<Point> Points = new(); + + public List<double> Vertices + { + get => Points.SelectMany(pt => new List<double> { pt.X, pt.Y, pt.Z }).ToList(); + set + { + for (int i = 0; i < value.Count; i += 3) + { + Points.Add(new Point(value[i], value[i + 1], value[i + 2])); + } + } + } +} + +public interface ICurve +{ + // Just for fun +} + +/// +/// Store individual points in a list structure for developer ergonomics. Nevertheless, for performance reasons (hashing, serialisation and storage) expose the same list of points as a typed array. +/// +public class Polyline : Base, ICurve +{ + [JsonIgnore] + public List<Point> Points { get; set; } = new(); + + public List<double> Vertices + { + get => Points.SelectMany(pt => new List<double> { pt.X, pt.Y, pt.Z }).ToList(); + set + { + for (int i = 0; i < value.Count; i += 3) + { + Points.Add(new Point(value[i], value[i + 1], value[i + 2])); + } + } + } +} + +public class Line : Base, ICurve +{ + public Point Start { get; set; } + public Point End { get; set; } +} + +/// +/// This class exists to purely test some weird cases in which Interfaces might trash serialisation.
+/// +public class PolygonalFeline : Base +{ + public List Whiskers { get; set; } = new(); + + public Dictionary Claws { get; set; } = new(); + + [DetachProperty] + public ICurve Tail { get; set; } + + public ICurve[] Fur { get; set; } = new ICurve[1000]; +} diff --git a/tests/Speckle.Core.Tests.Unit/Logging/SpeckleLogTests.cs b/tests/Speckle.Core.Tests.Unit/Logging/SpeckleLogTests.cs new file mode 100644 index 00000000..22b70330 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Logging/SpeckleLogTests.cs @@ -0,0 +1,130 @@ +using NUnit.Framework; +using Serilog.Context; +using Serilog.Events; +using Speckle.Core.Logging; + +namespace Speckle.Core.Tests.Unit.Logging; + +[TestOf(typeof(SpeckleLog))] +public class SpeckleLogTests : IDisposable +{ + private StringWriter _stdOut; + + [SetUp] + public virtual void Setup() + { + _stdOut = new StringWriter(); + Console.SetOut(_stdOut); + } + + [TearDown] + public void TearDown() + { + _stdOut?.Dispose(); + } + + [OneTimeTearDown] + public void OneTimeTearDown() + { + var standardOutput = new StreamWriter(Console.OpenStandardOutput()); + standardOutput.AutoFlush = true; + Console.SetOut(standardOutput); + } + + [Test] + [TestCase(LogEventLevel.Fatal, true)] + [TestCase(LogEventLevel.Error, true)] + [TestCase(LogEventLevel.Warning, true)] + [TestCase(LogEventLevel.Information, true)] + [TestCase(LogEventLevel.Debug, true)] + [TestCase(LogEventLevel.Verbose, false)] + public void LoggerWrites_WithLogEventLevel(LogEventLevel logLevel, bool expectLog) + { + const string TEMPLATE = "My log message"; + + SpeckleLog.Logger.Write(logLevel, TEMPLATE); + + string result = _stdOut.ToString(); + + if (expectLog) + { + Assert.That(result, Contains.Substring(TEMPLATE)); + } + else + { + Assert.That(result, Is.Empty); + } + } + + [Test] + public void LoggerWrites_PositionalProperties() + { + const string PROP_NAME = "myProp"; + const string TEMPLATE = $"My log message with positional prop {{{PROP_NAME}}}"; + const string TARGET_VALUE = "my amazing value"; + SpeckleLog.Logger.Warning(TEMPLATE, TARGET_VALUE); + + string result = _stdOut.ToString(); + Assert.That(result, Does.Contain(TARGET_VALUE)); + Assert.That(result, Does.Not.Contain(PROP_NAME)); + } + + [Test] + public void LoggerWrites_ContextProperties() + { + const string PROP_NAME = "myProp"; + const string TEMPLATE = $"My log message with context prop {{{PROP_NAME}}}"; + const string TARGET_VALUE = "my amazing value"; + + SpeckleLog.Logger.ForContext(PROP_NAME, TARGET_VALUE).Warning(TEMPLATE); + + string result = _stdOut.ToString(); + Assert.That(result, Does.Contain(TARGET_VALUE)); + Assert.That(result, Does.Not.Contain(PROP_NAME)); + } + + [Test] + public void LoggerWrites_ScopedProperties() + { + const string PROP_NAME = "myProp"; + const string TEMPLATE = $"My log message with scoped prop {{{PROP_NAME}}}"; + const string TARGET_VALUE = "my amazing value"; + + using var d1 = LogContext.PushProperty(PROP_NAME, TARGET_VALUE); + + SpeckleLog.Logger.Warning(TEMPLATE); + + string result = _stdOut.ToString(); + Assert.That(result, Does.Contain(TARGET_VALUE)); + Assert.That(result, Does.Not.Contain(PROP_NAME)); + } + + [Test] + [TestCase(true)] + [TestCase(false)] + public void CreateConfiguredLogger_WritesToConsole_ToConsole(bool shouldWrite) + { + const string TEST_MESSAGE = "This is my test message"; + + SpeckleLogConfiguration config = + new(logToConsole: shouldWrite, logToSeq: false, logToSentry: false, logToFile: false); + using var logger = SpeckleLog.CreateConfiguredLogger("My Test Host App!!", null, 
config); + + logger.Fatal(TEST_MESSAGE); + + string result = _stdOut.ToString(); + if (shouldWrite) + { + Assert.That(result, Does.Contain(TEST_MESSAGE)); + } + else + { + Assert.That(result, Is.Empty); + } + } + + public void Dispose() + { + _stdOut?.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/BaseTests.cs b/tests/Speckle.Core.Tests.Unit/Models/BaseTests.cs new file mode 100644 index 00000000..90120cfe --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/BaseTests.cs @@ -0,0 +1,238 @@ +using NUnit.Framework; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Speckle.Core.Tests.Unit.Models; + +[TestFixture] +[TestOf(typeof(Base))] +[TestOf(typeof(DynamicBase))] +public class BaseTests +{ + [Test] + public void CanGetSetDynamicItemProp() + { + var @base = new Base(); + @base["Item"] = "Item"; + + Assert.That(@base["Item"], Is.EqualTo("Item")); + } + + [Test] + public void CanGetSetTypedItemProp() + { + var @base = new ObjectWithItemProp { Item = "baz" }; + + Assert.That(@base["Item"], Is.EqualTo("baz")); + Assert.That(@base.Item, Is.EqualTo("baz")); + } + + [Test(Description = "Checks if validation is performed in property names")] + public void CanValidatePropNames() + { + dynamic @base = new Base(); + + // Word chars are OK + @base["something"] = "B"; + + // Only single leading @ allowed + @base["@something"] = "A"; + Assert.Throws(() => + { + @base["@@@something"] = "Testing"; + }); + + // Invalid chars: ./ + Assert.Throws(() => + { + @base["some.thing"] = "Testing"; + }); + Assert.Throws(() => + { + @base["some/thing"] = "Testing"; + }); + + // Trying to change a class member value will throw exceptions. + //Assert.Throws(() => { @base["speckle_type"] = "Testing"; }); + //Assert.Throws(() => { @base["id"] = "Testing"; }); + } + + [Test] + public void CountDynamicChunkables() + { + const int MAX_NUM = 3000; + var @base = new Base(); + var customChunk = new List(); + var customChunkArr = new double[MAX_NUM]; + + for (int i = 0; i < MAX_NUM; i++) + { + customChunk.Add(i / 2); + customChunkArr[i] = i; + } + + @base["@(1000)cc1"] = customChunk; + @base["@(1000)cc2"] = customChunkArr; + + var num = @base.GetTotalChildrenCount(); + Assert.That(num, Is.EqualTo(MAX_NUM / 1000 * 2 + 1)); + } + + [Test] + public void CountTypedChunkables() + { + const int MAX_NUM = 3000; + var @base = new SampleObject(); + var customChunk = new List(); + var customChunkArr = new double[MAX_NUM]; + + for (int i = 0; i < MAX_NUM; i++) + { + customChunk.Add(i / 2); + customChunkArr[i] = i; + } + + @base.list = customChunk; + @base.arr = customChunkArr; + + var num = @base.GetTotalChildrenCount(); + var actualNum = 1 + MAX_NUM / 300 + MAX_NUM / 1000; + Assert.That(num, Is.EqualTo(actualNum)); + } + + [Test(Description = "Checks that no ignored or obsolete properties are returned")] + public void CanGetMemberNames() + { + var @base = new SampleObject(); + var dynamicProp = "dynamicProp"; + @base[dynamicProp] = 123; + var names = @base.GetMembers().Keys; + Assert.That(names, Has.No.Member(nameof(@base.IgnoredSchemaProp))); + Assert.That(names, Has.No.Member(nameof(@base.ObsoleteSchemaProp))); + Assert.That(names, Has.Member(dynamicProp)); + Assert.That(names, Has.Member(nameof(@base.attachedProp))); + } + + [Test(Description = "Checks that only instance properties are returned, excluding obsolete and ignored.")] + public void CanGetMembers_OnlyInstance() + { + var @base = new SampleObject(); + @base["dynamicProp"] = 123; + + var names = 
@base.GetMembers(DynamicBaseMemberType.Instance).Keys; + Assert.That(names, Has.Member(nameof(@base.attachedProp))); + } + + [Test(Description = "Checks that only dynamic properties are returned")] + public void CanGetMembers_OnlyDynamic() + { + var @base = new SampleObject(); + var dynamicProp = "dynamicProp"; + @base[dynamicProp] = 123; + + var names = @base.GetMembers(DynamicBaseMemberType.Dynamic).Keys; + Assert.That(names, Has.Member(dynamicProp)); + Assert.That(names, Has.Count.EqualTo(1)); + } + + [Test(Description = "Checks that all typed properties (including ignored ones) are returned")] + public void CanGetMembers_OnlyInstance_IncludeIgnored() + { + var @base = new SampleObject(); + @base["dynamicProp"] = 123; + + var names = @base.GetMembers(DynamicBaseMemberType.Instance | DynamicBaseMemberType.SchemaIgnored).Keys; + Assert.That(names, Has.Member(nameof(@base.IgnoredSchemaProp))); + Assert.That(names, Has.Member(nameof(@base.attachedProp))); + } + + [Test(Description = "Checks that all typed properties (including obsolete ones) are returned")] + public void CanGetMembers_OnlyInstance_IncludeObsolete() + { + var @base = new SampleObject(); + @base["dynamicProp"] = 123; + + var names = @base.GetMembers(DynamicBaseMemberType.Instance | DynamicBaseMemberType.Obsolete).Keys; + Assert.That(names, Has.Member(nameof(@base.ObsoleteSchemaProp))); + Assert.That(names, Has.Member(nameof(@base.attachedProp))); + } + + [Test] + public void CanGetDynamicMembers() + { + var @base = new SampleObject(); + var dynamicProp = "dynamicProp"; + @base[dynamicProp] = null; + + var names = @base.GetDynamicMemberNames(); + Assert.That(names, Has.Member(dynamicProp)); + Assert.That(@base[dynamicProp], Is.Null); + } + + [Test] + public void CanSetDynamicMembers() + { + var @base = new SampleObject(); + var key = "dynamicProp"; + var value = "something"; + // Can create a new dynamic member + @base[key] = value; + Assert.That(value, Is.EqualTo((string)@base[key])); + + // Can overwrite existing + value = "some other value"; + @base[key] = value; + Assert.That(value, Is.EqualTo((string)@base[key])); + + // Accepts null values + @base[key] = null; + Assert.That(@base[key], Is.Null); + } + + [Test] + public void CanShallowCopy() + { + var sample = new SampleObject(); + var copy = sample.ShallowCopy(); + + var selectedMembers = + DynamicBaseMemberType.Dynamic | DynamicBaseMemberType.Instance | DynamicBaseMemberType.SchemaIgnored; + var sampleMembers = sample.GetMembers(selectedMembers); + var copyMembers = copy.GetMembers(selectedMembers); + + Assert.That(copyMembers.Keys, Is.EquivalentTo(sampleMembers.Keys)); + Assert.That(copyMembers.Values, Is.EquivalentTo(sampleMembers.Values)); + } + + public class SampleObject : Base + { + [Chunkable, DetachProperty] + public List list { get; set; } = new(); + + [Chunkable(300), DetachProperty] + public double[] arr { get; set; } + + [DetachProperty] + public SampleProp detachedProp { get; set; } + + public SampleProp attachedProp { get; set; } + + public string crazyProp { get; set; } + + [SchemaIgnore] + public string IgnoredSchemaProp { get; set; } + + [Obsolete("Use attached prop")] + public string ObsoleteSchemaProp { get; set; } + } + + public class SampleProp + { + public string name { get; set; } + } + + public class ObjectWithItemProp : Base + { + public string Item { get; set; } = "Item"; + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/Extensions/BaseExtensionsTests.cs b/tests/Speckle.Core.Tests.Unit/Models/Extensions/BaseExtensionsTests.cs new file 
mode 100644 index 00000000..f5fa6e8f --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/Extensions/BaseExtensionsTests.cs @@ -0,0 +1,63 @@ +using NUnit.Framework; +using Speckle.Core.Models; +using Speckle.Core.Models.Extensions; + +namespace Speckle.Core.Tests.Unit.Models.Extensions; + +[TestFixture] +[TestOf(nameof(BaseExtensions))] +public class BaseExtensionsTests +{ + [Test] + [TestCase("myDynamicProp")] + [TestCase("elements")] + public void GetDetachedPropName_Dynamic(string propertyName) + { + var data = new TestBase(); + + var result = data.GetDetachedPropName(propertyName); + var expected = $"@{propertyName}"; + Assert.That(result, Is.EqualTo(expected)); + } + + [Test] + [TestCase(nameof(TestBase.myProperty))] + [TestCase(nameof(TestBase.myOtherProperty))] + public void GetDetachedPropName_Instance(string propertyName) + { + var data = new TestBase(); + var result = data.GetDetachedPropName(propertyName); + + Assert.That(result, Is.EqualTo(propertyName)); + } + + [Test] + public void TraverseWithPath() + { + var collection = new Collection() { name = "collection" }; + var subCollection = new Collection { name = "subCollection" }; + collection.elements.Add(subCollection); + var data1 = new Base(); + subCollection.elements.Add(data1); + + var basePaths = collection.TraverseWithPath((obj => obj is not Collection)).ToList(); + + Assert.That(basePaths.Count, Is.EqualTo(3)); + Assert.That(basePaths[0].Item2.speckle_type, Is.EqualTo("Speckle.Speckle.Core.Models.Collection")); + Assert.That(basePaths[0].Item2["name"], Is.EqualTo("collection")); + Assert.That(basePaths[0].Item1, Is.EqualTo(new List<string>())); + + Assert.That(basePaths[1].Item2.speckle_type, Is.EqualTo("Speckle.Speckle.Core.Models.Collection")); + Assert.That(basePaths[1].Item2["name"], Is.EqualTo("subCollection")); + Assert.That(basePaths[1].Item1, Is.EqualTo(new List<string>() { "collection" })); + + Assert.That(basePaths[2].Item2.speckle_type, Is.EqualTo("Base")); + Assert.That(basePaths[2].Item1, Is.EqualTo(new List<string>() { "collection", "subCollection" })); + } + + public class TestBase : Base + { + public string myProperty { get; set; } + public string myOtherProperty { get; set; } + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/Extensions/ExceptionTests.cs b/tests/Speckle.Core.Tests.Unit/Models/Extensions/ExceptionTests.cs new file mode 100644 index 00000000..305d733e --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/Extensions/ExceptionTests.cs @@ -0,0 +1,26 @@ +using NUnit.Framework; +using Speckle.Core.Models.Extensions; + +namespace Speckle.Core.Tests.Unit.Models.Extensions; + +[TestFixture] +[TestOf(typeof(BaseExtensions))] +public class ExceptionTests +{ + [Test] + public void CanPrintAllInnerExceptions() + { + var ex = new Exception("Some error"); + var exMsg = ex.ToFormattedString(); + Assert.That(exMsg, Is.Not.Null); + + var ex2 = new Exception("One or more errors occurred", ex); + var ex2Msg = ex2.ToFormattedString(); + Assert.That(ex2Msg, Is.Not.Null); + + var ex3 = new AggregateException("One or more errors occurred", ex2); + var ex3Msg = ex3.ToFormattedString(); + + Assert.That(ex3Msg, Is.Not.Null); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/GraphTraversalTests.cs b/tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/GraphTraversalTests.cs new file mode 100644 index 00000000..c64c8439 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/GraphTraversalTests.cs @@ -0,0 +1,140 @@ +using System.Collections; +using NUnit.Framework; +using
Speckle.Core.Models; +using Speckle.Core.Models.GraphTraversal; + +namespace Speckle.Core.Tests.Unit.Models.GraphTraversal; + +[TestFixture, TestOf(typeof(Core.Models.GraphTraversal.GraphTraversal))] +public class GraphTraversalTests +{ + private static IEnumerable<TraversalContext> Traverse(Base testCase, params ITraversalRule[] rules) + { + var sut = new Core.Models.GraphTraversal.GraphTraversal(rules); + return sut.Traverse(testCase); + } + + [Test] + public void Traverse_TraversesListMembers() + { + var traverseListsRule = TraversalRule + .NewTraversalRule() + .When(_ => true) + .ContinueTraversing( + x => x.GetMembers(DynamicBaseMemberType.All).Where(p => p.Value is IList).Select(kvp => kvp.Key) + ); + + var expectTraverse = new Base { id = "List Member" }; + var expectIgnored = new Base { id = "Not List Member" }; + + TraversalMock testCase = + new() + { + ListChildren = new List<Base> { expectTraverse }, + DictChildren = new Dictionary<string, Base> { ["myprop"] = expectIgnored }, + Child = expectIgnored + }; + + var ret = Traverse(testCase, traverseListsRule).Select(b => b.Current).ToList(); + + //Assert expected members present + Assert.That(ret, Has.Exactly(1).Items.EqualTo(testCase)); + Assert.That(ret, Has.Exactly(1).Items.EqualTo(expectTraverse)); + + //Assert unexpected members not present + Assert.That(ret, Has.No.Member(expectIgnored)); + Assert.That(ret, Has.Count.EqualTo(2)); + } + + [Test] + public void Traverse_TraversesDictMembers() + { + var traverseListsRule = TraversalRule + .NewTraversalRule() + .When(_ => true) + .ContinueTraversing( + x => x.GetMembers(DynamicBaseMemberType.All).Where(p => p.Value is IDictionary).Select(kvp => kvp.Key) + ); + + var expectTraverse = new Base { id = "Dict Member" }; + var expectIgnored = new Base { id = "Not Dict Member" }; + + TraversalMock testCase = + new() + { + ListChildren = new List<Base> { expectIgnored }, + DictChildren = new Dictionary<string, Base> { ["myprop"] = expectTraverse }, + Child = expectIgnored + }; + + var ret = Traverse(testCase, traverseListsRule).Select(b => b.Current).ToList(); + + //Assert expected members present + Assert.That(ret, Has.Exactly(1).Items.EqualTo(testCase)); + Assert.That(ret, Has.Exactly(1).Items.EqualTo(expectTraverse)); + + //Assert unexpected members not present + Assert.That(ret, Has.No.Member(expectIgnored)); + Assert.That(ret, Has.Count.EqualTo(2)); + } + + [Test] + public void Traverse_TraversesDynamic() + { + var traverseListsRule = TraversalRule + .NewTraversalRule() + .When(_ => true) + .ContinueTraversing(x => x.GetMembers(DynamicBaseMemberType.Dynamic).Select(kvp => kvp.Key)); + + var expectTraverse = new Base { id = "List Member" }; + var expectIgnored = new Base { id = "Not List Member" }; + + TraversalMock testCase = + new() + { + Child = expectIgnored, + ["dynamicChild"] = expectTraverse, + ["dynamicListChild"] = new List<Base> { expectTraverse } + }; + + var ret = Traverse(testCase, traverseListsRule).Select(b => b.Current).ToList(); + + //Assert expected members present + Assert.That(ret, Has.Exactly(1).Items.EqualTo(testCase)); + Assert.That(ret, Has.Exactly(2).Items.EqualTo(expectTraverse)); + + //Assert unexpected members not present + Assert.That(ret, Has.No.Member(expectIgnored)); + Assert.That(ret, Has.Count.EqualTo(3)); + } + + [Test] + public void Traverse_ExclusiveRule() + { + var expectTraverse = new Base { id = "List Member" }; + var expectIgnored = new Base { id = "Not List Member" }; + + var traverseListsRule = TraversalRule + .NewTraversalRule() + .When(_ => true) + .ContinueTraversing(x =>
x.GetMembers(DynamicBaseMemberType.Dynamic).Select(kvp => kvp.Key)); + + TraversalMock testCase = + new() + { + Child = expectIgnored, + ["dynamicChild"] = expectTraverse, + ["dynamicListChild"] = new List<Base> { expectTraverse } + }; + + var ret = Traverse(testCase, traverseListsRule).Select(b => b.Current).ToList(); + + //Assert expected members present + Assert.That(ret, Has.Exactly(1).Items.EqualTo(testCase)); + Assert.That(ret, Has.Exactly(2).Items.EqualTo(expectTraverse)); + + //Assert unexpected members not present + Assert.That(ret, Has.No.Member(expectIgnored)); + Assert.That(ret, Has.Count.EqualTo(3)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/TraversalContextExtensionsTests.cs b/tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/TraversalContextExtensionsTests.cs new file mode 100644 index 00000000..e8a38cf8 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/TraversalContextExtensionsTests.cs @@ -0,0 +1,60 @@ +using NUnit.Framework; +using Speckle.Core.Models; +using Speckle.Core.Models.GraphTraversal; + +namespace Speckle.Core.Tests.Unit.Models.GraphTraversal; + +[TestOf(typeof(TraversalContextExtensions))] +public class TraversalContextExtensionsTests +{ + public static int[] TestDepths => new[] { 1, 2, 10 }; + + private TraversalContext CreateLinkedList(int depth, Func<int, Base> createBaseFunc) + { + if (depth <= 0) + return null; + return new TraversalContext(createBaseFunc(depth), $"{depth}", CreateLinkedList(depth - 1, createBaseFunc)); + } + + [TestCaseSource(nameof(TestDepths))] + public void GetPropertyPath_ReturnsSequentialPath(int depth) + { + var testData = CreateLinkedList(depth, i => new()); + + var path = TraversalContextExtensions.GetPropertyPath(testData); + + var expected = Enumerable.Range(1, depth).Select(i => i.ToString()); + + Assert.That(path, Is.EquivalentTo(expected)); + } + + [TestCaseSource(nameof(TestDepths))] + public void GetAscendant(int depth) + { + var testData = CreateLinkedList(depth, i => new()); + + var all = TraversalContextExtensions.GetAscendants(testData).ToArray(); + + Assert.That(all, Has.Length.EqualTo(depth)); + } + + [TestCaseSource(nameof(TestDepths))] + public void GetAscendantOfType_AllBase(int depth) + { + var testData = CreateLinkedList(depth, i => new()); + + var all = TraversalContextExtensions.GetAscendantOfType<Base>(testData).ToArray(); + + Assert.That(all, Has.Length.EqualTo(depth)); + } + + [TestCaseSource(nameof(TestDepths))] + public void GetAscendantOfType_EveryOtherIsCollection(int depth) + { + var testData = CreateLinkedList(depth, i => i % 2 == 0 ?
new Base() : new Collection()); + + var all = TraversalContextExtensions.GetAscendantOfType<Collection>(testData).ToArray(); + + Assert.That(all, Has.Length.EqualTo(Math.Ceiling(depth / 2.0))); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/TraversalMockObjects.cs b/tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/TraversalMockObjects.cs new file mode 100644 index 00000000..799fd249 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/GraphTraversal/TraversalMockObjects.cs @@ -0,0 +1,16 @@ +using Speckle.Core.Models; + +namespace Speckle.Core.Tests.Unit.Models.GraphTraversal; + +public class TraversalMock : Base +{ + public Base Child { get; set; } + + public object ObjectChild { get; set; } + + public List<Base> ListChildren { get; set; } = new(); + + public List<List<Base>> NestedListChildren { get; set; } = new(); + + public Dictionary<string, Base> DictChildren { get; set; } = new(); +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/Hashing.cs b/tests/Speckle.Core.Tests.Unit/Models/Hashing.cs new file mode 100644 index 00000000..05fa1b6e --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/Hashing.cs @@ -0,0 +1,85 @@ +using System.Diagnostics; +using NUnit.Framework; +using Speckle.Core.Models; +using Speckle.Core.Tests.Unit.Kits; + +namespace Speckle.Core.Tests.Unit.Models; + +[TestFixture] +[TestOf(typeof(Base))] +public class Hashing +{ + [Test(Description = "Checks that hashing (as represented by object ids) actually works.")] + public void HashChangeCheck() + { + var table = new DiningTable(); + var secondTable = new DiningTable(); + + Assert.That(secondTable.GetId(), Is.EqualTo(table.GetId())); + + ((dynamic)secondTable).testProp = "wonderful"; + + Assert.That(secondTable.GetId(), Is.Not.EqualTo(table.GetId())); + } + + [Test( + Description = "Tests the convention that dynamic properties that have key names prepended with '__' are ignored."
+ )] + public void IgnoredDynamicPropertiesCheck() + { + var table = new DiningTable(); + var originalHash = table.GetId(); + + ((dynamic)table).__testProp = "wonderful"; + + Assert.That(table.GetId(), Is.EqualTo(originalHash)); + } + + [Test(Description = "Rather stupid test as results vary wildly even on one machine.")] + public void HashingPerformance() + { + var polyline = new Polyline(); + + for (int i = 0; i < 1000; i++) + { + polyline.Points.Add(new Point { X = i * 2, Y = i % 2 }); + } + + var stopWatch = new Stopwatch(); + stopWatch.Start(); + + // Warm-up: first hashing always takes longer due to json serialisation init + _ = polyline.GetId(); + var stopWatchStep = stopWatch.ElapsedMilliseconds; + _ = polyline.GetId(); + + var diff1 = stopWatch.ElapsedMilliseconds - stopWatchStep; + Assert.That(diff1, Is.LessThan(300), $"Hashing shouldn't take that long ({diff1} ms) for the test object used."); + Console.WriteLine($"Big obj hash duration: {diff1} ms"); + + var pt = new Point + { + X = 10, + Y = 12, + Z = 30 + }; + stopWatchStep = stopWatch.ElapsedMilliseconds; + _ = pt.GetId(); + + var diff2 = stopWatch.ElapsedMilliseconds - stopWatchStep; + Assert.That(diff2, Is.LessThan(10), $"Hashing shouldn't take that long ({diff2} ms)for the point object used."); + Console.WriteLine($"Small obj hash duration: {diff2} ms"); + } + + [Test(Description = "The hash of a decomposed object is different that that of a non-decomposed object.")] + public void DecompositionHashes() + { + var table = new DiningTable(); + ((dynamic)table)["@decomposeMePlease"] = new Point(); + + var hash1 = table.GetId(); + var hash2 = table.GetId(true); + + Assert.That(hash2, Is.Not.EqualTo(hash1)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/SpeckleType.cs b/tests/Speckle.Core.Tests.Unit/Models/SpeckleType.cs new file mode 100644 index 00000000..8e0b5994 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/SpeckleType.cs @@ -0,0 +1,34 @@ +using NUnit.Framework; +using Speckle.Core.Models; +using TestModels; + +namespace Speckle.Core.Tests.Unit.Models +{ + [TestFixture] + [TestOf(typeof(Base))] + public class SpeckleTypeTests + { + [Test, TestCaseSource(nameof(s_cases))] + public void SpeckleTypeIsProperlyBuilt(Base foo, string expectedType) + { + Assert.That(foo.speckle_type, Is.EqualTo(expectedType)); + } + + private static readonly object[] s_cases = + { + new object[] { new Base(), "Base" }, + new object[] { new Foo(), "TestModels.Foo" }, + new object[] { new Bar(), "TestModels.Foo:TestModels.Bar" }, + new object[] { new Baz(), "TestModels.Foo:TestModels.Bar:TestModels.Baz" } + }; + } +} + +namespace TestModels +{ + public class Foo : Base { } + + public class Bar : Foo { } + + public class Baz : Bar { } +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/TraversalTests.cs b/tests/Speckle.Core.Tests.Unit/Models/TraversalTests.cs new file mode 100644 index 00000000..723aff75 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/TraversalTests.cs @@ -0,0 +1,102 @@ +using NUnit.Framework; +using Speckle.Core.Models; +using Speckle.Core.Models.Extensions; + +namespace Speckle.Core.Tests.Unit.Models; + +[TestFixture, TestOf(typeof(BaseExtensions))] +public class TraversalTests +{ + [Test, Description("Tests that provided breaker rules are respected")] + public void TestFlattenWithBreaker() + { + //Setup + Base root = + new() + { + id = "root", + ["child"] = new Base + { + id = "traverse through me", + ["child"] = new Base + { + id = "break on me, go no further", + ["child"] = new Base { id = "should 
have ignored me" } + } + } + }; + + static bool BreakRule(Base b) => b.id.Contains("break on me"); + + //Flatten + var ret = root.Flatten(BreakRule).ToList(); + + //Test + Assert.That(ret, Has.Count.EqualTo(3)); + Assert.That(ret, Is.Unique); + Assert.That(ret.Where(BreakRule), Is.Not.Empty); + Assert.That(ret, Has.No.Member(Contains.Substring("should have ignored me"))); + } + + [Test, TestCase(5, 5), TestCase(5, 10), TestCase(10, 5), Description("Tests breaking after a fixed number of items")] + public void TestBreakerFixed(int nestDepth, int flattenDepth) + { + //Setup + Base rootObject = new() { id = "0" }; + Base lastNode = rootObject; + for (int i = 1; i < nestDepth; i++) + { + Base newNode = new() { id = $"{i}" }; + lastNode["child"] = newNode; + lastNode = newNode; + } + + //Flatten + int counter = 0; + var ret = rootObject.Flatten(_ => ++counter >= flattenDepth).ToList(); + + //Test + Assert.That(ret, Has.Count.EqualTo(Math.Min(flattenDepth, nestDepth))); + Assert.That(ret, Is.Unique); + } + + [Test, Timeout(2000), Description("Tests that the flatten function does not get stuck on circular references")] + public void TestCircularReference() + { + //Setup + Base objectA = new() { id = "a" }; + Base objectB = new() { id = "b" }; + Base objectC = new() { id = "c" }; + + objectA["child"] = objectB; + objectB["child"] = objectC; + objectC["child"] = objectA; + + //Flatten + var ret = objectA.Flatten().ToList(); + + //Test + Assert.That(ret, Is.Unique); + Assert.That(ret, Is.EquivalentTo(new[] { objectA, objectB, objectC })); + Assert.That(ret, Has.Count.EqualTo(3)); + } + + [Test, Description("Tests that the flatten function correctly handles (non circular) duplicates")] + public void TestDuplicates() + { + //Setup + Base objectA = new() { id = "a" }; + Base objectB = new() { id = "b" }; + + objectA["child1"] = objectB; + objectA["child2"] = objectB; + + //Flatten + var ret = objectA.Flatten().ToList(); + + //Test + Assert.That(ret, Is.Unique); + Assert.That(ret, Is.EquivalentTo(new[] { objectA, objectB })); + Assert.That(ret, Has.Count.EqualTo(2)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Models/UtilitiesTests.cs b/tests/Speckle.Core.Tests.Unit/Models/UtilitiesTests.cs new file mode 100644 index 00000000..07176d1b --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Models/UtilitiesTests.cs @@ -0,0 +1,71 @@ +using NUnit.Framework; +using Speckle.Core.Helpers; + +namespace Speckle.Core.Tests.Unit.Models; + +[TestFixture(TestOf = typeof(Crypt))] +public sealed class UtilitiesTests +{ + [Test] + [TestOf(nameof(Crypt.Md5))] + [TestCase("WnAbz1hCznVmDh1", "ad48ff1e60ea2369de178aaab2fa99af")] + [TestCase("wQKrSUzBB7FI1o6", "2424cff4a88055b149e5ff2aaf0b3131")] + public void Md5(string input, string expected) + { + var lower = Crypt.Md5(input, "x2"); + var upper = Crypt.Md5(input, "X2"); + Assert.That(lower, Is.EqualTo(expected.ToLower())); + Assert.That(upper, Is.EqualTo(expected.ToUpper())); + } + + [TestCase("fxFB14cBcXvoENN", "887db9349afa455f957a95f9dbacbb3c10697749cf4d4afc5c6398932a596fbc")] + [TestCase("tgWsOH8frdAwJT7", "e486224ded0dcb1452d69d0d005a6dcbc52087f6e8c66e04803e1337a192abb4")] + [TestOf(nameof(Crypt.Sha256))] + public void Sha256(string input, string expected) + { + var lower = Crypt.Sha256(input, "x2"); + var upper = Crypt.Sha256(input, "X2"); + Assert.That(lower, Is.EqualTo(expected.ToLower())); + Assert.That(upper, Is.EqualTo(expected.ToUpper())); + } + + [Test] + public void FlattenToNativeConversion() + { + var singleObject = new object(); + var nestedObjects = 
new List<object>() + { + new List<object>() + { + new(), // obj 1 + new() // obj 2 + }, + new() // obj 3 + }; + + var testEnum = new List<object>() { new(), new() }.Select(o => o); + + var nestedObjectsWithEnumerableInherited = new List<object>() + { + new List<object>() + { + new(), // obj 1 + new(), // obj 2 + testEnum // obj 3 + }, + new() // obj 4 + }; + + var parentTestEnumFlattened = Core.Models.Utilities.FlattenToHostConversionResult(testEnum); + var singleObjectFlattened = Core.Models.Utilities.FlattenToHostConversionResult(singleObject); + var nestedObjectsFlattened = Core.Models.Utilities.FlattenToHostConversionResult(nestedObjects); + var nestedObjectsWithEnumerableInheritedFlattened = Core.Models.Utilities.FlattenToHostConversionResult( + nestedObjectsWithEnumerableInherited + ); + + Assert.That(parentTestEnumFlattened.Count, Is.EqualTo(1)); + Assert.That(singleObjectFlattened.Count, Is.EqualTo(1)); + Assert.That(nestedObjectsFlattened.Count, Is.EqualTo(3)); + Assert.That(nestedObjectsWithEnumerableInheritedFlattened.Count, Is.EqualTo(4)); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Serialisation/ObjectModelDeprecationTests.cs b/tests/Speckle.Core.Tests.Unit/Serialisation/ObjectModelDeprecationTests.cs new file mode 100644 index 00000000..ed8a9aef --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Serialisation/ObjectModelDeprecationTests.cs @@ -0,0 +1,35 @@ +using NUnit.Framework; +using Speckle.Core.Models; +using Speckle.Core.Serialisation.Deprecated; +using Speckle.Core.Serialisation.SerializationUtilities; + +namespace Speckle.Core.Tests.Unit.Serialisation +{ + [TestFixture] + [TestOf(typeof(BaseObjectSerializationUtilities))] + public class ObjectModelDeprecationTests + { + [Test] + public void GetDeprecatedAtomicType() + { + string destinationType = $"Speckle.Speckle.Core.Serialisation.{nameof(MySpeckleBase)}"; + + var result = BaseObjectSerializationUtilities.GetAtomicType(destinationType); + Assert.That(result, Is.EqualTo(typeof(MySpeckleBase))); + } + + [Test] + [TestCase("Objects.Geometry.Mesh", "Objects.Geometry.Deprecated.Mesh")] + [TestCase("Objects.Mesh", "Objects.Deprecated.Mesh")] + public void GetDeprecatedTypeName(string input, string expected) + { + var actual = BaseObjectSerializationUtilities.GetDeprecatedTypeName(input); + Assert.That(actual, Is.EqualTo(expected)); + } + } +} + +namespace Speckle.Core.Serialisation.Deprecated +{ + public class MySpeckleBase : Base { } +} diff --git a/tests/Speckle.Core.Tests.Unit/Serialisation/SerializerBreakingChanges.cs b/tests/Speckle.Core.Tests.Unit/Serialisation/SerializerBreakingChanges.cs new file mode 100644 index 00000000..0fc9549f --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Serialisation/SerializerBreakingChanges.cs @@ -0,0 +1,47 @@ +using NUnit.Framework; +using Speckle.Core.Serialisation; + +namespace Speckle.Core.Tests.Unit.Serialisation; + +/// +/// Test fixture that documents what property typing changes break backwards/cross/forwards compatibility, and are "breaking" changes.
+/// This doesn't guarantee things work this way for SpecklePy +/// Nor does it encompass other tricks (like deserialize callback, or computed json ignored properties) +/// +[TestFixture] +[Description( + "For certain types, changing property from one type to another is a breaking change, and not backwards/forwards compatible" +)] +public class SerializerBreakingChanges : PrimitiveTestFixture +{ + [Test] + public void StringToInt_ShouldThrow() + { + var from = new StringValueMock { value = "testValue" }; + + Assert.Throws(() => from.SerializeAsTAndDeserialize()); + } + + [Test, TestCaseSource(nameof(MyEnums))] + public void StringToEnum_ShouldThrow(MyEnum testCase) + { + var from = new StringValueMock { value = testCase.ToString() }; + + Assert.Throws(() => + { + var res = from.SerializeAsTAndDeserialize(); + }); + } + + [ + Test, + Description("Deserialization of a JTokenType.Float to a .NET short/int/long should throw exception"), + TestCaseSource(nameof(Float64TestCases)), + TestCase(1e+30) + ] + public void DoubleToInt_ShouldThrow(double testCase) + { + var from = new DoubleValueMock { value = testCase }; + Assert.Throws(() => from.SerializeAsTAndDeserialize()); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Serialisation/SerializerNonBreakingChanges.cs b/tests/Speckle.Core.Tests.Unit/Serialisation/SerializerNonBreakingChanges.cs new file mode 100644 index 00000000..ef2c5e50 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Serialisation/SerializerNonBreakingChanges.cs @@ -0,0 +1,292 @@ +using System.DoubleNumerics; +using System.Drawing; +using NUnit.Framework; +using Speckle.Core.Api; +using Speckle.Core.Helpers; +using Speckle.Core.Models; + +namespace Speckle.Core.Tests.Unit.Serialisation; + +/// +/// Test fixture that documents what property typing changes maintain backwards/cross/forwards compatibility, and are "non-breaking" changes. 
+/// This doesn't guarantee things work this way for SpecklePy +/// Nor does it encompass other tricks (like deserialize callback, or computed json ignored properties) +/// +[TestFixture] +[Description("For certain types, changing property from one type to another should be implicitly backwards compatible")] +public class SerializerNonBreakingChanges : PrimitiveTestFixture +{ + [Test, TestCaseSource(nameof(Int8TestCases)), TestCaseSource(nameof(Int32TestCases))] + public void IntToColor(int argb) + { + var from = new IntValueMock { value = argb }; + + var res = from.SerializeAsTAndDeserialize(); + Assert.That(res.value.ToArgb(), Is.EqualTo(argb)); + } + + [Test, TestCaseSource(nameof(Int8TestCases)), TestCaseSource(nameof(Int32TestCases))] + public void ColorToInt(int argb) + { + var from = new ColorValueMock { value = Color.FromArgb(argb) }; + + var res = from.SerializeAsTAndDeserialize(); + Assert.That(res.value, Is.EqualTo(argb)); + } + + [ + Test, + TestCaseSource(nameof(Int8TestCases)), + TestCaseSource(nameof(Int32TestCases)), + TestCaseSource(nameof(Int64TestCases)) + ] + public void IntToDouble(long testCase) + { + var from = new IntValueMock { value = testCase }; + + var res = from.SerializeAsTAndDeserialize(); + Assert.That(res.value, Is.EqualTo(testCase)); + } + + [ + Test, + TestCaseSource(nameof(Int8TestCases)), + TestCaseSource(nameof(Int32TestCases)), + TestCaseSource(nameof(Int64TestCases)) + ] + public void IntToString(long testCase) + { + var from = new IntValueMock { value = testCase }; + + var res = from.SerializeAsTAndDeserialize(); + Assert.That(res.value, Is.EqualTo(testCase.ToString())); + } + + private static readonly double[][] s_arrayTestCases = + { + Array.Empty(), + new double[] { 0, 1, int.MaxValue, int.MinValue }, + new[] { default, double.Epsilon, double.MaxValue, double.MinValue } + }; + + [Test, TestCaseSource(nameof(s_arrayTestCases))] + public void ArrayToList(double[] testCase) + { + var from = new ArrayDoubleValueMock { value = testCase }; + + var res = from.SerializeAsTAndDeserialize(); + Assert.That(res.value, Is.EquivalentTo(testCase)); + } + + [Test, TestCaseSource(nameof(s_arrayTestCases))] + public void ListToArray(double[] testCase) + { + var from = new ListDoubleValueMock { value = testCase.ToList() }; + + var res = from.SerializeAsTAndDeserialize(); + Assert.That(res.value, Is.EquivalentTo(testCase)); + } + + [Test, TestCaseSource(nameof(MyEnums))] + public void EnumToInt(MyEnum testCase) + { + var from = new EnumValueMock { value = testCase }; + + var res = from.SerializeAsTAndDeserialize(); + Assert.That(res.value, Is.EqualTo((int)testCase)); + } + + [Test, TestCaseSource(nameof(MyEnums))] + public void IntToEnum(MyEnum testCase) + { + var from = new IntValueMock { value = (int)testCase }; + + var res = from.SerializeAsTAndDeserialize(); + Assert.That(res.value, Is.EqualTo(testCase)); + } + + [Test] + [TestCaseSource(nameof(Float64TestCases))] + [TestCaseSource(nameof(Float32TestCases))] + public void DoubleToDouble(double testCase) + { + var from = new DoubleValueMock { value = testCase }; + + var res = from.SerializeAsTAndDeserialize(); + Assert.That(res.value, Is.EqualTo(testCase)); + } + + [Test] + [TestCase(123, 255)] + [TestCase(256, 1)] + [TestCase(256, float.MinValue)] + public void ListToMatrix64(int seed, double scalar) + { + Random rand = new(seed); + List testCase = Enumerable.Range(0, 16).Select(_ => rand.NextDouble() * scalar).ToList(); + + ListDoubleValueMock from = new() { value = testCase, }; + + //Test List -> Matrix + var 
res = from.SerializeAsTAndDeserialize<Matrix64ValueMock>(); + Assert.That(res.value.M11, Is.EqualTo(testCase[0])); + Assert.That(res.value.M44, Is.EqualTo(testCase[^1])); + + //Test Matrix -> List + var backAgain = res.SerializeAsTAndDeserialize<ListDoubleValueMock>(); + Assert.That(backAgain.value, Is.Not.Null); + Assert.That(backAgain.value, Is.EquivalentTo(testCase)); + } + + [Test] + [TestCase(123, 255)] + [TestCase(256, 1)] + [DefaultFloatingPointTolerance(Constants.EPS)] + public void Matrix32ToMatrix64(int seed, float scalar) + { + Random rand = new(seed); + List<double> testCase = Enumerable.Range(0, 16).Select(_ => rand.NextDouble() * scalar).ToList(); + + ListDoubleValueMock from = new() { value = testCase, }; + + //Test List -> Matrix + var res = from.SerializeAsTAndDeserialize<Matrix32ValueMock>(); + Assert.That(res.value.M11, Is.EqualTo(testCase[0])); + Assert.That(res.value.M44, Is.EqualTo(testCase[^1])); + + //Test Matrix -> List + var backAgain = res.SerializeAsTAndDeserialize<ListDoubleValueMock>(); + Assert.That(backAgain.value, Is.Not.Null); + Assert.That(backAgain.value, Is.EquivalentTo(testCase)); + } +} + +public class TValueMock<T> : SerializerMock +{ + public T value { get; set; } +} + +public class ListDoubleValueMock : SerializerMock +{ + public List<double> value { get; set; } +} + +public class ArrayDoubleValueMock : SerializerMock +{ + public double[] value { get; set; } +} + +public class IntValueMock : SerializerMock +{ + public long value { get; set; } +} + +public class StringValueMock : SerializerMock +{ + public string value { get; set; } +} + +public class DoubleValueMock : SerializerMock +{ + public double value { get; set; } +} + +public class Matrix64ValueMock : SerializerMock +{ + public Matrix4x4 value { get; set; } +} + +public class Matrix32ValueMock : SerializerMock +{ + public System.Numerics.Matrix4x4 value { get; set; } +} + +public class ColorValueMock : SerializerMock +{ + public Color value { get; set; } +} + +public class EnumValueMock : SerializerMock +{ + public MyEnum value { get; set; } +} + +public enum MyEnum +{ + Zero, + One, + Two, + Three, + Neg = -1, + Min = int.MinValue, + Max = int.MaxValue +} + +public abstract class SerializerMock : Base +{ + private string _speckle_type; + + protected SerializerMock() + { + _speckle_type = base.speckle_type; + } + + public override string speckle_type => _speckle_type; + + public void SerializeAs<T>() + where T : Base, new() + { + T target = new(); + _speckle_type = target.speckle_type; + } + + internal TTo SerializeAsTAndDeserialize<TTo>() + where TTo : Base, new() + { + SerializeAs<TTo>(); + + var json = Operations.Serialize(this); + + Base result = Operations.Deserialize(json); + Assert.That(result, Is.Not.Null); + Assert.That(result, Is.TypeOf<TTo>()); + return (TTo)result; + } +} + +public abstract class PrimitiveTestFixture +{ + public static readonly sbyte[] Int8TestCases = { default, sbyte.MaxValue, sbyte.MinValue }; + public static readonly short[] Int16TestCases = { short.MaxValue, short.MinValue }; + public static readonly int[] Int32TestCases = { int.MinValue, int.MaxValue }; + public static readonly long[] Int64TestCases = { long.MaxValue, long.MinValue }; + + public static double[] Float64TestCases { get; } = + { + default, + double.Epsilon, + double.MaxValue, + double.MinValue, + double.PositiveInfinity, + double.NegativeInfinity, + double.NaN + }; + + public static float[] Float32TestCases { get; } = + { + default, + float.Epsilon, + float.MaxValue, + float.MinValue, + float.PositiveInfinity, + float.NegativeInfinity, + float.NaN + }; + + public static Half[] Float16TestCases { get; } = + {
default, Half.Epsilon, Half.MaxValue, Half.MinValue, Half.PositiveInfinity, Half.NegativeInfinity, Half.NaN }; + + public static float[] FloatIntegralTestCases { get; } = { 0, 1, int.MaxValue, int.MinValue }; + + public static MyEnum[] MyEnums { get; } = Enum.GetValues(typeof(MyEnum)).Cast().ToArray(); +} diff --git a/tests/Speckle.Core.Tests.Unit/Speckle.Core.Tests.Unit.csproj b/tests/Speckle.Core.Tests.Unit/Speckle.Core.Tests.Unit.csproj new file mode 100644 index 00000000..ebcaf9d4 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Speckle.Core.Tests.Unit.csproj @@ -0,0 +1,24 @@ + + + + net8.0 + enable + false + true + + + + + + + + + + + + + + + + + diff --git a/tests/Speckle.Core.Tests.Unit/Transports/DiskTransportTests.cs b/tests/Speckle.Core.Tests.Unit/Transports/DiskTransportTests.cs new file mode 100644 index 00000000..eb19187d --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Transports/DiskTransportTests.cs @@ -0,0 +1,36 @@ +using NUnit.Framework; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Unit.Transports; + +[TestFixture] +[TestOf(nameof(DiskTransport))] +public sealed class DiskTransportTests : TransportTests +{ + protected override ITransport Sut => _diskTransport!; + + private DiskTransport _diskTransport; + + private static readonly string s_basePath = $"./temp {Guid.NewGuid()}"; + private const string APPLICATION_NAME = "Speckle Integration Tests"; + private static readonly string s_fullPath = Path.Combine(s_basePath, APPLICATION_NAME); + + [SetUp] + public void Setup() + { + _diskTransport = new DiskTransport(s_fullPath); + } + + [TearDown] + public void TearDown() + { + Directory.Delete(s_basePath, true); + } + + [Test] + public void DirectoryCreated_AfterInitialization() + { + bool fileExists = Directory.Exists(s_fullPath); + Assert.That(fileExists, Is.True); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Transports/MemoryTransportTests.cs b/tests/Speckle.Core.Tests.Unit/Transports/MemoryTransportTests.cs new file mode 100644 index 00000000..e518eea5 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Transports/MemoryTransportTests.cs @@ -0,0 +1,20 @@ +using System.Collections.Concurrent; +using NUnit.Framework; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Unit.Transports; + +[TestFixture] +[TestOf(nameof(MemoryTransport))] +public sealed class MemoryTransportTests : TransportTests +{ + protected override ITransport Sut => _memoryTransport!; + + private MemoryTransport _memoryTransport; + + [SetUp] + public void Setup() + { + _memoryTransport = new MemoryTransport(new ConcurrentDictionary()); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Transports/SQLiteTransportTests.cs b/tests/Speckle.Core.Tests.Unit/Transports/SQLiteTransportTests.cs new file mode 100644 index 00000000..735536cd --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Transports/SQLiteTransportTests.cs @@ -0,0 +1,164 @@ +using Microsoft.Data.Sqlite; +using NUnit.Framework; +using Speckle.Core.Transports; + +namespace Speckle.Core.Tests.Unit.Transports; + +[TestFixture] +[TestOf(nameof(SQLiteTransport))] +public sealed class SQLiteTransportTests : TransportTests, IDisposable +{ + protected override ITransport Sut => _sqlite!; + + private SQLiteTransport _sqlite; + + private static readonly string s_basePath = $"./temp {Guid.NewGuid()}"; + private const string APPLICATION_NAME = "Speckle Integration Tests"; + + [SetUp] + public void Setup() + { + _sqlite = new SQLiteTransport(s_basePath, APPLICATION_NAME); + } + + [TearDown] + public void TearDown() + { + 
_sqlite?.Dispose(); + SqliteConnection.ClearAllPools(); + Directory.Delete(s_basePath, true); + _sqlite = null; + } + + [Test] + public void DbCreated_AfterInitialization() + { + bool fileExists = File.Exists($"{s_basePath}/{APPLICATION_NAME}/Data.db"); + Assert.That(fileExists, Is.True); + } + + [Test] + [Description("Tests that an object can be updated")] + public async Task UpdateObject_AfterAdd() + { + const string PAYLOAD_ID = "MyTestObjectId"; + const string PAYLOAD_DATA = "MyTestObjectData"; + + _sqlite.SaveObject(PAYLOAD_ID, PAYLOAD_DATA); + await _sqlite.WriteComplete(); + + const string NEW_PAYLOAD = "MyEvenBetterObjectData"; + _sqlite.UpdateObject(PAYLOAD_ID, NEW_PAYLOAD); + await _sqlite.WriteComplete(); + + var result = _sqlite.GetObject(PAYLOAD_ID); + Assert.That(result, Is.EqualTo(NEW_PAYLOAD)); + } + + [Test] + [Description("Tests that updating an object that hasn't been saved previously adds the object to the DB")] + public async Task UpdateObject_WhenMissing() + { + const string PAYLOAD_ID = "MyTestObjectId"; + const string PAYLOAD_DATA = "MyTestObjectData"; + + var preUpdate = _sqlite.GetObject(PAYLOAD_ID); + Assert.That(preUpdate, Is.Null); + + _sqlite.UpdateObject(PAYLOAD_ID, PAYLOAD_DATA); + await _sqlite.WriteComplete(); + + var postUpdate = _sqlite.GetObject(PAYLOAD_ID); + Assert.That(postUpdate, Is.EqualTo(PAYLOAD_DATA)); + } + + [Test] + public void SaveAndRetrieveObject_Sync() + { + const string PAYLOAD_ID = "MyTestObjectId"; + const string PAYLOAD_DATA = "MyTestObjectData"; + + { + var preAdd = Sut.GetObject(PAYLOAD_ID); + Assert.That(preAdd, Is.Null); + } + + _sqlite.SaveObjectSync(PAYLOAD_ID, PAYLOAD_DATA); + + { + var postAdd = Sut.GetObject(PAYLOAD_ID); + Assert.That(postAdd, Is.EqualTo(PAYLOAD_DATA)); + } + } + + [Test( + Description = "Tests that it is possible to enumerate through all objects of the transport while updating them, without getting stuck in an infinite loop" + )] + [Timeout(1000)] + public void UpdateObject_WhileEnumerating() + { + //I question if this is the behaviour we want, but AccountManager.GetObjects is relying on being able to update objects while enumerating over them + const string UPDATE_STRING = "_new"; + Dictionary testData = + new() + { + { "a", "This is object a" }, + { "b", "This is object b" }, + { "c", "This is object c" }, + { "d", "This is object d" } + }; + int length = testData.Values.First().Length; + + foreach (var (key, data) in testData) + { + _sqlite.SaveObjectSync(key, data); + } + + foreach (var o in _sqlite.GetAllObjects()) + { + string newData = o + UPDATE_STRING; + string key = $"{o[length - 1]}"; + + _sqlite.UpdateObject(key, newData); + } + + //Assert that objects were updated + Assert.That(_sqlite.GetAllObjects().ToList(), Has.All.Contains(UPDATE_STRING)); + //Assert that objects were only updated once + Assert.That(_sqlite.GetAllObjects().ToList(), Has.All.Length.EqualTo(length + UPDATE_STRING.Length)); + } + + [Test] + [Repeat(10)] + [TestCase(6, 32)] + [Description( + $"Tests that the {nameof(SQLiteTransport.GetAllObjects)} function can be called concurrently from multiple threads" + )] + public void GetAllObjects_IsThreadSafe(int dataSize, int parallelism) + { + foreach (int i in Enumerable.Range(0, dataSize)) + { + _sqlite.SaveObjectSync(i.ToString(), Guid.NewGuid().ToString()); + } + + List[] results = new List[parallelism]; + Parallel.ForEach( + Enumerable.Range(0, parallelism), + i => + { + results[i] = _sqlite.GetAllObjects().ToList(); + } + ); + + foreach (var result in results) + { + 
Assert.That(result, Is.EquivalentTo(results[0])); + Assert.That(result, Has.Count.EqualTo(dataSize)); + } + } + + public void Dispose() + { + _sqlite?.Dispose(); + } +} diff --git a/tests/Speckle.Core.Tests.Unit/Transports/TransportTests.cs b/tests/Speckle.Core.Tests.Unit/Transports/TransportTests.cs new file mode 100644 index 00000000..067c5298 --- /dev/null +++ b/tests/Speckle.Core.Tests.Unit/Transports/TransportTests.cs @@ -0,0 +1,182 @@ +#nullable enable +using NUnit.Framework; +using Speckle.Core.Transports; +using Speckle.Newtonsoft.Json; + +namespace Speckle.Core.Tests.Unit.Transports; + +[TestFixture] +public abstract class TransportTests +{ + protected abstract ITransport Sut { get; } + + [Test] + public async Task SaveAndRetrieveObject() + { + const string PAYLOAD_ID = "MyTestObjectId"; + const string PAYLOAD_DATA = "MyTestObjectData"; + + { + var preAdd = Sut.GetObject(PAYLOAD_ID); + Assert.That(preAdd, Is.Null); + } + + Sut.SaveObject(PAYLOAD_ID, PAYLOAD_DATA); + await Sut.WriteComplete(); + + { + var postAdd = Sut.GetObject(PAYLOAD_ID); + Assert.That(postAdd, Is.EqualTo(PAYLOAD_DATA)); + } + } + + [Test] + public async Task HasObject() + { + const string PAYLOAD_ID = "MyTestObjectId"; + const string PAYLOAD_DATA = "MyTestObjectData"; + + { + var preAdd = await Sut.HasObjects(new[] { PAYLOAD_ID }); + Assert.That(preAdd, Has.Exactly(1).Items); + Assert.That(preAdd, Has.No.ContainValue(true)); + Assert.That(preAdd, Contains.Key(PAYLOAD_ID)); + } + + Sut.SaveObject(PAYLOAD_ID, PAYLOAD_DATA); + await Sut.WriteComplete(); + + { + var postAdd = await Sut.HasObjects(new[] { PAYLOAD_ID }); + + Assert.That(postAdd, Has.Exactly(1).Items); + Assert.That(postAdd, Has.No.ContainValue(false)); + Assert.That(postAdd, Contains.Key(PAYLOAD_ID)); + } + } + + [Test] + [Description("Test that transports save objects when many threads are concurrently saving data")] + public async Task SaveObject_ConcurrentWrites() + { + const int TEST_DATA_COUNT = 100; + List<(string id, string data)> testData = Enumerable + .Range(0, TEST_DATA_COUNT) + .Select(_ => (Guid.NewGuid().ToString(), Guid.NewGuid().ToString())) + .ToList(); + + Parallel.ForEach( + testData, + x => + { + Sut.SaveObject(x.id, x.data); + } + ); + + await Sut.WriteComplete(); + + //Test 1. SavedObjectCount //WARN: FAIL!!! seems this is not implemented for SQLite Transport + //Assert.That(transport.SavedObjectCount, Is.EqualTo(testDataCount)); + + //Test 2. HasObjects + var ids = testData.Select(x => x.id).ToList(); + var hasObjectsResult = await Sut.HasObjects(ids); + + Assert.That(hasObjectsResult, Does.Not.ContainValue(false)); + Assert.That(hasObjectsResult.Keys, Is.EquivalentTo(ids)); + + //Test 3. 
GetObjects
+    foreach (var x in testData)
+    {
+      var res = Sut.GetObject(x.id);
+      Assert.That(res, Is.EqualTo(x.data));
+    }
+  }
+
+  [Test]
+  public void SaveObject_FromTransport_FailsPredictably()
+  {
+    var exception = Assert.Throws<TransportException>(() => Sut.SaveObject("non-existent-id", Sut));
+    Assert.That(exception?.Transport, Is.EqualTo(Sut));
+  }
+
+  [Test]
+  public async Task ProgressAction_Called_OnSaveObject()
+  {
+    bool wasCalled = false;
+    Sut.OnProgressAction = (_, _) => wasCalled = true;
+
+    Sut.SaveObject("12345", "fake payload data");
+
+    await Sut.WriteComplete();
+
+    Assert.That(wasCalled, Is.True);
+  }
+
+  [Test]
+  public void ToString_IsNotEmpty()
+  {
+    var toString = Sut.ToString();
+
+    Assert.That(toString, Is.Not.Null);
+    Assert.That(toString, Is.Not.Empty);
+  }
+
+  [Test]
+  public void TransportName_IsNotEmpty()
+  {
+    var toString = Sut.TransportName;
+
+    Assert.That(toString, Is.Not.Null);
+    Assert.That(toString, Is.Not.Empty);
+  }
+
+  [Test]
+  public void SaveObject_ExceptionThrown_TaskIsCanceled()
+  {
+    using CancellationTokenSource tokenSource = new();
+    Sut.CancellationToken = tokenSource.Token;
+
+    tokenSource.Cancel();
+
+    Assert.CatchAsync(async () =>
+    {
+      Sut.SaveObject("abcdef", "fake payload data");
+      await Sut.WriteComplete();
+    });
+  }
+
+  [Test]
+  public async Task CopyObjectAndChildren()
+  {
+    //Assemble
+    const int TEST_DATA_COUNT = 100;
+    List<(string id, string data)> testData = Enumerable
+      .Range(0, TEST_DATA_COUNT)
+      .Select(_ => (Guid.NewGuid().ToString(), Guid.NewGuid().ToString()))
+      .ToList();
+
+    foreach (var x in testData)
+    {
+      Sut.SaveObject(x.id, x.data);
+    }
+
+    var parent = JsonConvert.SerializeObject(
+      new TransportHelpers.Placeholder() { __closure = testData.Select(x => x.id).ToDictionary(x => x, _ => 1) }
+    );
+    Sut.SaveObject("root", parent);
+
+    await Sut.WriteComplete();
+
+    // Act
+    MemoryTransport destination = new();
+    await Sut.CopyObjectAndChildren("root", destination);
+
+    //Assert
+    foreach (var (expectedId, expectedData) in testData)
+    {
+      var actual = destination.GetObject(expectedId);
+      Assert.That(actual, Is.EqualTo(expectedData));
+    }
+  }
+}
diff --git a/tests/Speckle.Core.Tests.Unit/packages.lock.json b/tests/Speckle.Core.Tests.Unit/packages.lock.json
new file mode 100644
index 00000000..bbeb29fb
--- /dev/null
+++ b/tests/Speckle.Core.Tests.Unit/packages.lock.json
@@ -0,0 +1,428 @@
+{
+  "version": 2,
+  "dependencies": {
+    "net8.0": {
+      "JunitXml.TestLogger": {
+        "type": "Direct",
+        "requested": "[3.0.124, )",
+        "resolved": "3.0.124",
+        "contentHash": "QTZhSNm/xjj24W1yterf6eABv6KO+Y9jBqpau5RzPehdXTXkZcGQaLf/i50nTl+qnSwpbKkrC+bSyvLRE1ZNAg=="
+      },
+      "Microsoft.NET.Test.Sdk": {
+        "type": "Direct",
+        "requested": "[17.10.0, )",
+        "resolved": "17.10.0",
+        "contentHash": "0/2HeACkaHEYU3wc83YlcD2Fi4LMtECJjqrtvw0lPi9DCEa35zSPt1j4fuvM8NagjDqJuh1Ja35WcRtn1Um6/A==",
+        "dependencies": {
+          "Microsoft.CodeCoverage": "17.10.0",
+          "Microsoft.TestPlatform.TestHost": "17.10.0"
+        }
+      },
+      "NUnit": {
+        "type": "Direct",
+        "requested": "[4.1.0, )",
+        "resolved": "4.1.0",
+        "contentHash": "MT/DpAhjtiytzhTgTqIhBuWx4y26PKfDepYUHUM+5uv4TsryHC2jwFo5e6NhWkApCm/G6kZ80dRjdJFuAxq3rg=="
+      },
+      "NUnit3TestAdapter": {
+        "type": "Direct",
+        "requested": "[4.5.0, )",
+        "resolved": "4.5.0",
+        "contentHash": "s8JpqTe9bI2f49Pfr3dFRfoVSuFQyraTj68c3XXjIS/MRGvvkLnrg6RLqnTjdShX+AdFUCCU/4Xex58AdUfs6A=="
+      },
+      "PolySharp": {
+        "type": "Direct",
+        "requested": "[1.14.1, )",
+        "resolved": "1.14.1",
+        "contentHash":
"mOOmFYwad3MIOL14VCjj02LljyF1GNw1wP0YVlxtcPvqdxjGGMNdNJJxHptlry3MOd8b40Flm8RPOM8JOlN2sQ==" + }, + "GraphQL.Client.Abstractions": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "h7uzWFORHZ+CCjwr/ThAyXMr0DPpzEANDa4Uo54wqCQ+j7qUKwqYTgOrb1W40sqbvNaZm9v/X7It31SUw0maHA==", + "dependencies": { + "GraphQL.Primitives": "6.0.0" + } + }, + "GraphQL.Client.Abstractions.Websocket": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "Nr9bPf8gIOvLuXpqEpqr9z9jslYFJOvd0feHth3/kPqeR3uMbjF5pjiwh4jxyMcxHdr8Pb6QiXkV3hsSyt0v7A==", + "dependencies": { + "GraphQL.Client.Abstractions": "6.0.0" + } + }, + "GraphQL.Primitives": { + "type": "Transitive", + "resolved": "6.0.0", + "contentHash": "yg72rrYDapfsIUrul7aF6wwNnTJBOFvuA9VdDTQpPa8AlAriHbufeXYLBcodKjfUdkCnaiggX1U/nEP08Zb5GA==" + }, + "Microsoft.AspNetCore.Http": { + "type": "Transitive", + "resolved": "2.2.2", + "contentHash": "BAibpoItxI5puk7YJbIGj95arZueM8B8M5xT1fXBn3hb3L2G3ucrZcYXv1gXdaroLbntUs8qeV8iuBrpjQsrKw==", + "dependencies": { + "Microsoft.AspNetCore.Http.Abstractions": "2.2.0", + "Microsoft.AspNetCore.WebUtilities": "2.2.0", + "Microsoft.Extensions.ObjectPool": "2.2.0", + "Microsoft.Extensions.Options": "2.2.0", + "Microsoft.Net.Http.Headers": "2.2.0" + } + }, + "Microsoft.AspNetCore.Http.Abstractions": { + "type": "Transitive", + "resolved": "2.2.0", + "contentHash": "Nxs7Z1q3f1STfLYKJSVXCs1iBl+Ya6E8o4Oy1bCxJ/rNI44E/0f6tbsrVqAWfB7jlnJfyaAtIalBVxPKUPQb4Q==", + "dependencies": { + "Microsoft.AspNetCore.Http.Features": "2.2.0", + "System.Text.Encodings.Web": "4.5.0" + } + }, + "Microsoft.AspNetCore.Http.Features": { + "type": "Transitive", + "resolved": "2.2.0", + "contentHash": "ziFz5zH8f33En4dX81LW84I6XrYXKf9jg6aM39cM+LffN9KJahViKZ61dGMSO2gd3e+qe5yBRwsesvyqlZaSMg==", + "dependencies": { + "Microsoft.Extensions.Primitives": "2.2.0" + } + }, + "Microsoft.AspNetCore.WebUtilities": { + "type": "Transitive", + "resolved": "2.2.0", + "contentHash": "9ErxAAKaDzxXASB/b5uLEkLgUWv1QbeVxyJYEHQwMaxXOeFFVkQxiq8RyfVcifLU7NR0QY0p3acqx4ZpYfhHDg==", + "dependencies": { + "Microsoft.Net.Http.Headers": "2.2.0", + "System.Text.Encodings.Web": "4.5.0" + } + }, + "Microsoft.CodeCoverage": { + "type": "Transitive", + "resolved": "17.10.0", + "contentHash": "yC7oSlnR54XO5kOuHlVOKtxomNNN1BWXX8lK1G2jaPXT9sUok7kCOoA4Pgs0qyFaCtMrNsprztYMeoEGqCm4uA==" + }, + "Microsoft.CSharp": { + "type": "Transitive", + "resolved": "4.7.0", + "contentHash": "pTj+D3uJWyN3My70i2Hqo+OXixq3Os2D1nJ2x92FFo6sk8fYS1m1WLNTs0Dc1uPaViH0YvEEwvzddQ7y4rhXmA==" + }, + "Microsoft.Data.Sqlite.Core": { + "type": "Transitive", + "resolved": "7.0.5", + "contentHash": "FTerRmQPqHrCrnoUzhBu+E+1DNGwyrAMLqHkAqOOOu5pGfyMOj8qQUBxI/gDtWtG11p49UxSfWmBzRNlwZqfUg==", + "dependencies": { + "SQLitePCLRaw.core": "2.1.4" + } + }, + "Microsoft.Extensions.DependencyInjection.Abstractions": { + "type": "Transitive", + "resolved": "2.2.0", + "contentHash": "f9hstgjVmr6rmrfGSpfsVOl2irKAgr1QjrSi3FgnS7kulxband50f2brRLwySAQTADPZeTdow0mpSMcoAdadCw==" + }, + "Microsoft.Extensions.ObjectPool": { + "type": "Transitive", + "resolved": "2.2.0", + "contentHash": "gA8H7uQOnM5gb+L0uTNjViHYr+hRDqCdfugheGo/MxQnuHzmhhzCBTIPm19qL1z1Xe0NEMabfcOBGv9QghlZ8g==" + }, + "Microsoft.Extensions.Options": { + "type": "Transitive", + "resolved": "2.2.0", + "contentHash": "UpZLNLBpIZ0GTebShui7xXYh6DmBHjWM8NxGxZbdQh/bPZ5e6YswqI+bru6BnEL5eWiOdodsXtEz3FROcgi/qg==", + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "2.2.0", + "Microsoft.Extensions.Primitives": "2.2.0", + 
"System.ComponentModel.Annotations": "4.5.0" + } + }, + "Microsoft.Extensions.Primitives": { + "type": "Transitive", + "resolved": "2.2.0", + "contentHash": "azyQtqbm4fSaDzZHD/J+V6oWMFaf2tWP4WEGIYePLCMw3+b2RQdj9ybgbQyjCshcitQKQ4lEDOZjmSlTTrHxUg==", + "dependencies": { + "System.Memory": "4.5.1", + "System.Runtime.CompilerServices.Unsafe": "4.5.1" + } + }, + "Microsoft.Net.Http.Headers": { + "type": "Transitive", + "resolved": "2.2.0", + "contentHash": "iZNkjYqlo8sIOI0bQfpsSoMTmB/kyvmV2h225ihyZT33aTp48ZpF6qYnXxzSXmHt8DpBAwBTX+1s1UFLbYfZKg==", + "dependencies": { + "Microsoft.Extensions.Primitives": "2.2.0", + "System.Buffers": "4.5.0" + } + }, + "Microsoft.TestPlatform.ObjectModel": { + "type": "Transitive", + "resolved": "17.10.0", + "contentHash": "KkwhjQevuDj0aBRoPLY6OLAhGqbPUEBuKLbaCs0kUVw29qiOYncdORd4mLVJbn9vGZ7/iFGQ/+AoJl0Tu5Umdg==", + "dependencies": { + "System.Reflection.Metadata": "1.6.0" + } + }, + "Microsoft.TestPlatform.TestHost": { + "type": "Transitive", + "resolved": "17.10.0", + "contentHash": "LWpMdfqhHvcUkeMCvNYJO8QlPLlYz9XPPb+ZbaXIKhdmjAV0wqTSrTiW5FLaf7RRZT50AQADDOYMOe0HxDxNgA==", + "dependencies": { + "Microsoft.TestPlatform.ObjectModel": "17.10.0", + "Newtonsoft.Json": "13.0.1" + } + }, + "Newtonsoft.Json": { + "type": "Transitive", + "resolved": "13.0.1", + "contentHash": "ppPFpBcvxdsfUonNcvITKqLl3bqxWbDCZIzDWHzjpdAHRFfZe0Dw9HmA0+za13IdyrgJwpkDTDA9fHaxOrt20A==" + }, + "Serilog.Formatting.Compact": { + "type": "Transitive", + "resolved": "1.1.0", + "contentHash": "pNroKVjo+rDqlxNG5PXkRLpfSCuDOBY0ri6jp9PLe505ljqwhwZz8ospy2vWhQlFu5GkIesh3FcDs4n7sWZODA==", + "dependencies": { + "Serilog": "2.8.0" + } + }, + "Serilog.Sinks.File": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "uwV5hdhWPwUH1szhO8PJpFiahqXmzPzJT/sOijH/kFgUx+cyoDTMM8MHD0adw9+Iem6itoibbUXHYslzXsLEAg==", + "dependencies": { + "Serilog": "2.10.0" + } + }, + "Serilog.Sinks.PeriodicBatching": { + "type": "Transitive", + "resolved": "3.1.0", + "contentHash": "NDWR7m3PalVlGEq3rzoktrXikjFMLmpwF0HI4sowo8YDdU+gqPlTHlDQiOGxHfB0sTfjPA9JjA7ctKG9zqjGkw==", + "dependencies": { + "Serilog": "2.0.0" + } + }, + "SQLitePCLRaw.bundle_e_sqlite3": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "EWI1olKDjFEBMJu0+3wuxwziIAdWDVMYLhuZ3Qs84rrz+DHwD00RzWPZCa+bLnHCf3oJwuFZIRsHT5p236QXww==", + "dependencies": { + "SQLitePCLRaw.lib.e_sqlite3": "2.1.4", + "SQLitePCLRaw.provider.e_sqlite3": "2.1.4" + } + }, + "SQLitePCLRaw.core": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "inBjvSHo9UDKneGNzfUfDjK08JzlcIhn1+SP5Y3m6cgXpCxXKCJDy6Mka7LpgSV+UZmKSnC8rTwB0SQ0xKu5pA==", + "dependencies": { + "System.Memory": "4.5.3" + } + }, + "SQLitePCLRaw.lib.e_sqlite3": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "2C9Q9eX7CPLveJA0rIhf9RXAvu+7nWZu1A2MdG6SD/NOu26TakGgL1nsbc0JAspGijFOo3HoN79xrx8a368fBg==" + }, + "SQLitePCLRaw.provider.e_sqlite3": { + "type": "Transitive", + "resolved": "2.1.4", + "contentHash": "CSlb5dUp1FMIkez9Iv5EXzpeq7rHryVNqwJMWnpq87j9zWZexaEMdisDktMsnnrzKM6ahNrsTkjqNodTBPBxtQ==", + "dependencies": { + "SQLitePCLRaw.core": "2.1.4" + } + }, + "System.Buffers": { + "type": "Transitive", + "resolved": "4.5.0", + "contentHash": "pL2ChpaRRWI/p4LXyy4RgeWlYF2sgfj/pnVMvBqwNFr5cXg7CXNnWZWxrOONLg8VGdFB8oB+EG2Qw4MLgTOe+A==" + }, + "System.ComponentModel.Annotations": { + "type": "Transitive", + "resolved": "4.5.0", + "contentHash": "UxYQ3FGUOtzJ7LfSdnYSFd7+oEv6M8NgUatatIN2HxNtDdlcvFAf+VIq4Of9cDMJEJC0aSRv/x898RYhB4Yppg==" + }, + "System.Memory": { + "type": "Transitive", + 
"resolved": "4.5.3", + "contentHash": "3oDzvc/zzetpTKWMShs1AADwZjQ/36HnsufHRPcOjyRAAMLDlu2iD33MBI2opxnezcVUtXyqDXXjoFMOU9c7SA==" + }, + "System.Reactive": { + "type": "Transitive", + "resolved": "5.0.0", + "contentHash": "erBZjkQHWL9jpasCE/0qKAryzVBJFxGHVBAvgRN1bzM0q2s1S4oYREEEL0Vb+1kA/6BKb5FjUZMp5VXmy+gzkQ==" + }, + "System.Reflection.Metadata": { + "type": "Transitive", + "resolved": "1.6.0", + "contentHash": "COC1aiAJjCoA5GBF+QKL2uLqEBew4JsCkQmoHKbN3TlOZKa2fKLz5CpiRQKDz0RsAOEGsVKqOD5bomsXq/4STQ==" + }, + "System.Reflection.TypeExtensions": { + "type": "Transitive", + "resolved": "4.7.0", + "contentHash": "VybpaOQQhqE6siHppMktjfGBw1GCwvCqiufqmP8F1nj7fTUNtW35LOEt3UZTEsECfo+ELAl/9o9nJx3U91i7vA==" + }, + "System.Runtime.CompilerServices.Unsafe": { + "type": "Transitive", + "resolved": "4.5.1", + "contentHash": "Zh8t8oqolRaFa9vmOZfdQm/qKejdqz0J9kr7o2Fu0vPeoH3BL1EOXipKWwkWtLT1JPzjByrF19fGuFlNbmPpiw==" + }, + "System.Text.Encodings.Web": { + "type": "Transitive", + "resolved": "4.5.0", + "contentHash": "Xg4G4Indi4dqP1iuAiMSwpiWS54ZghzR644OtsRCm/m/lBMG8dUBhLVN7hLm8NNrNTR+iGbshCPTwrvxZPlm4g==" + }, + "speckle.core": { + "type": "Project", + "dependencies": { + "GraphQL.Client": "[6.0.0, )", + "Microsoft.CSharp": "[4.7.0, )", + "Microsoft.Data.Sqlite": "[7.0.5, )", + "Polly": "[7.2.3, )", + "Polly.Contrib.WaitAndRetry": "[1.1.1, )", + "Polly.Extensions.Http": "[3.0.0, )", + "Sentry": "[3.33.0, )", + "Sentry.Serilog": "[3.33.0, )", + "Serilog": "[2.12.0, )", + "Serilog.Enrichers.ClientInfo": "[1.3.0, )", + "Serilog.Exceptions": "[8.4.0, )", + "Serilog.Sinks.Console": "[4.1.0, )", + "Serilog.Sinks.Seq": "[5.2.2, )", + "SerilogTimings": "[3.0.1, )", + "Speckle.Newtonsoft.Json": "[13.0.2, )", + "System.DoubleNumerics": "[3.1.3, )" + } + }, + "Speckle.Transports.Disk": { + "type": "Project", + "dependencies": { + "Speckle.Core": "[1.0.0, )" + } + }, + "GraphQL.Client": { + "type": "CentralTransitive", + "requested": "[6.0.0, )", + "resolved": "6.0.0", + "contentHash": "8yPNBbuVBpTptivyAlak4GZvbwbUcjeQTL4vN1HKHRuOykZ4r7l5fcLS6vpyPyLn0x8FsL31xbOIKyxbmR9rbA==", + "dependencies": { + "GraphQL.Client.Abstractions": "6.0.0", + "GraphQL.Client.Abstractions.Websocket": "6.0.0", + "System.Reactive": "5.0.0" + } + }, + "Microsoft.Data.Sqlite": { + "type": "CentralTransitive", + "requested": "[7.0.5, )", + "resolved": "7.0.5", + "contentHash": "KGxbPeWsQMnmQy43DSBxAFtHz3l2JX8EWBSGUCvT3CuZ8KsuzbkqMIJMDOxWtG8eZSoCDI04aiVQjWuuV8HmSw==", + "dependencies": { + "Microsoft.Data.Sqlite.Core": "7.0.5", + "SQLitePCLRaw.bundle_e_sqlite3": "2.1.4" + } + }, + "Polly": { + "type": "CentralTransitive", + "requested": "[7.2.3, )", + "resolved": "7.2.3", + "contentHash": "DeCY0OFbNdNxsjntr1gTXHJ5pKUwYzp04Er2LLeN3g6pWhffsGuKVfMBLe1lw7x76HrPkLxKEFxBlpRxS2nDEQ==" + }, + "Polly.Contrib.WaitAndRetry": { + "type": "CentralTransitive", + "requested": "[1.1.1, )", + "resolved": "1.1.1", + "contentHash": "1MUQLiSo4KDkQe6nzQRhIU05lm9jlexX5BVsbuw0SL82ynZ+GzAHQxJVDPVBboxV37Po3SG077aX8DuSy8TkaA==" + }, + "Polly.Extensions.Http": { + "type": "CentralTransitive", + "requested": "[3.0.0, )", + "resolved": "3.0.0", + "contentHash": "drrG+hB3pYFY7w1c3BD+lSGYvH2oIclH8GRSehgfyP5kjnFnHKQuuBhuHLv+PWyFuaTDyk/vfRpnxOzd11+J8g==", + "dependencies": { + "Polly": "7.1.0" + } + }, + "Sentry": { + "type": "CentralTransitive", + "requested": "[3.33.0, )", + "resolved": "3.33.0", + "contentHash": "8vbD2o6IR2wrRrkSiRbnodWGWUOqIlwYtzpjvPNOb5raJdOf+zxMwfS8f6nx9bmrTTfDj7KrCB8C/5OuicAc8A==" + }, + "Sentry.Serilog": { + "type": "CentralTransitive", + "requested": 
"[3.33.0, )", + "resolved": "3.33.0", + "contentHash": "V8BU7QGWg2qLYfNPqtuTBhC1opysny5l+Ifp6J6PhOeAxU0FssR7nYfbJVetrnLIoh2rd3DlJ6hHYYQosQYcUQ==", + "dependencies": { + "Sentry": "3.33.0", + "Serilog": "2.10.0" + } + }, + "Serilog": { + "type": "CentralTransitive", + "requested": "[2.12.0, )", + "resolved": "2.12.0", + "contentHash": "xaiJLIdu6rYMKfQMYUZgTy8YK7SMZjB4Yk50C/u//Z4OsvxkUfSPJy4nknfvwAC34yr13q7kcyh4grbwhSxyZg==" + }, + "Serilog.Enrichers.ClientInfo": { + "type": "CentralTransitive", + "requested": "[1.3.0, )", + "resolved": "1.3.0", + "contentHash": "mTc7PM+wC9Hr7LWSwqt5mmnlAr7RJs+eTb3PGPRhwdOackk95MkhUZognuxXEdlW19HAFNmEBTSBY5DfLwM8jQ==", + "dependencies": { + "Microsoft.AspNetCore.Http": "2.2.2", + "Serilog": "2.9.0" + } + }, + "Serilog.Exceptions": { + "type": "CentralTransitive", + "requested": "[8.4.0, )", + "resolved": "8.4.0", + "contentHash": "nc/+hUw3lsdo0zCj0KMIybAu7perMx79vu72w0za9Nsi6mWyNkGXxYxakAjWB7nEmYL6zdmhEQRB4oJ2ALUeug==", + "dependencies": { + "Serilog": "2.8.0", + "System.Reflection.TypeExtensions": "4.7.0" + } + }, + "Serilog.Sinks.Console": { + "type": "CentralTransitive", + "requested": "[4.1.0, )", + "resolved": "4.1.0", + "contentHash": "K6N5q+5fetjnJPvCmkWOpJ/V8IEIoMIB1s86OzBrbxwTyHxdx3pmz4H+8+O/Dc/ftUX12DM1aynx/dDowkwzqg==", + "dependencies": { + "Serilog": "2.10.0" + } + }, + "Serilog.Sinks.Seq": { + "type": "CentralTransitive", + "requested": "[5.2.2, )", + "resolved": "5.2.2", + "contentHash": "1Csmo5ua7NKUe0yXUx+zsRefjAniPWcXFhUXxXG8pwo0iMiw2gjn9SOkgYnnxbgWqmlGv236w0N/dHc2v5XwMg==", + "dependencies": { + "Serilog": "2.12.0", + "Serilog.Formatting.Compact": "1.1.0", + "Serilog.Sinks.File": "5.0.0", + "Serilog.Sinks.PeriodicBatching": "3.1.0" + } + }, + "SerilogTimings": { + "type": "CentralTransitive", + "requested": "[3.0.1, )", + "resolved": "3.0.1", + "contentHash": "Zs28eTgszAMwpIrbBnWHBI50yuxL50p/dmAUWmy75+axdZYK/Sjm5/5m1N/CisR8acJUhTVcjPZrsB1P5iv0Uw==", + "dependencies": { + "Serilog": "2.10.0" + } + }, + "Speckle.Newtonsoft.Json": { + "type": "CentralTransitive", + "requested": "[13.0.2, )", + "resolved": "13.0.2", + "contentHash": "g1BejUZwax5PRfL6xHgLEK23sqHWOgOj9hE7RvfRRlN00AGt8GnPYt8HedSK7UB3HiRW8zCA9Pn0iiYxCK24BA==" + }, + "System.DoubleNumerics": { + "type": "CentralTransitive", + "requested": "[3.1.3, )", + "resolved": "3.1.3", + "contentHash": "KRKEM/L3KBodjA9VOg3EifFVWUY6EOqaMB05UvPEDm7Zeby/kZW+4kdWUEPzW6xtkwf46p661L9NrbeeQhtLzw==", + "dependencies": { + "NETStandard.Library": "1.6.1" + } + } + } + } +} \ No newline at end of file diff --git a/tests/Tests/Objects.Tests.Unit/GenericTests.cs b/tests/Tests/Objects.Tests.Unit/GenericTests.cs new file mode 100644 index 00000000..1252aa1c --- /dev/null +++ b/tests/Tests/Objects.Tests.Unit/GenericTests.cs @@ -0,0 +1,54 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using NUnit.Framework; +using Speckle.Core.Kits; +using Speckle.Core.Models; + +namespace Objects.Tests.Unit; + +[TestFixture] +public class GenericTests +{ + public static IEnumerable AvailableTypesInKit() + { + // Get all types in the Objects assembly that inherit from Base + return Assembly.GetAssembly(typeof(ObjectsKit)).GetTypes().Where(t => typeof(Base).IsAssignableFrom(t)); + } + + public static IEnumerable NonAbstractTypesInKit() + { + return AvailableTypesInKit().Where(t => !t.IsAbstract); + } + + [ + Test(Description = "Checks that all objects inside the Default Kit have empty constructors."), + TestCaseSource(nameof(NonAbstractTypesInKit)) + ] + public void 
ObjectHasEmptyConstructor(Type t) + { + var constructor = t.GetConstructor(Type.EmptyTypes); + Assert.That(constructor, Is.Not.Null); + } + + [ + Test( + Description = "Checks that all methods with the 'SchemaComputed' attribute inside the Default Kit have no parameters." + ), + TestCaseSource(nameof(AvailableTypesInKit)) + ] + public void SchemaComputedMethod_CanBeCalledWithNoParameters(Type t) + { + t.GetMethods() + .Where(m => m.IsDefined(typeof(SchemaComputedAttribute))) + .ToList() + .ForEach(m => + { + // Check if all parameters are optional. + // This allows for other methods to be used as long as they can be called empty. + // But also covers the basic case of having no parameters in the first place. + Assert.That(m.GetParameters().All(p => p.IsOptional), Is.True); + }); + } +} diff --git a/tests/Tests/Objects.Tests.Unit/Geometry/ArcTests.cs b/tests/Tests/Objects.Tests.Unit/Geometry/ArcTests.cs new file mode 100644 index 00000000..d5343edc --- /dev/null +++ b/tests/Tests/Objects.Tests.Unit/Geometry/ArcTests.cs @@ -0,0 +1,23 @@ +using System; +using NUnit.Framework; +using Objects.Geometry; + +namespace Objects.Tests.Unit.Geometry; + +[TestFixture, TestOf(typeof(Arc))] +public class ArcTests +{ + private Plane TestPlane => new(new Point(0, 0), new Vector(0, 0, 1), new Vector(1, 0, 0), new Vector(0, 1, 0)); + + [Test] + public void CanCreateArc_HalfCircle() + { + var arc = new Arc(TestPlane, new Point(-5, 5), new Point(5, 5), Math.PI); + + Assert.That(arc.startAngle, Is.EqualTo(0)); + Assert.That(arc.endAngle, Is.EqualTo(Math.PI)); + + Assert.That(Point.Distance(arc.midPoint, new Point(0, 0)), Is.EqualTo(0).Within(0.0001)); + Assert.That(Point.Distance(arc.plane.origin, new Point(0, 5)), Is.EqualTo(0).Within(0.0001)); + } +} diff --git a/tests/Tests/Objects.Tests.Unit/Geometry/MeshTests.cs b/tests/Tests/Objects.Tests.Unit/Geometry/MeshTests.cs new file mode 100644 index 00000000..f17f729b --- /dev/null +++ b/tests/Tests/Objects.Tests.Unit/Geometry/MeshTests.cs @@ -0,0 +1,44 @@ +using NUnit.Framework; +using Objects.Geometry; + +namespace Objects.Tests.Unit.Geometry; + +[TestFixture, TestOf(typeof(Mesh))] +public class MeshTests +{ + private static Mesh[] s_testCaseSource = { CreateBlenderStylePolygon(), CreateRhinoStylePolygon() }; + + [Test, TestCaseSource(nameof(s_testCaseSource))] + public void CanAlignVertices(Mesh inPolygon) + { + inPolygon.AlignVerticesWithTexCoordsByIndex(); + + Assert.That(inPolygon.VerticesCount, Is.EqualTo(inPolygon.TextureCoordinatesCount)); + + var expectedPolygon = CreateRhinoStylePolygon(); + + Assert.That(inPolygon.vertices, Is.EquivalentTo(expectedPolygon.vertices)); + Assert.That(inPolygon.faces, Is.EquivalentTo(expectedPolygon.faces)); + Assert.That(inPolygon.textureCoordinates, Is.EquivalentTo(expectedPolygon.textureCoordinates)); + } + + private static Mesh CreateRhinoStylePolygon() + { + return new Mesh + { + vertices = { 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0 }, + faces = { 3, 0, 1, 2, 3, 3, 4, 5 }, + textureCoordinates = { 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0 } + }; + } + + private static Mesh CreateBlenderStylePolygon() + { + return new Mesh + { + vertices = { 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0 }, + faces = { 3, 0, 1, 2, 3, 0, 2, 3 }, + textureCoordinates = { 0, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0 } + }; + } +} diff --git a/tests/Tests/Objects.Tests.Unit/Geometry/PointTests.cs b/tests/Tests/Objects.Tests.Unit/Geometry/PointTests.cs new file mode 100644 index 00000000..c66ab42c --- /dev/null +++ 
b/tests/Tests/Objects.Tests.Unit/Geometry/PointTests.cs @@ -0,0 +1,53 @@ +using System.Diagnostics.CodeAnalysis; +using NUnit.Framework; +using Objects.Geometry; + +namespace Objects.Tests.Unit.Geometry; + +[TestFixture, TestOf(typeof(Point))] +public class PointTests +{ + [Test] + [SuppressMessage( + "Assertion", + "NUnit2010:Use EqualConstraint for better assertion messages in case of failure", + Justification = "Need to explicitly test equality operator" + )] + public void TestNull() + { + Point a = null; + Point b = null; + Point c = new(0, 0, 0, null); + + Assert.Multiple(() => + { + Assert.That(a == b, Is.True); + Assert.That(a != b, Is.False); + Assert.That(b == a, Is.True); + Assert.That(b != a, Is.False); + + Assert.That(a == c, Is.False); + Assert.That(a != c, Is.True); + Assert.That(c == a, Is.False); + Assert.That(c != a, Is.True); + }); + } + + [Test] + [TestCase(1, 1, 1, "m", 1, 1, 1, "m", ExpectedResult = true)] + [TestCase(1, 1, 1, "m", 0, 1, 1, "m", ExpectedResult = false)] + [TestCase(1, 1, 1, "m", 1, 0, 1, "m", ExpectedResult = false)] + [TestCase(1, 1, 1, "m", 1, 1, 0, "m", ExpectedResult = false)] + [TestCase(1, 1, 1, "", 1, 1, 1, "", ExpectedResult = true)] + [TestCase(1, 1, 1, null, 1, 1, 1, null, ExpectedResult = true)] + [TestCase(1, 1, 1, "m", 1, 1, 1, "meters", ExpectedResult = false)] + [TestCase(1, 1, 1, "m", 1, 1, 1, "M", ExpectedResult = false)] + // Units + public bool TestEqual(double x1, double y1, double z1, string units1, double x2, double y2, double z2, string units2) + { + Point p1 = new(x1, y1, z1, units1); + Point p2 = new(x2, y2, z2, units2); + + return p1 == p2; + } +} diff --git a/tests/Tests/Objects.Tests.Unit/Geometry/TransformTests.cs b/tests/Tests/Objects.Tests.Unit/Geometry/TransformTests.cs new file mode 100644 index 00000000..4a58004c --- /dev/null +++ b/tests/Tests/Objects.Tests.Unit/Geometry/TransformTests.cs @@ -0,0 +1,129 @@ +using System.Collections; +using System.DoubleNumerics; +using NUnit.Framework; +using Objects.Other; +using Speckle.Core.Kits; + +namespace Objects.Tests.Unit.Geometry; + +[TestFixture, TestOf(typeof(Transform))] +public class TransformTests +{ + private const float FLOAT_TOLERANCE = 1e-6f; + + [Test, TestCaseSource(nameof(TransformTestCases))] + public void ArrayBackAndForth(Matrix4x4 data) + { + var start = new Transform(data); + var asArr = start.ToArray(); + var end = new Transform(asArr); + + Assert.That(end.matrix, Is.EqualTo(data)); + } + + [Test, TestCaseSource(nameof(TransformTestCases))] + public void ConvertToUnits(Matrix4x4 data) + { + const float SF = 1000f; + + var transpose = Matrix4x4.Transpose(data); //NOTE: Transform expects matrices transposed (translation in column 4) + var mm = Matrix4x4.Transpose( + Transform.CreateMatrix(new Transform(transpose, Units.Meters).ConvertToUnits(Units.Millimeters)) + ); + + Matrix4x4.Decompose(data, out var ms, out var mr, out var mt); + Matrix4x4.Decompose(mm, out var mms, out var mmr, out var mmt); + + Assert.Multiple(() => + { + Assert.That(mms.X, Is.EqualTo(ms.X).Within(FLOAT_TOLERANCE), "Expect scale x to be unchanged"); + Assert.That(mms.Y, Is.EqualTo(ms.Y).Within(FLOAT_TOLERANCE), "Expect scale y to be unchanged"); + Assert.That(mms.Z, Is.EqualTo(ms.Z).Within(FLOAT_TOLERANCE), "Expect scale z to be unchanged"); + + Assert.That(Quaternion.Dot(mr, mmr), Is.LessThan(1).Within(FLOAT_TOLERANCE), "Expect rot x to be equivalent"); + + Assert.That(mmt.X, Is.EqualTo(mt.X * SF).Within(FLOAT_TOLERANCE), $"Expect translation x to be scaled by {SF}"); + 
Assert.That(mmt.Y, Is.EqualTo(mt.Y * SF).Within(FLOAT_TOLERANCE), $"Expect translation y to be scaled by {SF}"); + Assert.That(mmt.Z, Is.EqualTo(mt.Z * SF).Within(FLOAT_TOLERANCE), $"Expect translation z to be scaled by {SF}"); + }); + } + + [ + Test(Description = "Tests that Transform decompose matches the behaviour of Matrix4x4"), + TestCaseSource(nameof(TransformTestCases)) + ] + public void Decompose(Matrix4x4 data) + { + var transpose = Matrix4x4.Transpose(data); //NOTE: Transform expects matrices transposed (translation in column 4) + var sut = new Transform(transpose); + + sut.Decompose(out var s, out var r, out var t); + var actual = new Vector3(t.X, t.Y, t.Z); + Matrix4x4.Decompose(data, out var expectedS, out var expectedR, out var expectedT); + + Assert.Multiple(() => + { + Assert.That(s.X, Is.EqualTo(expectedS.X).Within(FLOAT_TOLERANCE), "Expect scale x to be unchanged"); + Assert.That(s.Y, Is.EqualTo(expectedS.Y).Within(FLOAT_TOLERANCE), "Expect scale y to be unchanged"); + Assert.That(s.Z, Is.EqualTo(expectedS.Z).Within(FLOAT_TOLERANCE), "Expect scale z to be unchanged"); + + Assert.That( + Quaternion.Dot(r, expectedR), + Is.LessThan(1).Within(FLOAT_TOLERANCE), + "Expect rot x to be equivalent" + ); + + Assert.That(actual.X, Is.EqualTo(expectedT.X).Within(FLOAT_TOLERANCE), "Expect translation x to be unchanged"); + Assert.That(actual.Y, Is.EqualTo(expectedT.Y).Within(FLOAT_TOLERANCE), "Expect translation y to be unchanged"); + Assert.That(actual.Z, Is.EqualTo(expectedT.Z).Within(FLOAT_TOLERANCE), "Expect translation z to be unchanged"); + }); + } + + /// + /// Set of TRS transforms (row dominant i.e. translation in row 4) + /// All with non-negative scale and rotation (for ease of testing scale and rot independently) + /// + /// + private static IEnumerable TransformTestCases() + { + var t = new Vector3(128.128f, 255.255f, 512.512f); + var r = Quaternion.CreateFromYawPitchRoll(1.9f, 0.6666667f, 0.5f); + var s = new Vector3(123f, 32f, 0.5f); + + yield return new TestCaseData(Matrix4x4.Identity).SetName("{m} Identity Matrix"); + + yield return new TestCaseData(Matrix4x4.CreateTranslation(t)).SetName("{m} Translation Only (positive)"); + + yield return new TestCaseData(Matrix4x4.CreateTranslation(t * -Vector3.UnitX)).SetName("{m} Translation Only -X"); + + yield return new TestCaseData(Matrix4x4.CreateTranslation(t * -Vector3.UnitY)).SetName("{m} Translation Only -Y"); + + yield return new TestCaseData(Matrix4x4.CreateTranslation(t * -Vector3.UnitZ)).SetName("{m} Translation Only -Z"); + + yield return new TestCaseData(Matrix4x4.CreateTranslation(-t)).SetName("{m} Translation Only -XYZ "); + + yield return new TestCaseData(Matrix4x4.CreateFromYawPitchRoll(0.5f, 0.0f, 0.0f)).SetName("{m} Rotation Only X "); + + yield return new TestCaseData(Matrix4x4.CreateFromYawPitchRoll(0.0f, 0.5f, 0.0f)).SetName("{m} Rotation Only Y "); + + yield return new TestCaseData(Matrix4x4.CreateFromYawPitchRoll(0.0f, 0.0f, 0.5f)).SetName("{m} Rotation Only Z "); + + yield return new TestCaseData(Matrix4x4.CreateFromYawPitchRoll(0.5f, 0.5f, 0.5f)).SetName("{m} Rotation Only XYZ "); + + yield return new TestCaseData(Matrix4x4.CreateFromQuaternion(r)).SetName("{m} Rotation Only"); + + yield return new TestCaseData(Matrix4x4.Identity + Matrix4x4.CreateScale(s)).SetName("{m} Scale Only"); + + yield return new TestCaseData(Matrix4x4.CreateTranslation(t) + Matrix4x4.CreateFromQuaternion(r)).SetName( + "{m} Translation + Rotation" + ); + + yield return new TestCaseData( + Matrix4x4.CreateTranslation(t) + 
Matrix4x4.CreateFromQuaternion(r) + Matrix4x4.CreateScale(s) + ).SetName("{m} Translation + Rotation + Scale"); + + yield return new TestCaseData( + Matrix4x4.CreateTranslation(t) + Matrix4x4.CreateFromQuaternion(r) + Matrix4x4.CreateScale(-s) + ).SetName("{m} Translation + Rotation + -Scale"); + } +} diff --git a/tests/Tests/Objects.Tests.Unit/NUnit_Fixtures.cs b/tests/Tests/Objects.Tests.Unit/NUnit_Fixtures.cs new file mode 100644 index 00000000..cd01db24 --- /dev/null +++ b/tests/Tests/Objects.Tests.Unit/NUnit_Fixtures.cs @@ -0,0 +1,22 @@ +using NUnit.Framework; +using Speckle.Core.Logging; + +namespace Objects.Tests.Unit; + +[SetUpFixture] +public class NUnitFixtures +{ + [OneTimeSetUp] + public void RunBeforeAnyTests() + { + SpeckleLog.Initialize( + "ObjectsTests", + "Testing", + new SpeckleLogConfiguration(logToConsole: false, logToFile: false, logToSeq: false) + ); + SpeckleLog.Logger.Information("Initialized logger for testing"); + } + + [OneTimeTearDown] + public void RunAfterAnyTests() { } +} diff --git a/tests/Tests/Objects.Tests.Unit/Objects.Tests.Unit.csproj b/tests/Tests/Objects.Tests.Unit/Objects.Tests.Unit.csproj new file mode 100644 index 00000000..808077f3 --- /dev/null +++ b/tests/Tests/Objects.Tests.Unit/Objects.Tests.Unit.csproj @@ -0,0 +1,40 @@ + + + + net7.0 + false + true + + + + true + + + + + + + + + + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + diff --git a/tests/Tests/Objects.Tests.Unit/Utils/MeshTriangulationHelperTests.cs b/tests/Tests/Objects.Tests.Unit/Utils/MeshTriangulationHelperTests.cs new file mode 100644 index 00000000..0a47c077 --- /dev/null +++ b/tests/Tests/Objects.Tests.Unit/Utils/MeshTriangulationHelperTests.cs @@ -0,0 +1,92 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using NUnit.Framework; +using Objects.Geometry; +using Objects.Utils; + +namespace Objects.Tests.Unit.Utils; + +[TestFixture, TestOf(typeof(MeshTriangulationHelper))] +public class MeshTriangulationHelperTests +{ + [Test] + public void PolygonTest([Range(3, 9)] int n, [Values] bool planar) + { + //Test Setup + List vertices = new(n) { 0, planar ? 
0 : 1, 1 };
+    for (int i = 1; i < n; i++)
+    {
+      vertices.Add(i);
+      vertices.Add(0);
+      vertices.Add(0);
+    }
+
+    List<int> faces = new(n + 1) { n };
+    faces.AddRange(Enumerable.Range(0, n));
+
+    Mesh mesh = new(vertices, faces);
+
+    //Test
+    mesh.TriangulateMesh();
+
+    //Results
+    int numExpectedTriangles = n - 2;
+    int expectedFaceCount = numExpectedTriangles * 4;
+
+    Assert.That(mesh.faces, Has.Count.EqualTo(expectedFaceCount));
+    for (int i = 0; i < expectedFaceCount; i += 4)
+    {
+      Assert.That(mesh.faces[i], Is.EqualTo(3));
+      Assert.That(mesh.faces.GetRange(i + 1, 3), Is.Unique);
+    }
+
+    Assert.That(mesh.faces, Is.SupersetOf(Enumerable.Range(0, n)));
+
+    Assert.That(mesh.faces, Is.All.GreaterThanOrEqualTo(0));
+    Assert.That(mesh.faces, Is.All.LessThan(Math.Max(n, 4)));
+  }
+
+  [Test]
+  public void DoesntFlipNormals()
+  {
+    //Test Setup
+    List<double> vertices = new() { 0, 0, 0, 1, 0, 0, 1, 0, 1 };
+
+    List<int> faces = new() { 3, 0, 1, 2 };
+
+    Mesh mesh = new(vertices, new List<int>(faces));
+
+    //Test
+    mesh.TriangulateMesh();
+
+    //Results
+
+    List<int> shift1 = faces;
+    List<int> shift2 = new() { 3, 1, 2, 0 };
+    List<int> shift3 = new() { 3, 2, 0, 1 };
+
+    Assert.That(mesh.faces, Is.AnyOf(shift1, shift2, shift3));
+  }
+
+  [Test]
+  public void PreserveQuads([Values] bool preserveQuads)
+  {
+    //Test Setup
+    List<double> vertices = new() { 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1 };
+
+    List<int> faces = new() { 4, 0, 1, 2, 3 };
+
+    Mesh mesh = new(vertices, new List<int>(faces));
+
+    //Test
+    mesh.TriangulateMesh(preserveQuads);
+
+    //Results
+    int expectedN = preserveQuads ? 4 : 3;
+    int expectedFaceCount = preserveQuads ? 5 : 8;
+
+    Assert.That(mesh.faces, Has.Count.EqualTo(expectedFaceCount));
+    Assert.That(mesh.faces[0], Is.EqualTo(expectedN));
+  }
+}
diff --git a/tests/Tests/Objects.Tests.Unit/Utils/ShallowCopyTests.cs b/tests/Tests/Objects.Tests.Unit/Utils/ShallowCopyTests.cs
new file mode 100644
index 00000000..20550a2a
--- /dev/null
+++ b/tests/Tests/Objects.Tests.Unit/Utils/ShallowCopyTests.cs
@@ -0,0 +1,26 @@
+using System.Collections;
+using System.Collections.Generic;
+using NUnit.Framework;
+using Objects.BuiltElements;
+using Objects.Geometry;
+using Speckle.Core.Kits;
+
+namespace Objects.Tests.Unit.Utils;
+
+[TestFixture]
+public class ShallowCopyTests
+{
+  [Test]
+  public void CanShallowCopy_Wall()
+  {
+    var wall = new Wall(5, new Line(new Point(0, 0), new Point(3, 0)))
+    {
+      units = Units.Meters,
+      displayValue = new List<Mesh> { new(), new() }
+    };
+
+    var shallow = wall.ShallowCopy();
+    var displayValue = (IList)shallow["displayValue"];
+    Assert.That(wall.displayValue, Has.Count.EqualTo(displayValue.Count));
+  }
+}
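Note: the abstract TransportTests fixture above is meant to be reused per transport implementation by overriding Sut. As a minimal illustrative sketch (not part of this patch; the class name and per-test setup are assumptions), a concrete fixture for the MemoryTransport already referenced in CopyObjectAndChildren could look like this:

using NUnit.Framework;
using Speckle.Core.Transports;

namespace Speckle.Core.Tests.Unit.Transports;

[TestFixture]
public sealed class MemoryTransportTests : TransportTests
{
  // Recreate the in-memory transport for every test so cases cannot observe each other's writes.
  private MemoryTransport _memoryTransport = new();

  protected override ITransport Sut => _memoryTransport;

  [SetUp]
  public void Setup()
  {
    _memoryTransport = new MemoryTransport();
  }
}

The same shape would apply to an SQLiteTransport-backed fixture, with the database file cleaned up in teardown as in the SQLite tests above.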