From 5f2dd97064c73185de6e5ed3a96f0a4b5aebc7fb Mon Sep 17 00:00:00 2001 From: rogeralsing Date: Tue, 28 Apr 2015 08:14:20 +0200 Subject: [PATCH 01/66] Removed Reentrancy behavior --- .../Akka.Tests/Dispatch/AsyncAwaitSpec.cs | 38 ++++--------------- .../Akka/Actor/ActorCell.DefaultMessages.cs | 22 ----------- src/core/Akka/Actor/ReceiveActor.cs | 12 +----- src/core/Akka/Actor/UntypedActor.cs | 8 ++-- src/core/Akka/Dispatch/ActorTaskScheduler.cs | 31 +++++---------- .../Akka/Dispatch/SysMsg/ISystemMessage.cs | 32 ---------------- 6 files changed, 22 insertions(+), 121 deletions(-) diff --git a/src/core/Akka.Tests/Dispatch/AsyncAwaitSpec.cs b/src/core/Akka.Tests/Dispatch/AsyncAwaitSpec.cs index eb14022b53c..47d23ce82fb 100644 --- a/src/core/Akka.Tests/Dispatch/AsyncAwaitSpec.cs +++ b/src/core/Akka.Tests/Dispatch/AsyncAwaitSpec.cs @@ -8,8 +8,6 @@ using System; using System.Threading.Tasks; using Akka.Actor; -using Akka.Dispatch; -using Akka.Event; using Akka.TestKit; using Xunit; @@ -29,8 +27,6 @@ public AsyncActor() } }); } - - ILoggingAdapter Log = Context.GetLogger(); } public class SuspendActor : ReceiveActor @@ -42,7 +38,7 @@ public SuspendActor() { state = 1; }); - Receive(AsyncBehavior.Suspend, async _ => + Receive(async _ => { Self.Tell("change"); await Task.Delay(TimeSpan.FromSeconds(1)); @@ -51,21 +47,7 @@ public SuspendActor() }); } } - public class ReentrantActor : ReceiveActor - { - public ReentrantActor() - { - var state = 0; - Receive(s => s == "change", _ => state = 1); - Receive(AsyncBehavior.Reentrant, async _ => - { - Self.Tell("change"); - await Task.Delay(TimeSpan.FromSeconds(1)); - //we expect that state should have changed due to an incoming message - Sender.Tell(state); - }); - } - } + public class AsyncAwaitActor : ReceiveActor { public AsyncAwaitActor() @@ -89,7 +71,7 @@ protected override void OnReceive(object message) { if (message is string) { - RunTask(AsyncBehavior.Suspend, async () => + RunTask(async () => { var sender = Sender; var self = Self; @@ -132,7 +114,7 @@ protected override void OnReceive(object message) { if (message is string) { - RunTask(AsyncBehavior.Suspend, async () => + RunTask(async () => { var sender = Sender; var self = Self; @@ -210,7 +192,7 @@ public AsyncTplActor() Receive(m => { //this is also safe, all tasks complete in the actor context - RunTask(AsyncBehavior.Suspend, () => + RunTask(() => { Task.Delay(TimeSpan.FromSeconds(1)) .ContinueWith(t => { Sender.Tell("done"); }); @@ -228,7 +210,7 @@ public AsyncTplExceptionActor(IActorRef callback) _callback = callback; Receive(m => { - RunTask(AsyncBehavior.Suspend, () => + RunTask(() => { Task.Delay(TimeSpan.FromSeconds(1)) .ContinueWith(t => { throw new Exception("foo"); }); @@ -320,13 +302,7 @@ public void Actors_should_be_able_to_supervise_exception_ContinueWith() asker.Tell("start"); ExpectMsg("done", TimeSpan.FromSeconds(5)); } - [Fact] - public async Task Actors_should_be_able_to_reenter() - { - var asker = Sys.ActorOf(Props.Create(() => new ReentrantActor())); - var res = await asker.Ask("start",TimeSpan.FromSeconds(5)); - res.ShouldBe(1); - } + [Fact] public async Task Actors_should_be_able_to_suspend_reentrancy() diff --git a/src/core/Akka/Actor/ActorCell.DefaultMessages.cs b/src/core/Akka/Actor/ActorCell.DefaultMessages.cs index 9b2e36d4279..4d6b48e630c 100644 --- a/src/core/Akka/Actor/ActorCell.DefaultMessages.cs +++ b/src/core/Akka/Actor/ActorCell.DefaultMessages.cs @@ -174,8 +174,6 @@ public void SystemInvoke(Envelope envelope) else if (m is Recreate) FaultRecreate((m as 
Recreate).Cause); else if (m is Suspend) FaultSuspend(); else if (m is Resume) FaultResume((m as Resume).CausedByFailure); - else if (m is SuspendReentrancy) HandleSuspendReentrancy(); - else if (m is ResumeReentrancy) HandleResumeReentrancy(); else if (m is Terminate) Terminate(); else if (m is Supervise) { @@ -193,16 +191,6 @@ public void SystemInvoke(Envelope envelope) } } - private void HandleSuspendReentrancy() - { - Mailbox.Suspend(MailboxSuspendStatus.AwaitingTask); - } - - private void HandleResumeReentrancy() - { - Mailbox.Resume(MailboxSuspendStatus.AwaitingTask); - } - private void HandleCompleteTask(CompleteTask task) { CurrentMessage = task.State.Message; @@ -357,16 +345,6 @@ public void Suspend() SendSystemMessage(Dispatch.SysMsg.Suspend.Instance); } - public void SuspendReentrancy() - { - SendSystemMessage(Dispatch.SysMsg.SuspendReentrancy.Instance); - } - - public void ResumeReentrancy() - { - SendSystemMessage(Dispatch.SysMsg.ResumeReentrancy.Instance); - } - private void SendSystemMessage(ISystemMessage systemMessage) { try diff --git a/src/core/Akka/Actor/ReceiveActor.cs b/src/core/Akka/Actor/ReceiveActor.cs index a6ef0aafbb5..f6bee454c03 100644 --- a/src/core/Akka/Actor/ReceiveActor.cs +++ b/src/core/Akka/Actor/ReceiveActor.cs @@ -116,17 +116,7 @@ protected void Receive(Func handler) _matchHandlerBuilders.Peek().Match( m => { Func wrap = () => handler(m); - ActorTaskScheduler.RunTask(AsyncBehavior.Suspend, wrap); - }); - } - - protected void Receive(AsyncBehavior behavior, Func handler) - { - EnsureMayConfigureMessageHandlers(); - _matchHandlerBuilders.Peek().Match(m => - { - Func wrap = () => handler(m); - ActorTaskScheduler.RunTask(behavior, wrap); + ActorTaskScheduler.RunTask(wrap); }); } diff --git a/src/core/Akka/Actor/UntypedActor.cs b/src/core/Akka/Actor/UntypedActor.cs index 37f50f1af02..996631e10d0 100644 --- a/src/core/Akka/Actor/UntypedActor.cs +++ b/src/core/Akka/Actor/UntypedActor.cs @@ -22,14 +22,14 @@ protected sealed override bool Receive(object message) return true; } - protected void RunTask(AsyncBehavior behavior, Action action) + protected void RunTask(Action action) { - ActorTaskScheduler.RunTask(behavior,action); + ActorTaskScheduler.RunTask(action); } - protected void RunTask(AsyncBehavior behavior, Func action) + protected void RunTask(Func action) { - ActorTaskScheduler.RunTask(behavior,action); + ActorTaskScheduler.RunTask(action); } /// diff --git a/src/core/Akka/Dispatch/ActorTaskScheduler.cs b/src/core/Akka/Dispatch/ActorTaskScheduler.cs index cc94ed6f18e..659b9481080 100644 --- a/src/core/Akka/Dispatch/ActorTaskScheduler.cs +++ b/src/core/Akka/Dispatch/ActorTaskScheduler.cs @@ -15,12 +15,6 @@ namespace Akka.Dispatch { - public enum AsyncBehavior - { - Reentrant, - Suspend - } - public class AmbientState { public IActorRef Self { get; set; } @@ -85,8 +79,8 @@ protected override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQu //Is the current cell and the current state the same? 
if (cell != null && s != null && - cell.Self == s.Self && - cell.Sender == s.Sender && + Equals(cell.Self, s.Self) && + Equals(cell.Sender, s.Sender) && cell.CurrentMessage == s.Message) { var res = TryExecuteTask(task); @@ -96,24 +90,22 @@ protected override bool TryExecuteTaskInline(Task task, bool taskWasPreviouslyQu return false; } - public static void RunTask(AsyncBehavior behavior, Action action) + public static void RunTask(Action action) { - RunTask(behavior, () => + RunTask(() => { action(); return Task.FromResult(0); }); } - public static void RunTask(AsyncBehavior behavior, Func action) + public static void RunTask(Func action) { var context = ActorCell.Current; + var mailbox = context.Mailbox; - //if reentrancy is not allowed, suspend user message processing - if (behavior == AsyncBehavior.Suspend) - { - context.SuspendReentrancy(); - } + //suspend the mailbox + mailbox.Suspend(MailboxSuspendStatus.AwaitingTask); SetCurrentState(context.Self, context.Sender, null); @@ -134,11 +126,8 @@ await action() Faulted, TaskContinuationOptions.None); - //if reentrancy was suspended, make sure we re-enable message processing again - if (behavior == AsyncBehavior.Suspend) - { - context.ResumeReentrancy(); - } + //if mailbox was suspended, make sure we re-enable message processing again + mailbox.Resume(MailboxSuspendStatus.AwaitingTask); }, Outer, CancellationToken.None, diff --git a/src/core/Akka/Dispatch/SysMsg/ISystemMessage.cs b/src/core/Akka/Dispatch/SysMsg/ISystemMessage.cs index 5b932286876..54694b7a68a 100644 --- a/src/core/Akka/Dispatch/SysMsg/ISystemMessage.cs +++ b/src/core/Akka/Dispatch/SysMsg/ISystemMessage.cs @@ -360,38 +360,6 @@ public override string ToString() } } - /// - /// Class SuspendReentrancy. - /// - public sealed class SuspendReentrancy : ISystemMessage - { - private SuspendReentrancy() { } - private static readonly SuspendReentrancy _instance = new SuspendReentrancy(); - public static SuspendReentrancy Instance - { - get - { - return _instance; - } - } - } - - /// - /// Class ResumeReentrancy. - /// - public sealed class ResumeReentrancy : ISystemMessage - { - private ResumeReentrancy() { } - private static readonly ResumeReentrancy _instance = new ResumeReentrancy(); - public static ResumeReentrancy Instance - { - get - { - return _instance; - } - } - } - /// /// Class Stop. 
/// From 0ecfa5b0ac25a652833d63a4190e30742f9f0b1c Mon Sep 17 00:00:00 2001 From: Sean Gilliam Date: Tue, 28 Apr 2015 15:21:10 -0500 Subject: [PATCH 02/66] Documentation fixes for Dependency Injection - added code fences - fixed some stilted wording to flow better - rearranged sections to flow better - added relevant links to the wiki - added final example to each segment that ties the document together --- .../Akka.DI.AutoFac/Readme.md | 107 ++++--- .../Akka.DI.CastleWindsor/Readme.md | 98 +++++-- .../Akka.DI.Core/Readme.md | 269 +++++++++++------- .../Akka.DI.Ninject/Readme.md | 98 +++++-- .../Akka.DI.Unity/Readme.md | 99 +++++-- 5 files changed, 451 insertions(+), 220 deletions(-) diff --git a/src/contrib/dependencyInjection/Akka.DI.AutoFac/Readme.md b/src/contrib/dependencyInjection/Akka.DI.AutoFac/Readme.md index 32b9c2a4c34..c4fc1f22a1c 100644 --- a/src/contrib/dependencyInjection/Akka.DI.AutoFac/Readme.md +++ b/src/contrib/dependencyInjection/Akka.DI.AutoFac/Readme.md @@ -6,47 +6,90 @@ **Akka.DI.Autofac** is an **ActorSystem extension** for the Akka.NET framework that provides an alternative to the basic capabilities of [Props](http://akkadotnet.github.io/wiki/Props) when you have Actors with multiple dependencies. -If Autofac is your IOC container of choice and your Actors have dependencies that make using the factory method provided by Props prohibitive and code maintenance is an important concern then this is the extension for you. +If Autofac is your IoC container of choice and your actors have dependencies that make using the factory method provided by Props prohibitive and code maintenance is an important concern then this is the extension for you. #How to you use it? -The best way to understand how to use it is by example. If you are already considering this extension then we will assume that you know how how to use the [Autofac](https://github.com/autofac/Autofac) container. This example is demonstrating a system using ConsistentHashingGroup Routing along with extension. +The best way to understand how to use it is by example. If you are already considering this extension then we will assume that you know how how to use the [Autofac](https://github.com/autofac/Autofac) container. This example is demonstrating a system using [ConsistentHashing](http://getakka.net/docs/working-with-actors/Routers#consistenthashing) routing along with this extension. -Start by creating your builder, registering your Actors and dependencies, and build your container. +Start by creating your builder, registering your actors and dependencies, and build your container. - //Create builder - var builder = new Autofac.ContainerBuilder(); +```csharp +// Setup Autofac +ContainerBuilder builder = new ContainerBuilder(); +builder.RegisterType().As(); +builder.RegisterType(); +IContainer container = builder.Build(); +``` - //Register Actors and dependent services - builder.RegisterType().As(); - builder.RegisterType(); +Next you have to create your ```ActorSystem``` and inject that system reference along with the container reference into a new instance of the ```AutoFacDependencyResolver```. - //Build the container - Autofac.IContainer container = builder.Build(); - -Next you have to create your ActorSystem and inject that system reference along with the container reference into a new instance of the AutoFacDependencyResolver. 
+```csharp +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new AutoFacDependencyResolver(container, system); - //Create ActorSystem - using (var system = ActorSystem.Create("MySystem")) - { - //Create the dependency resolver - IDependencyResolver propsResolver = - new AutoFacDependencyResolver(container, system); + // we'll fill in the rest in the following steps +} +``` -To register the Actors with the system use method `Akka.Actor.Props Create()` Member of `IDependencyResolver` method implemented by the AutoFacDependencyResolver. - - system.ActorOf(propsResolver.Create(), "Worker1"); - system.ActorOf(propsResolver.Create(), "Worker2"); +To register the actors with the system use method ```Akka.Actor.Props Create()``` of the ```IDependencyResolver``` interface implemented by the ```AutoFacDependencyResolver```. -Finally create your router, message and send the message to the router. - - var hashGroup = - system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); - - TypedActorMessage msg = - new TypedActorMessage { Id = 1, - Name = Guid.NewGuid().ToString() }; - hashGroup.Tell(msg); +```csharp +// Register the actors with the system +system.ActorOf(resolver.Create(), "Worker1"); +system.ActorOf(resolver.Create(), "Worker2"); +``` - } +Finally create your router, message and send the message to the router. +```csharp +// Create the router +IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); + +// Create the message to send +TypedActorMessage message = new TypedActorMessage +{ + Id = 1, + Name = Guid.NewGuid().ToString() +}; + +// Send the message to the router +router.Tell(message); +``` + +The resulting code should look similar to the the following: + +```csharp +// Setup Autofac +ContainerBuilder builder = new ContainerBuilder(); +builder.RegisterType().As(); +builder.RegisterType(); +IContainer container = builder.Build(); + +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new AutoFacDependencyResolver(container, system); + + // Register the actors with the system + system.ActorOf(resolver.Create(), "Worker1"); + system.ActorOf(resolver.Create(), "Worker2"); + + // Create the router + IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); + + // Create the message to send + TypedActorMessage message = new TypedActorMessage + { + Id = 1, + Name = Guid.NewGuid().ToString() + }; + + // Send the message to the router + router.Tell(message); +} +``` diff --git a/src/contrib/dependencyInjection/Akka.DI.CastleWindsor/Readme.md b/src/contrib/dependencyInjection/Akka.DI.CastleWindsor/Readme.md index e8282aae32f..3345d2117fd 100644 --- a/src/contrib/dependencyInjection/Akka.DI.CastleWindsor/Readme.md +++ b/src/contrib/dependencyInjection/Akka.DI.CastleWindsor/Readme.md @@ -6,42 +6,88 @@ **Akka.DI.CastleWindsor** is an **ActorSystem extension** for the Akka.NET framework that provides an alternative to the basic capabilities of [Props](http://akkadotnet.github.io/wiki/Props) when you have Actors with multiple dependencies. -If CastleWindsor is your IOC container of choice and your Actors have dependencies that make using the factory method provided by Props prohibitive and code maintenance is an important concern then this is the extension for you. 
+If CastleWindsor is your IoC container of choice and your actors have dependencies that make using the factory method provided by Props prohibitive and code maintenance is an important concern then this is the extension for you. #How to you use it? -The best way to understand how to use it is by example. If you are already considering this extension then we will assume that you know how how to use the [CastleWindsor](http://www.castleproject.org/projects/windsor/) container. This example is demonstrating a system using ConsistentHashingGroup Routing along with extension. +The best way to understand how to use it is by example. If you are already considering this extension then we will assume that you know how how to use the [CastleWindsor](http://www.castleproject.org/projects/windsor/) container. This example is demonstrating a system using [ConsistentHashing](http://getakka.net/docs/working-with-actors/Routers#consistenthashing) routing along with this extension. -Start by creating your container and registering your Actors and dependencies. +Start by creating your container and registering your actors and dependencies. - IWindsorContainer container = new WindsorContainer(); - container.Register(Component.For().ImplementedBy()); - container.Register(Component.For().Named("TypedWorker").LifestyleTransient()); +```csharp +// Setup CastleWindsor +IWindsorContainer container = new WindsorContainer(); +container.Register(Component.For().ImplementedBy()); +container.Register(Component.For().Named("TypedWorker").LifestyleTransient()); +``` - -Next you have to create your ActorSystem and inject that system reference along with the container reference into a new instance of the WindsorDependencyResolver. +Next you have to create your ```ActorSystem``` and inject that system reference along with the container reference into a new instance of the ```WindsorDependencyResolver```. - //Create ActorSystem - using (var system = ActorSystem.Create("MySystem")) - { - //Create the dependency resolver - IDependencyResolver propsResolver = - new WindsorDependencyResolver(container,system); +```csharp +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new WindsorDependencyResolver(container, system); -To register the Actors with the system use method `Akka.Actor.Props Create()` Member of `IDependencyResolver` method implemented by the WindsorDependencyResolver. - - system.ActorOf(propsResolver.Create(), "Worker1"); - system.ActorOf(propsResolver.Create(), "Worker2"); + // we'll fill in the rest in the following steps +} +``` + +To register the actors with the system use method ```Akka.Actor.Props Create()``` of the ```IDependencyResolver``` interface implemented by the ```WindsorDependencyResolver```. + +```csharp +// Register the actors with the system +system.ActorOf(resolver.Create(), "Worker1"); +system.ActorOf(resolver.Create(), "Worker2"); +``` Finally create your router, message and send the message to the router. 
- var hashGroup = - system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); - - TypedActorMessage msg = - new TypedActorMessage { Id = 1, - Name = Guid.NewGuid().ToString() }; - hashGroup.Tell(msg); +```csharp +// Create the router +IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); + +// Create the message to send +TypedActorMessage message = new TypedActorMessage +{ + Id = 1, + Name = Guid.NewGuid().ToString() +}; + +// Send the message to the router +router.Tell(message); +``` + +The resulting code should look similar to the the following: + +```csharp +// Setup CastleWindsor +IWindsorContainer container = new WindsorContainer(); +container.Register(Component.For().ImplementedBy()); +container.Register(Component.For().Named("TypedWorker").LifestyleTransient()); + +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new WindsorDependencyResolver(container, system); + + // Register the actors with the system + system.ActorOf(resolver.Create(), "Worker1"); + system.ActorOf(resolver.Create(), "Worker2"); + + // Create the router + IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); - } + // Create the message to send + TypedActorMessage message = new TypedActorMessage + { + Id = 1, + Name = Guid.NewGuid().ToString() + }; + // Send the message to the router + router.Tell(message); +} +``` diff --git a/src/contrib/dependencyInjection/Akka.DI.Core/Readme.md b/src/contrib/dependencyInjection/Akka.DI.Core/Readme.md index dc2d78c564d..1ebd2b13561 100644 --- a/src/contrib/dependencyInjection/Akka.DI.Core/Readme.md +++ b/src/contrib/dependencyInjection/Akka.DI.Core/Readme.md @@ -4,128 +4,177 @@ #What is it? -**Akka.DI.Core** is an **ActorSystem extension** library for the Akka.NET framework that provides a simple way to create an Actor Dependency Resolver that can be used an alternative to the basic capabilities of [Props](http://akkadotnet.github.io/wiki/Props) when you have Actors with multiple dependencies. +**Akka.DI.Core** is an **ActorSystem extension** library for the Akka.NET framework that provides a simple way to create an Actor Dependency Resolver that can be used as an alternative to the basic capabilities of [Props](http://akkadotnet.github.io/wiki/Props) when you have actors with multiple dependencies. #How do you create an Extension? - Create a new class library -- Reference your favorite IOC Container, the Akka.DI.Core and of course Akka -- Create a class and implement the IDependencyResolver - -Let's walk through the process of creating one for CastleWindsor container. You need to create a new project named Akka.DI.CastleWindsor with all the necessary references including Akka.DI.Core, Akka and CastleWindosor. Name the initial class WindsorDependencyResolver. - - public class WindsorDependencyResolver : IDependencyResolver - { - Type GetType(string ActorName) - { - throw new NotImplementedException(); - } - - Func CreateActorFactory(Type actorType) - { - throw new NotImplementedException(); - } - - Props Create() - { - throw new NotImplementedException(); - } - - public void Release(ActorBase actor) - { - throw new NotImplementedException(); - } - } +- Reference your favorite IoC Container, the Akka.DI.Core and of course Akka +- Create a class that implements ```IDependencyResolver``` -Add a constructor and private fields. 
+Let's walk through the process of creating one for the CastleWindsor container. You need to create a new project named Akka.DI.CastleWindsor with all the necessary references including Akka.DI.Core, Akka and CastleWindsor. Name the initial class ```WindsorDependencyResolver```. + +```csharp +public class WindsorDependencyResolver : IDependencyResolver +{ + // we'll implement IDependencyResolver in the following steps +} +``` - private IWindsorContainer container; - private ActorSystem system; +Add a constructor and private fields. - public WindsorDependencyResolver(IWindsorContainer container, ActorSystem system) - { - if (system == null) throw new ArgumentNullException("system"); - if (container == null) throw new ArgumentNullException("container"); - this.container = container; - this.system = system; - this.system.AddDependencyResolver(this); - } +```csharp +private IWindsorContainer _container; +private ConcurrentDictionary _typeCache; +private ActorSystem _system; + +public WindsorDependencyResolver(IWindsorContainer container, ActorSystem system) +{ + if (system == null) throw new ArgumentNullException("system"); + if (container == null) throw new ArgumentNullException("container"); + _container = container; + _typeCache = new ConcurrentDictionary(StringComparer.InvariantCultureIgnoreCase); + _system = system; + _system.AddDependencyResolver(this); +} +``` You have defined three private fields -- IWindsorContainer container - - Reference to the container -- ActorSystem system - - Reference to the ActorSystem - -First you need to implement GetType. This is a basic implementation and is just from demonstration purposes. Essentially this is used by the Extension to get the Type of the Actor from it's type name. - - Type GetType(string actorName) - { - var firstTry = Type.GetType(actorName); - Func searchForType = () => - { - return - AppDomain. - CurrentDomain. - GetAssemblies(). - SelectMany(x => x.GetTypes()). - Where(t => t.Name.Equals(actorName)). - FirstOrDefault(); - }; - return firstTry ?? searchForType(); - } - -Secondly you need to implement the CreateActorFactory method which will be used by the extension to create the Actor. This implementation will depend upon the API of the container. - - public Func CreateActorFactory(Type actorType) - { - return () => (ActorBase)container.Resolve(actorType); - } - -Thirdly, you implement the Create which is used register the Props configuration for the referenced Actor Type with the ActorSystem. This method will always be the same implementation. - - public Props Create() where TActor : ActorBase - { - return system.GetExtension().Props(typeof(TActor).Name); - } - -Lastly, you implement the Release method which in this instance is very simple. - - public void Release(ActorBase actor) - { - this.container.Release(actor); - } +- ```IWindsorContainer _container``` is a reference to the CastleWindsor container. +- ```ConcurrentDictionary _typeCache``` is a thread safe map that contains actor name/type associations. +- ```ActorSystem _system``` is a reference to the ActorSystem. + +First you need to implement ```GetType```. This is a basic implementation and is just for demonstration purposes. Essentially this is used by the extension to get the type of the actor from it's type name. + +```csharp +public Type GetType(string actorName) +{ + _typeCache. + TryAdd(actorName, + actorName.GetTypeValue() ?? 
+ _container.Kernel + .GetAssignableHandlers(typeof(object)) + .Where(handler => handler.ComponentModel.Name.Equals(actorName, StringComparison.InvariantCultureIgnoreCase)) + .Select(handler => handler.ComponentModel.Implementation) + .FirstOrDefault()); + + return _typeCache[actorName]; +} +``` + +Secondly you need to implement the ```CreateActorFactory``` method which will be used by the extension to create the actor. This implementation will depend upon the API of the container. + +```csharp +public Func CreateActorFactory(Type actorType) +{ + return () => (ActorBase)container.Resolve(actorType); +} +``` + +Thirdly, you implement the ```Create``` which is used register the ```Props``` configuration for the referenced actor type with the ```ActorSystem```. This method will always be the same implementation. + +```csharp +public Props Create() where TActor : ActorBase +{ + return system.GetExtension().Props(typeof(TActor).Name); +} +``` + +Lastly, you implement the Release method which in this instance is very simple. This method is used to remove the actor from the underlying container. + +```csharp +public void Release(ActorBase actor) +{ + this.container.Release(actor); +} +``` **Note: For further details on the importance of the release method please read the following blog [post](http://blog.ploeh.dk/2014/05/19/di-friendly-framework/).** -So with that you can do something like the following code example: - - IWindsorContainer container = new WindsorContainer(); - container.Register(Component.For().ImplementedBy()); - container.Register(Component.For().Named("TypedWorker").LifestyleTransient()); - - //Create ActorSystem - using (var system = ActorSystem.Create("MySystem")) - { - //Create the dependency resolver - IDependencyResolver propsResolver = - new WindsorDependencyResolver(container,system); - - system.ActorOf(propsResolver.Create(), "Worker1"); - system.ActorOf(propsResolver.Create(), "Worker2"); - - var hashGroup = - system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); - - TypedActorMessage msg = - new TypedActorMessage { Id = 1, - Name = Guid.NewGuid().ToString() }; - hashGroup.Tell(msg); - - } +The resulting class should look similar to the following: + +```csharp +public class WindsorDependencyResolver : IDependencyResolver +{ + private IWindsorContainer container; + private ConcurrentDictionary typeCache; + private ActorSystem system; + + public WindsorDependencyResolver(IWindsorContainer container, ActorSystem system) + { + if (system == null) throw new ArgumentNullException("system"); + if (container == null) throw new ArgumentNullException("container"); + this.container = container; + typeCache = new ConcurrentDictionary(StringComparer.InvariantCultureIgnoreCase); + this.system = system; + this.system.AddDependencyResolver(this); + } + + public Type GetType(string actorName) + { + typeCache.TryAdd(actorName, actorName.GetTypeValue() ?? 
+ container.Kernel + .GetAssignableHandlers(typeof(object)) + .Where(handler => handler.ComponentModel.Name.Equals(actorName, StringComparison.InvariantCultureIgnoreCase)) + .Select(handler => handler.ComponentModel.Implementation) + .FirstOrDefault()); + + return typeCache[actorName]; + } + + public Func CreateActorFactory(Type actorType) + { + return () => (ActorBase)container.Resolve(actorType); + } + + public Props Create() where TActor : ActorBase + { + return system.GetExtension().Props(typeof(TActor)); + } + + public void Release(ActorBase actor) + { + this.container.Release(actor); + } +} +``` + +Now, with the preceding class, you can do something like the following example: + +```csharp +// Setup CastleWindsor +IWindsorContainer container = new WindsorContainer(); +container.Register(Component.For().ImplementedBy()); +container.Register(Component.For().Named("TypedWorker").LifestyleTransient()); + +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new WindsorDependencyResolver(container, system); + + // Register the actors with the system + system.ActorOf(resolver.Create(), "Worker1"); + system.ActorOf(resolver.Create(), "Worker2"); + + // Create the router + IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); + + // Create the message to send + TypedActorMessage message = new TypedActorMessage + { + Id = 1, + Name = Guid.NewGuid().ToString() + }; + + // Send the message to the router + router.Tell(message); +} +``` ## Creating Child Actors using DI ## -When you want to create child actors from within your existing actors using the Dependency Injection you can just use the Actor Content extension just like in the following example. - - Context.DI().ActorOf().Tell(message); +When you want to create child actors from within your existing actors using Dependency Injection you can use the Actor Content extension just like in the following example. +```csharp +Context.DI().ActorOf().Tell(message); +``` diff --git a/src/contrib/dependencyInjection/Akka.DI.Ninject/Readme.md b/src/contrib/dependencyInjection/Akka.DI.Ninject/Readme.md index b7f39da880e..34781272c1f 100644 --- a/src/contrib/dependencyInjection/Akka.DI.Ninject/Readme.md +++ b/src/contrib/dependencyInjection/Akka.DI.Ninject/Readme.md @@ -6,42 +6,88 @@ **Akka.DI.Ninject** is an **ActorSystem extension** for the Akka.NET framework that provides an alternative to the basic capabilities of [Props](http://akkadotnet.github.io/wiki/Props) when you have Actors with multiple dependencies. -If Ninject is your IOC container of choice and your Actors have dependencies that make using the factory method provided by Props prohibitive and code maintenance is an important concern then this is the extension for you. +If Ninject is your IoC container of choice and your actors have dependencies that make using the factory method provided by Props prohibitive and code maintenance is an important concern then this is the extension for you. #How to you use it? -The best way to understand how to use it is by example. If you are already considering this extension then we will assume that you know how how to use the [Ninject](http://www.ninject.org/) container. This example is demonstrating a system using ConsistentHashingGroup Routing along with extension. +The best way to understand how to use it is by example. 
If you are already considering this extension then we will assume that you know how how to use the [Ninject](http://www.ninject.org/) container. This example is demonstrating a system using [ConsistentHashing](http://getakka.net/docs/working-with-actors/Routers#consistenthashing) routing along with this extension. -Start by creating your StandardKernel, registering your Actors and dependencies. +Start by creating your ```StandardKernel```, registering your actors and dependencies. - Ninject.IKernel container = new Ninject.StandardKernel(); - container.Bind().To(typeof(TypedWorker)); - container.Bind()To(typeof)WorkerService)); +```csharp +// Setup Ninject +IKernel container = new StandardKernel(); +container.Bind().To(typeof(TypedWorker)); +container.Bind()To(typeof)WorkerService)); +``` - -Next you have to create your ActorSystem and inject that system reference along with the container reference into a new instance of the NinjectDependencyResolver. +Next you have to create your ```ActorSystem``` and inject that system reference along with the container reference into a new instance of the ```NinjectDependencyResolver```. - //Create ActorSystem - using (var system = ActorSystem.Create("MySystem")) - { - //Create the dependency resolver - IDependencyResolver propsResolver = - new NinjectDependencyResolver(container,system); +```csharp +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new NinjectDependencyResolver(container, system); -To register the Actors with the system use method `Akka.Actor.Props Create()` Member of `IDependencyResolver` method implemented by the NinjectDependencyResolver. - - system.ActorOf(propsResolver.Create(), "Worker1"); - system.ActorOf(propsResolver.Create(), "Worker2"); + // we'll fill in the rest in the following steps +} +``` + +To register the actors with the system use method ```Akka.Actor.Props Create()``` of the ```IDependencyResolver``` interface implemented by the ```NinjectDependencyResolver```. + +```csharp +// Register the actors with the system +system.ActorOf(resolver.Create(), "Worker1"); +system.ActorOf(resolver.Create(), "Worker2"); +``` Finally create your router, message and send the message to the router. 
- var hashGroup = - system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); - - TypedActorMessage msg = - new TypedActorMessage { Id = 1, - Name = Guid.NewGuid().ToString() }; - hashGroup.Tell(msg); +```csharp +// Create the router +IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); + +// Create the message to send +TypedActorMessage message = new TypedActorMessage +{ + Id = 1, + Name = Guid.NewGuid().ToString() +}; + +// Send the message to the router +router.Tell(message); +``` + +The resulting code should look similar to the the following: + +```csharp +// Setup Ninject +IKernel container = new StandardKernel(); +container.Bind().To(typeof(TypedWorker)); +container.Bind()To(typeof)WorkerService)); + +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new NinjectDependencyResolver(container, system); + + // Register the actors with the system + system.ActorOf(resolver.Create(), "Worker1"); + system.ActorOf(resolver.Create(), "Worker2"); + + // Create the router + IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); - } + // Create the message to send + TypedActorMessage message = new TypedActorMessage + { + Id = 1, + Name = Guid.NewGuid().ToString() + }; + // Send the message to the router + router.Tell(message); +} +``` diff --git a/src/contrib/dependencyInjection/Akka.DI.Unity/Readme.md b/src/contrib/dependencyInjection/Akka.DI.Unity/Readme.md index 7376a958f0c..546d45da7dc 100644 --- a/src/contrib/dependencyInjection/Akka.DI.Unity/Readme.md +++ b/src/contrib/dependencyInjection/Akka.DI.Unity/Readme.md @@ -6,41 +6,88 @@ **Akka.DI.Unity** is an **ActorSystem extension** for the Akka.NET framework that provides an alternative to the basic capabilities of [Props](http://akkadotnet.github.io/wiki/Props) when you have Actors with multiple dependencies. -If Unity is your IOC container of choice and your Actors have dependencies that make using the factory method provided by Props prohibitive and code maintenance is an important concern then this is the extension for you. +If Unity is your IoC container of choice and your actors have dependencies that make using the factory method provided by Props prohibitive and code maintenance is an important concern then this is the extension for you. #How to you use it? -The best way to understand how to use it is by example. If you are already considering this extension then we will assume that you know how how to use the [Unity](https://unity.codeplex.com/) container. This example is demonstrating a system using ConsistentHashingGroup Routing along with extension. +The best way to understand how to use it is by example. If you are already considering this extension then we will assume that you know how how to use the [Unity](https://unity.codeplex.com/) container. This example is demonstrating a system using [ConsistentHashing](http://getakka.net/docs/working-with-actors/Routers#consistenthashing) routing along with extension. -Start by creating your UnityContainer, registering your Actors and dependencies. +Start by creating your ```UnityContainer```, registering your actors and dependencies. 
- IUnityContainer container = new UnityContainer(); - container.RegisterType(); - container.RegisterType(); - -Next you have to create your ActorSystem and inject that system reference along with the container reference into a new instance of the UnityDependencyResolver. +```csharp +// Setup Unity +IUnityContainer container = new UnityContainer(); +container.RegisterType(); +container.RegisterType(); +``` - //Create ActorSystem - using (var system = ActorSystem.Create("MySystem")) - { - //Create the dependency resolver - IDependencyResolver propsResolver = - new UnityDependencyResolver(container,system); +Next you have to create your ```ActorSystem``` and inject that system reference along with the container reference into a new instance of the ```UnityDependencyResolver```. -To register the Actors with the system use method `Akka.Actor.Props Create()` Member of `IDependencyResolver` method implemented by the UnityDependencyResolver. - - system.ActorOf(propsResolver.Create(), "Worker1"); - system.ActorOf(propsResolver.Create(), "Worker2"); +```csharp +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new UnityDependencyResolver(container, system); + + // we'll fill in the rest in the following steps +} +``` + +To register the actors with the system use method ```Akka.Actor.Props Create()``` of the ```IDependencyResolver``` interface implemented by the ```UnityDependencyResolver```. + +```csharp +// Register the actors with the system +system.ActorOf(resolver.Create(), "Worker1"); +system.ActorOf(resolver.Create(), "Worker2"); +``` Finally create your router, message and send the message to the router. - var hashGroup = - system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); - - TypedActorMessage msg = - new TypedActorMessage { Id = 1, - Name = Guid.NewGuid().ToString() }; - hashGroup.Tell(msg); +```csharp +// Create the router +IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); + +// Create the message to send +TypedActorMessage message = new TypedActorMessage +{ + Id = 1, + Name = Guid.NewGuid().ToString() +}; + +// Send the message to the router +router.Tell(message); +``` + +The resulting code should look similar to the the following: + +```csharp +// Setup Unity +IUnityContainer container = new UnityContainer(); +container.RegisterType(); +container.RegisterType(); + +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new UnityDependencyResolver(container, system); + + // Register the actors with the system + system.ActorOf(resolver.Create(), "Worker1"); + system.ActorOf(resolver.Create(), "Worker2"); + + // Create the router + IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); - } + // Create the message to send + TypedActorMessage message = new TypedActorMessage + { + Id = 1, + Name = Guid.NewGuid().ToString() + }; + // Send the message to the router + router.Tell(message); +} +``` From df9f4266bcee57016ce22aef9693b108f0fed40a Mon Sep 17 00:00:00 2001 From: Nikita Tsukanov Date: Wed, 29 Apr 2015 00:04:44 +0300 Subject: [PATCH 03/66] Update .editorconfig --- .editorconfig | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index 92314f0e6c2..64fe33bbaea 100644 --- a/.editorconfig +++ b/.editorconfig @@ -7,4 +7,5 @@ root = true 
end_of_line = CRLF [*.cs] -indent_style = tab \ No newline at end of file +indent_style = space +indent_size = 4 From 80e5c94311c903529cf40bd529622b1035f0e3e0 Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Tue, 28 Apr 2015 18:10:21 -0700 Subject: [PATCH 04/66] added some more descriptive comments to the remoting components --- src/core/Akka.Cluster/ClusterDaemon.cs | 4 +- src/core/Akka.Remote/EndpointManager.cs | 4 +- .../Akka.Remote/RemoteActorRefProvider.cs | 4 ++ src/core/Akka.Remote/Remoting.cs | 15 ++++--- .../Transport/AkkaProtocolTransport.cs | 43 ++++++++++++++++++- .../Transport/Helios/HeliosTcpTransport.cs | 8 ++++ 6 files changed, 66 insertions(+), 12 deletions(-) diff --git a/src/core/Akka.Cluster/ClusterDaemon.cs b/src/core/Akka.Cluster/ClusterDaemon.cs index 6e83f3347b2..e60b3f29fe1 100644 --- a/src/core/Akka.Cluster/ClusterDaemon.cs +++ b/src/core/Akka.Cluster/ClusterDaemon.cs @@ -1023,8 +1023,8 @@ public void StopSeedNodeProcess() } } - //State transition to JOINING - new node joining. - //Received `Join` message and replies with `Welcome` message, containing + // State transition to JOINING - new node joining. + // Received `Join` message and replies with `Welcome` message, containing // current gossip state, including the new joining member. public void Joining(UniqueAddress node, ImmutableHashSet roles) { diff --git a/src/core/Akka.Remote/EndpointManager.cs b/src/core/Akka.Remote/EndpointManager.cs index 536328436dc..18c0221ff66 100644 --- a/src/core/Akka.Remote/EndpointManager.cs +++ b/src/core/Akka.Remote/EndpointManager.cs @@ -711,8 +711,8 @@ private Task x.Listen().ContinueWith( - result => Tuple.Create(new ProtocolTransportAddressPair(x, result.Result.Item1), result.Result.Item2), TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent)); - _listens = Task.WhenAll(tasks).ContinueWith(transportResults => transportResults.Result.ToList(), TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent); + result => Tuple.Create(new ProtocolTransportAddressPair(x, result.Result.Item1), result.Result.Item2), TaskContinuationOptions.ExecuteSynchronously & TaskContinuationOptions.AttachedToParent)); + _listens = Task.WhenAll(tasks).ContinueWith(transportResults => transportResults.Result.ToList(), TaskContinuationOptions.ExecuteSynchronously & TaskContinuationOptions.AttachedToParent); } return _listens; } diff --git a/src/core/Akka.Remote/RemoteActorRefProvider.cs b/src/core/Akka.Remote/RemoteActorRefProvider.cs index 75678ec6736..a33684658c7 100644 --- a/src/core/Akka.Remote/RemoteActorRefProvider.cs +++ b/src/core/Akka.Remote/RemoteActorRefProvider.cs @@ -406,6 +406,10 @@ public void AfterSendSystemMessage(ISystemMessage message) #region Internals + /// + /// All of the private internals used by , namely its transport + /// registry, remote serializers, and the instance. 
+ /// class Internals : INoSerializationVerificationNeeded { public Internals(RemoteTransport transport, Akka.Serialization.Serialization serialization, IInternalActorRef remoteDaemon) diff --git a/src/core/Akka.Remote/Remoting.cs b/src/core/Akka.Remote/Remoting.cs index 0c6113c0f58..336183cae8e 100644 --- a/src/core/Akka.Remote/Remoting.cs +++ b/src/core/Akka.Remote/Remoting.cs @@ -324,6 +324,9 @@ internal static Address LocalAddressForRemote( #endregion } + /// + /// Message type used to provide both and a name for a new transport actor + /// internal sealed class RegisterTransportActor : INoSerializationVerificationNeeded { public RegisterTransportActor(Props props, string name) @@ -337,6 +340,9 @@ public RegisterTransportActor(Props props, string name) public string Name { get; private set; } } + /// + /// Actor responsible for supervising the creation of all transport actors + /// internal class TransportSupervisor : UntypedActor { private readonly SupervisorStrategy _strategy = new OneForOneStrategy(3, TimeSpan.FromMinutes(1), exception => Directive.Restart); @@ -347,15 +353,10 @@ protected override SupervisorStrategy SupervisorStrategy() protected override void OnReceive(object message) { - PatternMatch.Match(message) + message.Match() .With(r => { - /* - * TODO: need to add support for RemoteDispatcher here. - * See https://github.com/akka/akka/blob/master/akka-remote/src/main/scala/akka/remote/RemoteSettings.scala#L42 - * and https://github.com/akka/akka/blob/master/akka-remote/src/main/scala/akka/remote/Remoting.scala#L95 - */ - Sender.Tell(Context.ActorOf(r.Props.WithDeploy(Deploy.Local), r.Name)); + Sender.Tell(Context.ActorOf(RARP.For(Context.System).ConfigureDispatcher(r.Props.WithDeploy(Deploy.Local)), r.Name)); }); } } diff --git a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs index 38143de0cc4..30773567cf2 100644 --- a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs +++ b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs @@ -505,6 +505,11 @@ private void InitializeFSM() .With(h => fsmEvent.StateData.Match() .With(ou => { + /* + * Association has been established, but handshake is not yet complete. + * This actor, the outbound ProtocolStateActor, can now set itself as + * the read handler for the remainder of the handshake process. + */ AssociationHandle wrappedHandle = h; var statusPromise = ou.StatusCompletionSource; wrappedHandle.ReadHandlerSource.TrySetResult(new ActorHandleEventListener(Self)); @@ -512,14 +517,16 @@ private void InitializeFSM() { _failureDetector.HeartBeat(); InitTimers(); + // wait for reply from the inbound side of the connection (WaitHandshake) nextState = GoTo(AssociationState.WaitHandshake) .Using(new OutboundUnderlyingAssociated(statusPromise, wrappedHandle)); } else { + //Otherwise, retry SetTimer("associate-retry", wrappedHandle, - ((RemoteActorRefProvider) ((ActorSystemImpl) Context.System).Provider) + ((RemoteActorRefProvider) ((ActorSystemImpl) Context.System).Provider) //TODO: rewrite using RARP ActorSystem Extension .RemoteSettings.BackoffPeriod, repeat: false); nextState = Stay(); } @@ -554,6 +561,10 @@ private void InitializeFSM() @event.StateData.Match() .With(ola => { + /* + * This state is used for OutboundProtocolState actors when they receive + * a reply back from the inbound end of the association. 
+ */ var wrappedHandle = ola.WrappedHandle; var statusCompletionSource = ola.StatusCompletionSource; pdu.Match() @@ -593,6 +604,11 @@ private void InitializeFSM() }) .With(iu => { + /* + * This state is used by inbound protocol state actors + * when they receive an association attempt from the + * outbound side of the association. + */ var associationHandler = iu.AssociationEventListener; var wrappedHandle = iu.WrappedHandle; pdu.Match() @@ -775,14 +791,23 @@ private void InitializeFSM() .With(iu => iu.WrappedHandle.Disassociate())); + /* + * Set the initial ProtocolStateActor state to CLOSED if OUTBOUND + * Set the initial ProtocolStateActor state to WAITHANDSHAKE if INBOUND + * */ _initialData.Match() .With(d => { + // attempt to open underlying transport to the remote address + // if using Helios, this is where the socket connection is opened. d.Transport.Associate(d.RemoteAddress).PipeTo(Self); StartWith(AssociationState.Closed, d); }) .With(d => { + // inbound transport is opened already inside the ProtocolStateManager + // therefore we just have to set ourselves as listener and wait for + // incoming handshake attempts from the client. d.WrappedHandle.ReadHandlerSource.SetResult(new ActorHandleEventListener(Self)); StartWith(AssociationState.WaitHandshake, d); }); @@ -941,6 +966,22 @@ private void PublishError(UnderlyingTransportError transportError) #region Static methods + + /// + /// used when creating OUTBOUND associations to remote endpoints. + /// + /// These create outbound instances, + /// which begin a state of + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// public static Props OutboundProps(HandshakeInfo handshakeInfo, Address remoteAddress, TaskCompletionSource statusCompletionSource, Transport transport, AkkaProtocolSettings settings, AkkaPduCodec codec, FailureDetector failureDetector, int? refuseUid = null) diff --git a/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs b/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs index b6055db4a77..9ed83444b9b 100644 --- a/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs +++ b/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs @@ -200,6 +200,14 @@ public override void Disassociate() } } + /// + /// TCP implementation of a . + /// + /// + /// Due to the connection-oriented nature of TCP connections, this transport doesn't have to do any + /// additional bookkeeping when transports are disposed or opened. + /// + /// class HeliosTcpTransport : HeliosTransport { public HeliosTcpTransport(ActorSystem system, Config config) From c62981bba004eb882101588071d10ad3b9d7c146 Mon Sep 17 00:00:00 2001 From: Rafael Teixeira Date: Wed, 29 Apr 2015 12:48:13 -0300 Subject: [PATCH 05/66] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index de3d8485547..f466dc67155 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ ### Documentation and resources -#### [Akka.NET Community SIte](http://getakka.net) +#### [Akka.NET Community Site](http://getakka.net) ### Install Akka.NET via NuGet @@ -38,6 +38,6 @@ PM> Install-Package Akka.FSharp ### Contribute If you are interested in helping porting Akka to .NET please take a look at [Contributing to Akka.NET](http://akkadotnet.github.io/wiki/Contributing to Akka.NET). -Also, please see [Building Akka .NET](https://github.com/akkadotnet/akka.net/wiki/Building-and-Distributing-Pigeon). 
+Also, please see [Building Akka.NET](https://github.com/akkadotnet/akka.net/wiki/Building-and-Distributing-Pigeon). ReSharper From cbc15d64d3d19604f67f4c9ad2f3c2a2cd0e1e25 Mon Sep 17 00:00:00 2001 From: Sean Gilliam Date: Wed, 29 Apr 2015 15:30:00 -0500 Subject: [PATCH 06/66] Updated xmldocs in Akka.Serialization - moved regular comments into xmldoc - added class cross-references - fleshed out auto-generated xmldoc - split JavaSerializer into its own file --- src/core/Akka/Akka.csproj | 1 + .../Akka/Serialization/ByteArraySerializer.cs | 38 ++--- src/core/Akka/Serialization/JavaSerializer.cs | 64 ++++++++ .../Serialization/NewtonSoftJsonSerializer.cs | 32 ++-- src/core/Akka/Serialization/NullSerializer.cs | 34 ++--- src/core/Akka/Serialization/Serializer.cs | 141 ++++-------------- 6 files changed, 131 insertions(+), 179 deletions(-) create mode 100644 src/core/Akka/Serialization/JavaSerializer.cs diff --git a/src/core/Akka/Akka.csproj b/src/core/Akka/Akka.csproj index e3a9dd6175e..e9f908d1a05 100644 --- a/src/core/Akka/Akka.csproj +++ b/src/core/Akka/Akka.csproj @@ -232,6 +232,7 @@ + diff --git a/src/core/Akka/Serialization/ByteArraySerializer.cs b/src/core/Akka/Serialization/ByteArraySerializer.cs index 5a3cde55a22..1680ecb353f 100644 --- a/src/core/Akka/Serialization/ByteArraySerializer.cs +++ b/src/core/Akka/Serialization/ByteArraySerializer.cs @@ -10,53 +10,43 @@ namespace Akka.Serialization { - /** - * This is a special Serializer that Serializes and deserializes byte arrays only, - * (just returns the byte array unchanged/uncopied) - */ - /// - /// Class ByteArraySerializer. + /// This is a special that serializes and deserializes byte arrays only + /// (just returns the byte array unchanged/uncopied). /// public class ByteArraySerializer : Serializer { /// /// Initializes a new instance of the class. /// - /// The system. + /// The actor system to associate with this serializer. public ByteArraySerializer(ExtendedActorSystem system) : base(system) { } /// - /// Gets the identifier. + /// Completely unique value to identify this implementation of the used to optimize network traffic /// - /// The identifier. - /// Completely unique value to identify this implementation of Serializer, used to optimize network traffic - /// Values from 0 to 16 is reserved for Akka internal usage public override int Identifier { get { return 4; } } /// - /// Gets a value indicating whether [include manifest]. - /// - /// true if [include manifest]; otherwise, false . /// Returns whether this serializer needs a manifest in the fromBinary method + /// public override bool IncludeManifest { get { return false; } } /// - /// To the binary. + /// Serializes the given object into a byte array /// - /// The object. - /// System.Byte[][]. - /// - /// Serializes the given object into an Array of Byte + /// The object to serialize + /// A byte array containing the serialized object + /// public override byte[] ToBinary(object obj) { if (obj == null) @@ -67,16 +57,14 @@ public override byte[] ToBinary(object obj) } /// - /// Froms the binary. + /// Deserializes a byte array into an object of type . /// - /// The bytes. - /// The type. - /// System.Object. 
- /// Produces an object from an array of bytes, with an optional type; + /// The array containing the serialized object + /// The type of object contained in the array + /// The object contained in the array public override object FromBinary(byte[] bytes, Type type) { return bytes; } } } - diff --git a/src/core/Akka/Serialization/JavaSerializer.cs b/src/core/Akka/Serialization/JavaSerializer.cs new file mode 100644 index 00000000000..bb8aaec5a46 --- /dev/null +++ b/src/core/Akka/Serialization/JavaSerializer.cs @@ -0,0 +1,64 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System; +using Akka.Actor; + +namespace Akka.Serialization +{ + /// + /// This is a special that serializes and deserializes Java objects only. + /// + public class JavaSerializer : Serializer + { + /// + /// Initializes a new instance of the class. + /// + /// The actor system to associate with this serializer. + public JavaSerializer(ExtendedActorSystem system) + : base(system) + { + } + + /// + /// Completely unique value to identify this implementation of the used to optimize network traffic + /// + public override int Identifier + { + get { return 1; } + } + + /// + /// Returns whether this serializer needs a manifest in the fromBinary method + /// + public override bool IncludeManifest + { + get { throw new NotSupportedException(); } + } + + /// + /// Serializes the given object into a byte array + /// + /// The object to serialize + /// A byte array containing the serialized object + public override byte[] ToBinary(object obj) + { + throw new NotSupportedException(); + } + + /// + /// Deserializes a byte array into an object of type + /// + /// The array containing the serialized object + /// The type of object contained in the array + /// The object contained in the array + public override object FromBinary(byte[] bytes, Type type) + { + throw new NotSupportedException(); + } + } +} diff --git a/src/core/Akka/Serialization/NewtonSoftJsonSerializer.cs b/src/core/Akka/Serialization/NewtonSoftJsonSerializer.cs index 1bc7aef98d5..c318998d84a 100644 --- a/src/core/Akka/Serialization/NewtonSoftJsonSerializer.cs +++ b/src/core/Akka/Serialization/NewtonSoftJsonSerializer.cs @@ -20,7 +20,8 @@ namespace Akka.Serialization { /// - /// Class NewtonSoftJsonSerializer. + /// This is a special that serializes and deserializes javascript objects only. + /// These objects need to be in the JavaScript Object Notation (JSON) format. /// public class NewtonSoftJsonSerializer : Serializer { @@ -29,9 +30,9 @@ public class NewtonSoftJsonSerializer : Serializer public JsonSerializerSettings Settings { get { return _settings; } } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// - /// The system. + /// The actor system to associate with this serializer. public NewtonSoftJsonSerializer(ExtendedActorSystem system) : base(system) { @@ -70,32 +71,26 @@ protected override JsonProperty CreateProperty(MemberInfo member, MemberSerializ } /// - /// Gets the identifier. + /// Completely unique value to identify this implementation of the used to optimize network traffic /// - /// The identifier. 
- /// Completely unique value to identify this implementation of Serializer, used to optimize network traffic - /// Values from 0 to 16 is reserved for Akka internal usage public override int Identifier { get { return -3; } } /// - /// Gets a value indicating whether [include manifest]. - /// - /// true if [include manifest]; otherwise, false. /// Returns whether this serializer needs a manifest in the fromBinary method + /// public override bool IncludeManifest { get { return false; } } /// - /// To the binary. + /// Serializes the given object into a byte array /// - /// The object. - /// System.Byte[][]. - /// Serializes the given object into an Array of Byte + /// The object to serialize + /// A byte array containing the serialized object public override byte[] ToBinary(object obj) { Serialization.CurrentSystem = system; @@ -105,12 +100,11 @@ public override byte[] ToBinary(object obj) } /// - /// Froms the binary. + /// Deserializes a byte array into an object of type . /// - /// The bytes. - /// The type. - /// System.Object. - /// Produces an object from an array of bytes, with an optional type; + /// The array containing the serialized object + /// The type of object contained in the array + /// The object contained in the array public override object FromBinary(byte[] bytes, Type type) { Serialization.CurrentSystem = system; diff --git a/src/core/Akka/Serialization/NullSerializer.cs b/src/core/Akka/Serialization/NullSerializer.cs index e7c43abedf9..f943912dee7 100644 --- a/src/core/Akka/Serialization/NullSerializer.cs +++ b/src/core/Akka/Serialization/NullSerializer.cs @@ -10,12 +10,8 @@ namespace Akka.Serialization { - /** - * This is a special Serializer that Serializes and deserializes nulls only - */ - /// - /// Class NullSerializer. + /// This is a special that serializes and deserializes nulls only /// public class NullSerializer : Serializer { @@ -24,52 +20,44 @@ public class NullSerializer : Serializer /// /// Initializes a new instance of the class. /// - /// The system. + /// The actor system to associate with this serializer. public NullSerializer(ExtendedActorSystem system) : base(system) { } /// - /// Gets the Serializer identifier. - /// This is used for remote messaging envelopes. + /// Completely unique value to identify this implementation of the used to optimize network traffic /// - /// The identifier. - /// Completely unique value to identify this implementation of Serializer, used to optimize network traffic - /// Values from 0 to 16 is reserved for Akka internal usage public override int Identifier { get { return 0; } } /// - /// Gets a value indicating whether a manifest should be included in remote messages. - /// - /// true if [include manifest]; otherwise, false. /// Returns whether this serializer needs a manifest in the fromBinary method + /// public override bool IncludeManifest { get { return false; } } /// - /// Serializes an object to binary. + /// Serializes the given object into a byte array /// - /// The object. - /// System.Byte[][]. - /// Serializes the given object into an Array of Byte + /// The object to serialize + /// A byte array containing the serialized object public override byte[] ToBinary(object obj) { return nullBytes; } /// - /// Deserializes a binary representation to an object. + /// Deserializes a byte array into an object of type /// - /// The bytes. - /// The type. - /// System.Object. 
- /// Produces an object from an array of bytes, with an optional type; + /// The array containing the serialized object + /// The type of object contained in the array + /// The object contained in the array public override object FromBinary(byte[] bytes, Type type) { return null; diff --git a/src/core/Akka/Serialization/Serializer.cs b/src/core/Akka/Serialization/Serializer.cs index c8f43b45928..7f2660baf94 100644 --- a/src/core/Akka/Serialization/Serializer.cs +++ b/src/core/Akka/Serialization/Serializer.cs @@ -10,149 +10,66 @@ namespace Akka.Serialization { - /** - * A Serializer represents a bimap between an object and an array of bytes representing that object. - * - * Serializers are loaded using reflection during [[akka.actor.ActorSystem]] - * start-up, where two constructors are tried in order: - * - *
-     *   • taking exactly one argument of type [[akka.actor.ExtendedActorSystem]];
-     *     this should be the preferred one because all reflective loading of classes
-     *     during deserialization should use ExtendedActorSystem.dynamicAccess (see
-     *     [[akka.actor.DynamicAccess]]), and
-     *   • without arguments, which is only an option if the serializer does not
-     *     load classes using reflection.
-     *
-     * Be sure to always use the PropertyManager for loading classes! This is necessary to
-     * avoid strange match errors and inequalities which arise from different class loaders loading
-     * the same class.
-     */
     ///
-    /// Class Serializer.
+    /// A Serializer represents a bimap between an object and an array of bytes representing that object.
+    ///
+    /// Serializers are loaded using reflection during ActorSystem
+    /// start-up, where two constructors are tried in order:
+    ///
+    ///   • taking exactly one argument of type ExtendedActorSystem;
+    ///     this should be the preferred one because all reflective loading of classes
+    ///     during deserialization should use ExtendedActorSystem.dynamicAccess (see
+    ///     [[akka.actor.DynamicAccess]]), and
+    ///   • without arguments, which is only an option if the serializer does not
+    ///     load classes using reflection.
+    ///
+    /// Be sure to always use the PropertyManager for loading classes! This is necessary to
+    /// avoid strange match errors and inequalities which arise from different class loaders loading
+    /// the same class.
     ///
public abstract class Serializer { /// - /// The system + /// The actor system to associate with this serializer. /// protected readonly ExtendedActorSystem system; /// /// Initializes a new instance of the class. /// - /// The system. + /// The actor system to associate with this serializer. public Serializer(ExtendedActorSystem system) { this.system = system; } - /** - * Completely unique value to identify this implementation of Serializer, used to optimize network traffic - * Values from 0 to 16 is reserved for Akka internal usage - */ - /// - /// Gets the identifier. + /// Completely unique value to identify this implementation of Serializer, used to optimize network traffic + /// Values from 0 to 16 is reserved for Akka internal usage /// - /// The identifier. public abstract int Identifier { get; } - /** - * Returns whether this serializer needs a manifest in the fromBinary method - */ - /// - /// Gets a value indicating whether [include manifest]. + /// Returns whether this serializer needs a manifest in the fromBinary method /// - /// true if [include manifest]; otherwise, false. public abstract bool IncludeManifest { get; } - /** - * Serializes the given object into an Array of Byte - */ - /// - /// To the binary. + /// Serializes the given object into a byte array /// - /// The object. - /// System.Byte[][]. + /// The object to serialize + /// A byte array containing the serialized object public abstract byte[] ToBinary(object obj); - /** - * Produces an object from an array of bytes, with an optional type; - */ - /// - /// Froms the binary. + /// Deserializes a byte array into an object of type . /// - /// The bytes. - /// The type. - /// System.Object. + /// The array containing the serialized object + /// The type of object contained in the array + /// The object contained in the array public abstract object FromBinary(byte[] bytes, Type type); } - - /// - /// Class JavaSerializer. - /// - public class JavaSerializer : Serializer - { - /// - /// Initializes a new instance of the class. - /// - /// The system. - public JavaSerializer(ExtendedActorSystem system) : base(system) - { - } - - /// - /// Gets the identifier. - /// - /// The identifier. - /// Completely unique value to identify this implementation of Serializer, used to optimize network traffic - /// Values from 0 to 16 is reserved for Akka internal usage - public override int Identifier - { - get { return 1; } - } - - /// - /// Gets a value indicating whether [include manifest]. - /// - /// true if [include manifest]; otherwise, false. - /// - /// Returns whether this serializer needs a manifest in the fromBinary method - public override bool IncludeManifest - { - get { throw new NotSupportedException(); } - } - - /// - /// To the binary. - /// - /// The object. - /// System.Byte[][]. - /// - /// Serializes the given object into an Array of Byte - public override byte[] ToBinary(object obj) - { - throw new NotSupportedException(); - } - - /// - /// Froms the binary. - /// - /// The bytes. - /// The type. - /// System.Object. 
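For context, the abstract contract documented above is small enough to sketch end to end. A minimal custom serializer against it might look like the following; the class name and identifier value are invented for illustration (identifiers 0 through 16 are reserved for Akka's own serializers):

    using System;
    using System.Text;
    using Akka.Actor;
    using Akka.Serialization;

    // Illustrative sketch of a custom Serializer implementing the members documented above.
    public class Utf8StringSerializer : Serializer
    {
        public Utf8StringSerializer(ExtendedActorSystem system) : base(system)
        {
        }

        // Unique id used to pick this serializer on the receiving side (0-16 are reserved for Akka).
        public override int Identifier
        {
            get { return 100; }
        }

        // No manifest is needed because FromBinary ignores the type hint.
        public override bool IncludeManifest
        {
            get { return false; }
        }

        public override byte[] ToBinary(object obj)
        {
            return Encoding.UTF8.GetBytes((string)obj);
        }

        public override object FromBinary(byte[] bytes, Type type)
        {
            return Encoding.UTF8.GetString(bytes);
        }
    }
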
- /// - /// Produces an object from an array of bytes, with an optional type; - public override object FromBinary(byte[] bytes, Type type) - { - throw new NotSupportedException(); - } - } } From 9639f5d91778822cc72e57f5a22229bec203bb67 Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Thu, 30 Apr 2015 12:34:17 -0700 Subject: [PATCH 07/66] more comments and docs --- src/core/Akka.Remote/EndpointManager.cs | 37 ++++++++++-- src/core/Akka.Remote/Remoting.cs | 9 ++- .../Transport/AkkaProtocolTransport.cs | 14 +++++ .../Transport/Helios/HeliosHelpers.cs | 1 + .../Transport/Helios/HeliosTcpTransport.cs | 35 +++++++++-- .../Transport/Helios/HeliosTransport.cs | 60 ------------------- src/examples/RemoteDeploy/System1/Program.cs | 3 - src/examples/RemoteDeploy/System2/Program.cs | 3 - 8 files changed, 84 insertions(+), 78 deletions(-) diff --git a/src/core/Akka.Remote/EndpointManager.cs b/src/core/Akka.Remote/EndpointManager.cs index 18c0221ff66..ead8d36a799 100644 --- a/src/core/Akka.Remote/EndpointManager.cs +++ b/src/core/Akka.Remote/EndpointManager.cs @@ -39,6 +39,9 @@ protected EndpointPolicy(bool isTombstone) } } + /// + /// We will always accept a + /// public class Pass : EndpointPolicy { public Pass(IActorRef endpoint, int? uid) @@ -53,6 +56,10 @@ public Pass(IActorRef endpoint, int? uid) public int? Uid { get; private set; } } + /// + /// A Gated node can't be connected to from this process for , + /// but we may accept an inbound connection from it if the remote node recovers on its own. + /// public class Gated : EndpointPolicy { public Gated(Deadline deadline) @@ -64,6 +71,9 @@ public Gated(Deadline deadline) public Deadline TimeOfRelease { get; private set; } } + /// + /// We do not accept connection attempts for a quarantined node until it restarts and resets its UID. + /// public class Quarantined : EndpointPolicy { public Quarantined(long uid, Deadline deadline) @@ -388,6 +398,13 @@ protected override void PostStop() protected override void OnReceive(object message) { message.Match() + /* + * the first command the EndpointManager receives. + * instructs the EndpointManager to fire off its "Listens" command, which starts + * up all inbound transports and binds them to specific addresses via configuration. + * those results will then be piped back to Remoting, who waits for the results of + * listen.AddressPromise. + * */ .With(listen => Listens.ContinueWith(listens => { if (listens.IsFaulted) @@ -427,8 +444,10 @@ into g listens.AddressesPromise.SetResult(transportsAndAddresses); }) .With(failure => failure.AddressesPromise.SetException(failure.Cause)) + // defer the inbound association until we can enter "Accepting" behavior .With(ia => Context.System.Scheduler.ScheduleTellOnce(TimeSpan.FromMilliseconds(10), Self, ia, Self)) .With(mc => Sender.Tell(new ManagementCommandAck(status:false))) + // transports are all started. Ready to start accepting inbound associations. .With(sf => Context.Become(Accepting)) .With(sf => { @@ -437,11 +456,21 @@ into g }); } + /// + /// Message-processing behavior when the is able to accept + /// inbound association requests. + /// + /// Messages from local system and the network. protected void Accepting(object message) { message.Match() .With(mc => { + /* + * applies a management command to all available transports. 
+ * + * Useful for things like global restart + */ var sender = Sender; var allStatuses = _transportMapping.Values.Select(x => x.ManagementCommand(mc.Cmd)); Task.WhenAll(allStatuses) @@ -775,20 +804,20 @@ private IActorRef CreateEndpoint(Address remoteAddress, Address localAddress, Ak if (writing) { endpointActor = - Context.ActorOf( + Context.ActorOf(RARP.For(Context.System).ConfigureDispatcher( ReliableDeliverySupervisor.ReliableDeliverySupervisorProps(handleOption, localAddress, remoteAddress, refuseUid, transport, endpointSettings, new AkkaPduProtobuffCodec(), - _receiveBuffers).WithDeploy(Deploy.Local), + _receiveBuffers).WithDeploy(Deploy.Local)), string.Format("reliableEndpointWriter-{0}-{1}", AddressUrlEncoder.Encode(remoteAddress), endpointId.Next())); } else { endpointActor = - Context.ActorOf( + Context.ActorOf(RARP.For(Context.System).ConfigureDispatcher( EndpointWriter.EndpointWriterProps(handleOption, localAddress, remoteAddress, refuseUid, transport, endpointSettings, new AkkaPduProtobuffCodec(), _receiveBuffers, - reliableDeliverySupervisor: null).WithDeploy(Deploy.Local), + reliableDeliverySupervisor: null).WithDeploy(Deploy.Local)), string.Format("endpointWriter-{0}-{1}", AddressUrlEncoder.Encode(remoteAddress), endpointId.Next())); } diff --git a/src/core/Akka.Remote/Remoting.cs b/src/core/Akka.Remote/Remoting.cs index 336183cae8e..a9a70b8efed 100644 --- a/src/core/Akka.Remote/Remoting.cs +++ b/src/core/Akka.Remote/Remoting.cs @@ -135,14 +135,17 @@ public override void Start() if (_endpointManager == null) { _endpointManager = - System.SystemActorOf( - Props.Create(() => new EndpointManager(System.Settings.Config, log)).WithDeploy(Deploy.Local), + System.SystemActorOf(RARP.For(System).ConfigureDispatcher( + Props.Create(() => new EndpointManager(System.Settings.Config, log)).WithDeploy(Deploy.Local)), EndpointManagerName); try { var addressPromise = new TaskCompletionSource>(); - _endpointManager.Tell(new EndpointManager.Listen(addressPromise)); + + // tells the EndpointManager to start all transports and bind them to listenable addresses, and then set the results + // of this promise to include them. + _endpointManager.Tell(new EndpointManager.Listen(addressPromise)); addressPromise.Task.Wait(Provider.RemoteSettings.StartupTimeout); var akkaProtocolTransports = addressPromise.Task.Result; diff --git a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs index 30773567cf2..e457896d2be 100644 --- a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs +++ b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs @@ -17,6 +17,12 @@ namespace Akka.Remote.Transport { + /// + /// Pairs an with its binding. + /// + /// This is the information that's used to allow external messages to address + /// this system over the network. + /// internal class ProtocolTransportAddressPair { public ProtocolTransportAddressPair(AkkaProtocolTransport protocolTransport, Address address) @@ -30,8 +36,16 @@ public ProtocolTransportAddressPair(AkkaProtocolTransport protocolTransport, Add public Address Address { get; private set; } } + /// + /// An that can occur during the course of an Akka Protocol handshake. + /// public class AkkaProtocolException : AkkaException { + /// + /// Constructor. + /// + /// The error message. + /// The internal exception (null by default.) 
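The comments above describe a promise-based startup handshake: Remoting hands the EndpointManager a Listen message carrying a TaskCompletionSource, the manager binds its inbound transports, and the caller blocks on the task until the bound addresses (or a failure) arrive. A self-contained sketch of that pattern with plain TPL types, using stand-in address strings rather than the internal Akka.Remote messages:

    using System;
    using System.Collections.Generic;
    using System.Threading.Tasks;

    class ListenDemo
    {
        static void Main()
        {
            // The "promise" handed to the manager; completing it publishes the bind results.
            var addressesPromise = new TaskCompletionSource<IList<string>>();

            // Manager side: bind transports asynchronously, then complete (or fault) the promise.
            Task.Run(() =>
            {
                var bound = new List<string> { "akka.tcp://demo@localhost:8081" };
                addressesPromise.SetResult(bound);
            });

            // Remoting.Start side: wait for the transports, bounded by a startup timeout.
            if (!addressesPromise.Task.Wait(TimeSpan.FromSeconds(10)))
                throw new TimeoutException("transports did not start within the startup timeout");

            Console.WriteLine(string.Join(", ", addressesPromise.Task.Result));
        }
    }
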
public AkkaProtocolException(string message, Exception cause = null) : base(message, cause) { } protected AkkaProtocolException(SerializationInfo info, StreamingContext context) diff --git a/src/core/Akka.Remote/Transport/Helios/HeliosHelpers.cs b/src/core/Akka.Remote/Transport/Helios/HeliosHelpers.cs index 2cb1c910a5a..2e7e12ce3d9 100644 --- a/src/core/Akka.Remote/Transport/Helios/HeliosHelpers.cs +++ b/src/core/Akka.Remote/Transport/Helios/HeliosHelpers.cs @@ -260,6 +260,7 @@ protected void Init(IConnection channel, INode remoteSocketAddress, Address remo public override void Dispose() { WrappedTransport.ConnectionGroup.TryRemove(this); + base.Dispose(); } } } diff --git a/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs b/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs index 9ed83444b9b..47fb15193b3 100644 --- a/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs +++ b/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs @@ -67,6 +67,15 @@ protected override AssociationHandle CreateHandle(IConnection channel, Address l return new TcpAssociationHandle(localAddress, remoteAddress, WrappedTransport, channel); } + /// + /// Fires whenever a Helios gets closed. + /// + /// Two possible causes for this event handler to fire: + /// * The other end of the connection has closed. We don't make any distinctions between graceful / unplanned shutdown. + /// * This end of the connection experienced an error. + /// + /// An exception describing why the socket was closed. + /// The handle to the socket channel that closed. protected override void OnDisconnect(HeliosConnectionException cause, IConnection closedChannel) { if(cause != null) @@ -81,6 +90,14 @@ protected override void OnDisconnect(HeliosConnectionException cause, IConnectio ChannelLocalActor.Remove(closedChannel); } + /// + /// Fires whenever a Helios received data from the network. + /// + /// The message playload. + /// + /// The channel responsible for sending the message. + /// Can be used to send replies back. + /// protected override void OnMessage(NetworkData data, IConnection responseChannel) { if (data.Length > 0) @@ -89,6 +106,13 @@ protected override void OnMessage(NetworkData data, IConnection responseChannel) } } + /// + /// Fires whenever a Helios experienced a non-fatal error. + /// + /// The connection should still be open even after this event fires. + /// + /// The execption that triggered this event. + /// The handle to the Helios channel that errored. 
protected override void OnException(Exception ex, IConnection erroredChannel) { ChannelLocalActor.Notify(erroredChannel, new UnderlyingTransportError(ex, "Non-fatal network error occurred inside underlying transport")); @@ -102,6 +126,9 @@ public override void Dispose() } } + /// + /// TCP handlers for inbound connections + /// class TcpServerHandler : TcpHandlers { private Task _associationListenerTask; @@ -135,6 +162,9 @@ protected override void OnConnect(INode remoteAddress, IConnection responseChann } } + /// + /// TCP handlers for outbound connections + /// class TcpClientHandler : TcpHandlers { protected readonly TaskCompletionSource StatusPromise = new TaskCompletionSource(); @@ -161,11 +191,6 @@ protected override void OnConnect(INode remoteAddress, IConnection responseChann { InitOutbound(responseChannel, remoteAddress, NetworkData.Create(Node.Empty(), new byte[0], 0)); } - - protected override void OnDisconnect(HeliosConnectionException cause, IConnection closedChannel) - { - base.OnDisconnect(cause, closedChannel); - } } /// diff --git a/src/core/Akka.Remote/Transport/Helios/HeliosTransport.cs b/src/core/Akka.Remote/Transport/Helios/HeliosTransport.cs index 0b7dbf6f775..089efed3d72 100644 --- a/src/core/Akka.Remote/Transport/Helios/HeliosTransport.cs +++ b/src/core/Akka.Remote/Transport/Helios/HeliosTransport.cs @@ -340,65 +340,5 @@ public static Address NodeToAddress(INode node, string schemeIdentifier, string #endregion } - - - /// - /// INTERNAL API - /// - /// Used for accepting inbound connections - /// - abstract class ServerHandler : CommonHandlers - { - private Task _associationListenerTask; - - protected ServerHandler(HeliosTransport wrappedTransport, Task associationListenerTask, IConnection underlyingConnection) : base(underlyingConnection) - { - WrappedTransport = wrappedTransport; - _associationListenerTask = associationListenerTask; - } - - protected void InitInbound(IConnection connection, INode remoteSocketAddress, NetworkData msg) - { - connection.StopReceive(); - _associationListenerTask.ContinueWith(r => - { - var listener = r.Result; - var remoteAddress = HeliosTransport.NodeToAddress(remoteSocketAddress, WrappedTransport.SchemeIdentifier, - WrappedTransport.System.Name); - - if(remoteAddress == null) throw new HeliosNodeException("Unknown inbound remote address type {0}", remoteSocketAddress); - AssociationHandle handle; - Init(connection, remoteSocketAddress, remoteAddress, msg, out handle); - listener.Notify(new InboundAssociation(handle)); - - }, TaskContinuationOptions.AttachedToParent & TaskContinuationOptions.ExecuteSynchronously & TaskContinuationOptions.NotOnCanceled & TaskContinuationOptions.NotOnFaulted); - } - } - - /// - /// INTERNAL API - /// - /// Used for creating outbound connections - /// - abstract class ClientHandler : CommonHandlers - { - protected readonly TaskCompletionSource StatusPromise = new TaskCompletionSource(); - public Task StatusFuture { get { return StatusPromise.Task; } } - - protected Address RemoteAddress; - - protected ClientHandler(HeliosTransport heliosWrappedTransport, Address remoteAddress, IConnection underlyingConnection) : base(underlyingConnection) - { - WrappedTransport = heliosWrappedTransport; - RemoteAddress = remoteAddress; - } - - protected void InitOutbound(IConnection channel, INode remoteSocketAddress, NetworkData msg) - { - AssociationHandle handle; - Init(channel, remoteSocketAddress, RemoteAddress, msg, out handle); - StatusPromise.SetResult(handle); - } - } } diff --git 
a/src/examples/RemoteDeploy/System1/Program.cs b/src/examples/RemoteDeploy/System1/Program.cs index 82d2b4117b7..0a27f00ad3c 100644 --- a/src/examples/RemoteDeploy/System1/Program.cs +++ b/src/examples/RemoteDeploy/System1/Program.cs @@ -55,9 +55,6 @@ private static void Main(string[] args) } remote { helios.tcp { - transport-class = ""Akka.Remote.Transport.Helios.HeliosTcpTransport, Akka.Remote"" - applied-adapters = [] - transport-protocol = tcp port = 8090 hostname = localhost } diff --git a/src/examples/RemoteDeploy/System2/Program.cs b/src/examples/RemoteDeploy/System2/Program.cs index c9502e0594a..fdc0b088e26 100644 --- a/src/examples/RemoteDeploy/System2/Program.cs +++ b/src/examples/RemoteDeploy/System2/Program.cs @@ -33,9 +33,6 @@ private static void Main(string[] args) } remote { helios.tcp { - transport-class = ""Akka.Remote.Transport.Helios.HeliosTcpTransport, Akka.Remote"" - applied-adapters = [] - transport-protocol = tcp port = 8080 hostname = localhost } From 147a9a0b2fa25b480bc2f4af0b14dd3fd4653e43 Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Thu, 30 Apr 2015 13:57:02 -0700 Subject: [PATCH 08/66] added clearer logging and validation to Remote / Cluster Added a null argument check for supervisor strategy inside ClusterPoolRouters - saw this come up inside the WebCrawler demo. Made it so heartbeat timeouts get logged to the error log. --- src/core/Akka.Cluster/Routing/ClusterRoutingConfig.cs | 1 + src/core/Akka.Remote/EndpointManager.cs | 2 +- src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/core/Akka.Cluster/Routing/ClusterRoutingConfig.cs b/src/core/Akka.Cluster/Routing/ClusterRoutingConfig.cs index 72a8cf7408a..421f72153eb 100644 --- a/src/core/Akka.Cluster/Routing/ClusterRoutingConfig.cs +++ b/src/core/Akka.Cluster/Routing/ClusterRoutingConfig.cs @@ -553,6 +553,7 @@ public ClusterRouterPoolActor(SupervisorStrategy supervisorStrategy, ClusterRout { Settings = settings; _supervisorStrategy = supervisorStrategy; + if (_supervisorStrategy == null) throw new ArgumentNullException("supervisorStrategy"); _pool = (Pool)Cell.RouterConfig; } diff --git a/src/core/Akka.Remote/EndpointManager.cs b/src/core/Akka.Remote/EndpointManager.cs index ead8d36a799..993bd18f030 100644 --- a/src/core/Akka.Remote/EndpointManager.cs +++ b/src/core/Akka.Remote/EndpointManager.cs @@ -347,7 +347,7 @@ protected override SupervisorStrategy SupervisorStrategy() .With(shutdown => { log.Debug("Remote system with address [{0}] has shut down. 
" + - "Address is not gated for {1}ms, all messages to this address will be delivered to dead letters.", + "Address is now gated for {1}ms, all messages to this address will be delivered to dead letters.", shutdown.RemoteAddress, settings.RetryGateClosedFor.TotalMilliseconds); endpoints.MarkAsFailed(Sender, Deadline.Now + settings.RetryGateClosedFor); AddressTerminatedTopic.Get(Context.System).Publish(new AddressTerminated(shutdown.RemoteAddress)); diff --git a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs index e457896d2be..1ea6854e00a 100644 --- a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs +++ b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs @@ -836,7 +836,7 @@ protected override void LogTermination(Reason reason) failure.Cause.Match() .With(() => { }) //no logging .With(() => { }) //no logging - .With(timeoutReason => _log.Info(timeoutReason.ErrorMessage)); + .With(timeoutReason => _log.Error(timeoutReason.ErrorMessage)); } else base.LogTermination(reason); From 04de7845be10816aa90b57731c303f5cc3f9e8b8 Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Thu, 30 Apr 2015 15:52:07 -0700 Subject: [PATCH 09/66] All Pool routers from configuration now use DefaultSupervisoryStrategy Fixed a bug that caused clustered pool routers to have a null supervisor strategy by default when loaded from configuration. --- .../Routing/ClusterRouterSupervisorSpec.cs | 41 +++++++++++++++++++ src/core/Akka/Routing/RouterConfig.cs | 1 + 2 files changed, 42 insertions(+) diff --git a/src/core/Akka.Cluster.Tests/Routing/ClusterRouterSupervisorSpec.cs b/src/core/Akka.Cluster.Tests/Routing/ClusterRouterSupervisorSpec.cs index 72162a8db76..388176d2f0b 100644 --- a/src/core/Akka.Cluster.Tests/Routing/ClusterRouterSupervisorSpec.cs +++ b/src/core/Akka.Cluster.Tests/Routing/ClusterRouterSupervisorSpec.cs @@ -21,7 +21,19 @@ public ClusterRouterSupervisorSpec() akka{ actor{ provider = ""Akka.Cluster.ClusterActorRefProvider, Akka.Cluster"" + deployment { + /router1 { + router = round-robin-pool + nr-of-instances = 1 + cluster { + enabled = on + max-nr-of-instances-per-node = 1 + allow-local-routees = true + } + } + } } + remote.helios.tcp.port = 0 }") { } @@ -41,6 +53,26 @@ public KillableActor(IActorRef testActor) } } + class RestartableActor : ReceiveActor + { + private readonly IActorRef TestActor; + + protected override void PostRestart(Exception reason) + { + base.PostRestart(reason); + TestActor.Tell("restarted"); + } + + public RestartableActor(IActorRef testActor) + { + TestActor = testActor; + Receive(s => s == "go away", s => + { + throw new ArgumentException("Goodbye then!"); + }); + } + } + #endregion #region Tests @@ -59,6 +91,15 @@ public void Cluster_aware_routers_must_use_provided_supervisor_strategy() ExpectMsg("supervised", TimeSpan.FromSeconds(2)); } + [Fact] + public void Cluster_aware_routers_must_use_default_supervisor_strategy_when_none_specified() + { + var router = Sys.ActorOf(Props.Create(() => new RestartableActor(TestActor)).WithRouter(FromConfig.Instance), "router1"); + + router.Tell("go away"); + ExpectMsg("restarted", TimeSpan.FromSeconds(2)); + } + #endregion } } diff --git a/src/core/Akka/Routing/RouterConfig.cs b/src/core/Akka/Routing/RouterConfig.cs index fbb372a20ae..737b02dc01b 100644 --- a/src/core/Akka/Routing/RouterConfig.cs +++ b/src/core/Akka/Routing/RouterConfig.cs @@ -267,6 +267,7 @@ protected Pool(Config config) : base(Dispatchers.DefaultDispatcherId) _nrOfInstances = 
config.GetInt("nr-of-instances"); _resizer = DefaultResizer.FromConfig(config); _usePoolDispatcher = config.HasPath("pool-dispatcher"); + _supervisorStrategy = DefaultStrategy; // ReSharper restore DoNotCallOverridableMethodsInConstructor } From 2c95308a4e51b76b66d7a1c6620bb56d1d04f1bd Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Thu, 30 Apr 2015 15:28:21 -0700 Subject: [PATCH 10/66] changed the HeliosTcpHandler so it reports disassociations for reasons unknown Causes the EndpointManager to apply a less severe policy to the failed node, which should allow for reconnects fixed termination issue inside AkkaProtocolTransport improperly nested `PatternMatch` was responsible for throwing a null exception when trying to set an exception --- .../Transport/AkkaProtocolTransport.cs | 6 +++--- .../Transport/Helios/HeliosTcpTransport.cs | 17 ++++++----------- 2 files changed, 9 insertions(+), 14 deletions(-) diff --git a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs index 1ea6854e00a..809c308e4df 100644 --- a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs +++ b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs @@ -762,12 +762,12 @@ private void InitializeFSM() associationFailure = new AkkaProtocolException( "The remote system has a UID that has been quarantined. Association aborted.")) - .With(info => associationFailure = DisassociateException(info)) - .Default( + .With(info => associationFailure = DisassociateException(info))) + .Default( msg => associationFailure = new AkkaProtocolException( - "Transport disassociated before handshake finished"))); + "Transport disassociated before handshake finished")); oua.StatusCompletionSource.TrySetException(associationFailure); oua.WrappedHandle.Disassociate(); diff --git a/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs b/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs index 47fb15193b3..60c8b2d5d37 100644 --- a/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs +++ b/src/core/Akka.Remote/Transport/Helios/HeliosTcpTransport.cs @@ -78,15 +78,10 @@ protected override AssociationHandle CreateHandle(IConnection channel, Address l /// The handle to the socket channel that closed. 
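Returning to the pool-router change above (a default supervisor strategy for pools loaded from configuration): callers who want deterministic supervision can still attach a strategy explicitly. A sketch only, with the actor, system name, and decider invented for illustration:

    using System;
    using Akka.Actor;
    using Akka.Routing;

    class EchoActor : ReceiveActor
    {
        public EchoActor()
        {
            Receive<string>(s => Sender.Tell(s));
        }
    }

    class RouterDemo
    {
        static void Main()
        {
            var system = ActorSystem.Create("router-demo");

            // Explicit strategy so routee failures restart the routee rather than
            // relying on whatever default the pool would otherwise use.
            var strategy = new OneForOneStrategy(ex => Directive.Restart);

            var router = system.ActorOf(
                Props.Create(() => new EchoActor())
                     .WithRouter(new RoundRobinPool(5).WithSupervisorStrategy(strategy)),
                "workers");

            router.Tell("ping");
        }
    }
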
protected override void OnDisconnect(HeliosConnectionException cause, IConnection closedChannel) { - if(cause != null) + if (cause != null) ChannelLocalActor.Notify(closedChannel, new UnderlyingTransportError(cause, "Underlying transport closed.")); - if (cause != null && cause.Type == ExceptionType.Closed) - ChannelLocalActor.Notify(closedChannel, new Disassociated(DisassociateInfo.Shutdown)); - else - { - ChannelLocalActor.Notify(closedChannel, new Disassociated(DisassociateInfo.Unknown)); - } - + + ChannelLocalActor.Notify(closedChannel, new Disassociated(DisassociateInfo.Unknown)); ChannelLocalActor.Remove(closedChannel); } @@ -120,7 +115,7 @@ protected override void OnException(Exception ex, IConnection erroredChannel) public override void Dispose() { - + ChannelLocalActor.Remove(UnderlyingConnection); base.Dispose(); } @@ -220,7 +215,7 @@ public override bool Write(ByteString payload) public override void Disassociate() { - if(!_channel.WasDisposed) + if (!_channel.WasDisposed) _channel.Close(); } } @@ -247,7 +242,7 @@ protected override Task AssociateInternal(Address remoteAddre var socketAddress = client.RemoteHost; client.Open(); - return ((TcpClientHandler) client).StatusFuture; + return ((TcpClientHandler)client).StatusFuture; } } } From ce2b5fd0c7511f0d6477036b76e7ca7404bcec18 Mon Sep 17 00:00:00 2001 From: Nikita Tsukanov Date: Wed, 29 Apr 2015 00:04:44 +0300 Subject: [PATCH 11/66] Update .editorconfig --- .editorconfig | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index 92314f0e6c2..64fe33bbaea 100644 --- a/.editorconfig +++ b/.editorconfig @@ -7,4 +7,5 @@ root = true end_of_line = CRLF [*.cs] -indent_style = tab \ No newline at end of file +indent_style = space +indent_size = 4 From 72d42d052e61abfb6ebed022b1faba646a39db71 Mon Sep 17 00:00:00 2001 From: andrewchaa Date: Fri, 1 May 2015 13:49:38 +0100 Subject: [PATCH 12/66] specify nuget config file to avoid accessing private nuget repo --- build.cmd | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/build.cmd b/build.cmd index 76b0ef95e84..2293572349c 100644 --- a/build.cmd +++ b/build.cmd @@ -4,13 +4,13 @@ pushd %~dp0 src\.nuget\NuGet.exe update -self -src\.nuget\NuGet.exe install FAKE -OutputDirectory src\packages -ExcludeVersion -Version 3.4.1 +src\.nuget\NuGet.exe install FAKE -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages -ExcludeVersion -Version 3.4.1 -src\.nuget\NuGet.exe install xunit.runners -OutputDirectory src\packages\FAKE -ExcludeVersion -Version 1.9.2 -src\.nuget\NuGet.exe install nunit.runners -OutputDirectory src\packages\FAKE -ExcludeVersion -Version 2.6.4 +src\.nuget\NuGet.exe install xunit.runners -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages\FAKE -ExcludeVersion -Version 1.9.2 +src\.nuget\NuGet.exe install nunit.runners -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages\FAKE -ExcludeVersion -Version 2.6.4 if not exist src\packages\SourceLink.Fake\tools\SourceLink.fsx ( - src\.nuget\nuget.exe install SourceLink.Fake -OutputDirectory src\packages -ExcludeVersion + src\.nuget\nuget.exe install SourceLink.Fake -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages -ExcludeVersion ) rem cls From 1c949d5ee075cd0089179afa286dbf428919139b Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Fri, 1 May 2015 09:13:56 -0700 Subject: [PATCH 13/66] Pre-release version notes so new NuGet packages will show up correctly in nightly builds --- RELEASE_NOTES.md | 2 ++ 1 file changed, 2 
insertions(+) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 7146021d351..5e09caa130d 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -1,3 +1,5 @@ +#### 1.0.2 May 1 2015 + #### 1.0.1 Apr 28 2015 **Bugfix release for Akka.NET v1.0.** From 3ab8ff9f6b068dbbec076179f14fed18fa4f8581 Mon Sep 17 00:00:00 2001 From: Bartosz Sypytkowski Date: Tue, 5 May 2015 20:41:09 +0300 Subject: [PATCH 14/66] dynamic resolution of snapshot stores and journals --- src/core/Akka.Persistence/Persistence.cs | 81 +++++++++++++++++++----- 1 file changed, 64 insertions(+), 17 deletions(-) diff --git a/src/core/Akka.Persistence/Persistence.cs b/src/core/Akka.Persistence/Persistence.cs index 28a3e92d1d4..f6416d0c08f 100644 --- a/src/core/Akka.Persistence/Persistence.cs +++ b/src/core/Akka.Persistence/Persistence.cs @@ -6,6 +6,7 @@ //----------------------------------------------------------------------- using System; +using System.Collections.Concurrent; using Akka.Actor; using Akka.Configuration; using Akka.Dispatch; @@ -19,51 +20,97 @@ public class PersistenceExtension : IExtension private readonly Config _config; private readonly ExtendedActorSystem _system; - private readonly IActorRef _journal; - private readonly IActorRef _snapshotStore; - public PersistenceSettings Settings { get; private set; } + // both defaults are lazy, so that they don't need to be configured if they're not used + private readonly Lazy _defaultJournalPluginId; + private readonly Lazy _defaultSnapshotPluginId; + + private readonly ConcurrentDictionary> _journalPluginExtensionIds = new ConcurrentDictionary>(); + private readonly ConcurrentDictionary> _snapshotPluginExtensionIds = new ConcurrentDictionary>(); public PersistenceExtension(ExtendedActorSystem system) { _system = system; _system.Settings.InjectTopLevelFallback(Persistence.DefaultConfig()); _config = system.Settings.Config.GetConfig("akka.persistence"); - _journal = CreatePlugin("journal", type => typeof (AsyncWriteJournal).IsAssignableFrom(type) - ? Dispatchers.DefaultDispatcherId - : DefaultPluginDispatcherId); - _snapshotStore = CreatePlugin("snapshot-store", _ => DefaultPluginDispatcherId); + + _defaultJournalPluginId = new Lazy(() => + { + var configPath = _config.GetString("journal.plugin"); + if (string.IsNullOrEmpty(configPath)) throw new NullReferenceException("Default journal plugin is not configured"); + return configPath; + }); + + _defaultSnapshotPluginId = new Lazy(() => + { + var configPath = _config.GetString("snapshot-store.plugin"); + if (string.IsNullOrEmpty(configPath)) throw new NullReferenceException("Default snapshot-store plugin is not configured"); + return configPath; + }); Settings = new PersistenceSettings(_system, _config); } + public PersistenceSettings Settings { get; private set; } + public string PersistenceId(IActorRef actor) { return actor.Path.ToStringWithoutAddress(); } - public IActorRef SnapshotStoreFor(string persistenceId) + /// + /// Returns a snapshot store plugin actor identified by . + /// When empty looks for default path under "akka.persistence.snapshot-store.plugin". + /// + public IActorRef SnapshotStoreFor(string snapshotPluginId) { - // currently always returns _snapshotStore, but in future it may return dedicated actor for each persistence id - return _snapshotStore; + var configPath = string.IsNullOrEmpty(snapshotPluginId) ? 
_defaultSnapshotPluginId.Value : snapshotPluginId; + Lazy pluginContainer; + if (!_snapshotPluginExtensionIds.TryGetValue(configPath, out pluginContainer)) + { + pluginContainer = _snapshotPluginExtensionIds.AddOrUpdate(configPath, new Lazy(() => CreatePlugin(configPath, _ => DefaultPluginDispatcherId)), (key, old) => old); + } + + return pluginContainer.Value; } - public IActorRef JournalFor(string persistenceId) + /// + /// Returns a journal plugin actor identified by . + /// When empty looks for default path under "akka.persistence.journal.plugin". + /// + public IActorRef JournalFor(string journalPluginId) { - // currently always returns _journal, but in future it may return dedicated actor for each persistence id - return _journal; + var configPath = string.IsNullOrEmpty(journalPluginId) ? _defaultJournalPluginId.Value : journalPluginId; + Lazy pluginContainer; + if (!_journalPluginExtensionIds.TryGetValue(configPath, out pluginContainer)) + { + pluginContainer = _journalPluginExtensionIds.AddOrUpdate(configPath, new Lazy(() => CreatePlugin(configPath, type => + typeof(AsyncWriteJournal).IsAssignableFrom(type) ? Dispatchers.DefaultDispatcherId : DefaultPluginDispatcherId)), (key, old) => old); + } + + return pluginContainer.Value; } - private IActorRef CreatePlugin(string type, Func dispatcherSelector) + private IActorRef CreatePlugin(string configPath, Func dispatcherSelector) { - var pluginConfigPath = _config.GetString(type + ".plugin"); - var pluginConfig = _system.Settings.Config.GetConfig(pluginConfigPath); + if (string.IsNullOrEmpty(configPath) || !_system.Settings.Config.HasPath(configPath)) + { + throw new ArgumentException("Persistence config is missing plugin config path for: " + configPath, "configPath"); + } + + var pluginConfig = _system.Settings.Config.GetConfig(configPath); var pluginTypeName = pluginConfig.GetString("class"); var pluginType = Type.GetType(pluginTypeName); + + var shouldInjectConfig = pluginConfig.HasPath("inject-config") && pluginConfig.GetBoolean("inject-config"); var pluginDispatcherId = pluginConfig.HasPath("plugin-dispatcher") ? pluginConfig.GetString("plugin-dispatcher") : dispatcherSelector(pluginType); - return _system.SystemActorOf(Props.Create(pluginType).WithDispatcher(pluginDispatcherId), type); + var pluginActorArgs = shouldInjectConfig ? 
new object[] { pluginConfig } : null; + var pluginActorProps = new Props(pluginType, pluginActorArgs).WithDispatcher(pluginDispatcherId); + + var pluginRef = _system.SystemActorOf(pluginActorProps, configPath); + return pluginRef; } } From 22e5d3a794df0f93d4d789cbd3f9976cf2c1d797 Mon Sep 17 00:00:00 2001 From: Stefan Sedich Date: Wed, 6 May 2015 09:01:13 +1000 Subject: [PATCH 15/66] Cleaning up the Event namespace --- src/core/Akka.Cluster/ClusterDaemon.cs | 4 +- src/core/Akka.Remote.TestKit/Conductor.cs | 2 +- src/core/Akka/Event/ActorEventBus.cs | 6 +- src/core/Akka/Event/BusLogging.cs | 74 +++++----- src/core/Akka/Event/DeadLetter.cs | 13 +- src/core/Akka/Event/DeadLetterListener.cs | 32 ++--- src/core/Akka/Event/Debug.cs | 8 +- .../Akka/Event/DefaultLogMessageFormatter.cs | 9 ++ src/core/Akka/Event/DefaultLogger.cs | 21 ++- src/core/Akka/Event/Error.cs | 11 +- src/core/Akka/Event/EventBus.cs | 111 +++++++-------- src/core/Akka/Event/EventStream.cs | 43 +++--- src/core/Akka/Event/ILogMessageFormatter.cs | 9 ++ src/core/Akka/Event/ILoggingAdapter.cs | 8 +- src/core/Akka/Event/Info.cs | 8 +- src/core/Akka/Event/InitializeLogger.cs | 8 +- src/core/Akka/Event/LogEvent.cs | 22 +-- src/core/Akka/Event/LogLevel.cs | 10 +- src/core/Akka/Event/LogMessage.cs | 16 +++ src/core/Akka/Event/LoggerInitialized.cs | 2 +- src/core/Akka/Event/Logging.cs | 38 ++++-- src/core/Akka/Event/LoggingAdapterBase.cs | 127 ++++++++++-------- src/core/Akka/Event/LoggingBus.cs | 74 +++++----- src/core/Akka/Event/StandardOutLogger.cs | 54 ++++++-- src/core/Akka/Event/Subscription.cs | 11 +- src/core/Akka/Event/TraceLogger.cs | 7 +- src/core/Akka/Event/UnhandledMessage.cs | 14 +- src/core/Akka/Event/Warning.cs | 8 +- 28 files changed, 408 insertions(+), 342 deletions(-) diff --git a/src/core/Akka.Cluster/ClusterDaemon.cs b/src/core/Akka.Cluster/ClusterDaemon.cs index 6e83f3347b2..43e10a175c2 100644 --- a/src/core/Akka.Cluster/ClusterDaemon.cs +++ b/src/core/Akka.Cluster/ClusterDaemon.cs @@ -1622,11 +1622,11 @@ public void ReapUnreachableMembers() var nonExiting = partitioned.Item2; if (nonExiting.Any()) - _log.Warn("Cluster Node [{0}] - Marking node(s) as UNREACHABLE [{1}]", + _log.Warning("Cluster Node [{0}] - Marking node(s) as UNREACHABLE [{1}]", _cluster.SelfAddress, nonExiting.Select(m => m.ToString()).Aggregate((a, b) => a + ", " + b)); if (exiting.Any()) - _log.Warn("Marking exiting node(s) as UNREACHABLE [{0}]. This is expected and they will be removed.", + _log.Warning("Marking exiting node(s) as UNREACHABLE [{0}]. 
This is expected and they will be removed.", _cluster.SelfAddress, exiting.Select(m => m.ToString()).Aggregate((a, b) => a + ", " + b)); if (newlyDetectedReachableMembers.Any()) diff --git a/src/core/Akka.Remote.TestKit/Conductor.cs b/src/core/Akka.Remote.TestKit/Conductor.cs index a2cb6545e1e..04342f69b94 100644 --- a/src/core/Akka.Remote.TestKit/Conductor.cs +++ b/src/core/Akka.Remote.TestKit/Conductor.cs @@ -295,7 +295,7 @@ public void OnMessage(object message, IConnection responseChannel) public void OnException(Exception ex, IConnection erroredChannel) { - _log.Warn(string.Format("handled network error from {0}: {1}", erroredChannel.RemoteHost, ex.Message)); + _log.Warning(string.Format("handled network error from {0}: {1}", erroredChannel.RemoteHost, ex.Message)); } } diff --git a/src/core/Akka/Event/ActorEventBus.cs b/src/core/Akka/Event/ActorEventBus.cs index 2fce2a01da8..e6d3f8819c8 100644 --- a/src/core/Akka/Event/ActorEventBus.cs +++ b/src/core/Akka/Event/ActorEventBus.cs @@ -10,10 +10,10 @@ namespace Akka.Event { /// - /// Class ActorEventBus. + /// Represents an EventBus where the Subscriber type is ActorRef. /// - /// The type of the t event. - /// The type of the t classifier. + /// The event type. + /// The classifier type. public abstract class ActorEventBus : EventBus { } diff --git a/src/core/Akka/Event/BusLogging.cs b/src/core/Akka/Event/BusLogging.cs index 682d005cefd..4b39c0b6b1c 100644 --- a/src/core/Akka/Event/BusLogging.cs +++ b/src/core/Akka/Event/BusLogging.cs @@ -10,38 +10,27 @@ namespace Akka.Event { /// - /// Class BusLogging. + /// A logging adapter implementation publishing log events to the event stream. /// public class BusLogging : LoggingAdapterBase { + private readonly LoggingBus _bus; + private readonly Type _logClass; + private readonly string _logSource; + /// - /// The bus + /// Initializes a new instance of the class. /// - private readonly LoggingBus bus; - - /// - /// The log class - /// - private readonly Type logClass; - - /// - /// The log source - /// - private readonly string logSource; - - /// - /// Initializes a new instance of the class. - /// - /// The bus. + /// The logging bus instance that messages will be published to. /// The log source. /// The log class. /// The log message formatter. 
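Since this patch renames the Warn(...) overload to Warning(...) (see the ClusterDaemon and Conductor hunks above), a typical call site through a BusLogging-backed adapter obtained from Context.GetLogger() looks roughly as follows; the actor and message values are invented for illustration:

    using Akka.Actor;
    using Akka.Event;

    class ThermostatActor : ReceiveActor
    {
        private readonly ILoggingAdapter _log;

        public ThermostatActor()
        {
            // Context.GetLogger() hands back a BusLogging adapter that publishes to the EventStream.
            _log = Context.GetLogger();

            Receive<double>(temperature =>
            {
                _log.Debug("Received reading: {0}", temperature);
                if (temperature > 90.0)
                    _log.Warning("Temperature {0} exceeds threshold", temperature); // formerly Warn(...)
            });
        }
    }
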
public BusLogging(LoggingBus bus, string logSource, Type logClass, ILogMessageFormatter logMessageFormatter) : base(logMessageFormatter) { - this.bus = bus; - this.logSource = logSource; - this.logClass = logClass; + _bus = bus; + _logSource = logSource; + _logClass = logClass; _isErrorEnabled = bus.LogLevel <= LogLevel.ErrorLevel; _isWarningEnabled = bus.LogLevel <= LogLevel.WarningLevel; @@ -50,62 +39,61 @@ public BusLogging(LoggingBus bus, string logSource, Type logClass, ILogMessageFo } private readonly bool _isDebugEnabled; - public override bool IsDebugEnabled { get { return _isDebugEnabled; }} + public override bool IsDebugEnabled { get { return _isDebugEnabled; } } private readonly bool _isErrorEnabled; - public override bool IsErrorEnabled { get { return _isErrorEnabled; }} + public override bool IsErrorEnabled { get { return _isErrorEnabled; } } private readonly bool _isInfoEnabled; - public override bool IsInfoEnabled{ get { return _isInfoEnabled; }} + public override bool IsInfoEnabled { get { return _isInfoEnabled; } } private readonly bool _isWarningEnabled; - public override bool IsWarningEnabled { get { return _isWarningEnabled; }} + public override bool IsWarningEnabled { get { return _isWarningEnabled; } } /// - /// Notifies the error. + /// Publishes the error message onto the LoggingBus. /// - /// The message. + /// The error message. protected override void NotifyError(object message) { - bus.Publish(new Error(null, logSource, logClass, message)); + _bus.Publish(new Error(null, _logSource, _logClass, message)); } /// - /// Notifies the error. + /// Publishes the error message and exception onto the LoggingBus. /// - /// The cause. - /// The message. + /// The exception that caused this error. + /// The error message. protected override void NotifyError(Exception cause, object message) { - bus.Publish(new Error(cause, logSource, logClass, message)); + _bus.Publish(new Error(cause, _logSource, _logClass, message)); } /// - /// Notifies the warning. + /// Publishes the the warning message onto the LoggingBus. /// - /// The message. + /// The warning message. protected override void NotifyWarning(object message) { - bus.Publish(new Warning(logSource, logClass, message)); + _bus.Publish(new Warning(_logSource, _logClass, message)); } /// - /// Notifies the information. + /// Publishes the the info message onto the LoggingBus. /// - /// The message. + /// The info message. protected override void NotifyInfo(object message) { - bus.Publish(new Info(logSource, logClass, message)); + _bus.Publish(new Info(_logSource, _logClass, message)); } /// - /// Notifies the debug. + /// Publishes the the debug message onto the LoggingBus. /// - /// The message. + /// The debug message. protected override void NotifyDebug(object message) { - bus.Publish(new Debug(logSource, logClass, message)); + _bus.Publish(new Debug(_logSource, _logClass, message)); } } -} - +} \ No newline at end of file diff --git a/src/core/Akka/Event/DeadLetter.cs b/src/core/Akka/Event/DeadLetter.cs index 84e6771a88d..a45020843e0 100644 --- a/src/core/Akka/Event/DeadLetter.cs +++ b/src/core/Akka/Event/DeadLetter.cs @@ -10,7 +10,8 @@ namespace Akka.Event { /// - /// Class DeadLetter. + /// Represents a message that could not be delivered to it's recipient. + /// This message wraps the original message, the sender and the intended recipient of the message. /// public class DeadLetter { @@ -28,21 +29,21 @@ public DeadLetter(object message, IActorRef sender, IActorRef recipient) } /// - /// Gets the message. 
+ /// Gets the original message that could not be delivered. /// /// The message. public object Message { get; private set; } /// - /// Gets the recipient. + /// Gets the recipient of the message. /// - /// The recipient. + /// The recipient of the message. public IActorRef Recipient { get; private set; } /// - /// Gets the sender. + /// Gets the sender of the message. /// - /// The sender. + /// The sender of the message. public IActorRef Sender { get; private set; } public override string ToString() diff --git a/src/core/Akka/Event/DeadLetterListener.cs b/src/core/Akka/Event/DeadLetterListener.cs index 2ada522824e..b8fa82d82e4 100644 --- a/src/core/Akka/Event/DeadLetterListener.cs +++ b/src/core/Akka/Event/DeadLetterListener.cs @@ -11,23 +11,12 @@ namespace Akka.Event { /// - /// Class DeadLetterListener. + /// Actor responsible for listening to DeadLetter messages and logging them using the EventStream. /// public class DeadLetterListener : ActorBase { - /// - /// The event stream - /// private readonly EventStream _eventStream = Context.System.EventStream; - - /// - /// The maximum count - /// private readonly int _maxCount = Context.System.Settings.LogDeadLetters; - - /// - /// The count - /// private int _count; protected override void PostRestart(Exception reason) @@ -44,18 +33,17 @@ protected override void PostStop() _eventStream.Unsubscribe(Self); } - /// - /// Processor for user defined messages. - /// - /// The message. protected override bool Receive(object message) { var deadLetter = (DeadLetter)message; - IActorRef snd = deadLetter.Sender; - IActorRef rcp = deadLetter.Recipient; + var snd = deadLetter.Sender; + var rcp = deadLetter.Recipient; + _count++; - bool done = _maxCount != int.MaxValue && _count >= _maxCount; - string doneMsg = done ? ", no more dead letters will be logged" : ""; + + var done = _maxCount != int.MaxValue && _count >= _maxCount; + var doneMsg = done ? ", no more dead letters will be logged" : ""; + if (!done) { var rcpPath = rcp == ActorRefs.NoSender ? "NoSender" : rcp.Path.ToString(); @@ -65,10 +53,12 @@ protected override bool Receive(object message) string.Format("Message {0} from {1} to {2} was not delivered. {3} dead letters encountered.{4}", deadLetter.Message.GetType().Name, sndPath, rcpPath, _count, doneMsg))); } + if (done) { - ((IInternalActorRef)Self).Stop(); + ((IInternalActorRef) Self).Stop(); } + return true; } } diff --git a/src/core/Akka/Event/Debug.cs b/src/core/Akka/Event/Debug.cs index 471423eaf1f..a24a871bbb6 100644 --- a/src/core/Akka/Event/Debug.cs +++ b/src/core/Akka/Event/Debug.cs @@ -10,12 +10,12 @@ namespace Akka.Event { /// - /// Class Debug. + /// Represents an Debug log event. /// public class Debug : LogEvent { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// The log source. /// The log class. @@ -27,10 +27,6 @@ public Debug(string logSource, Type logClass, object message) Message = message; } - /// - /// Logs the level. - /// - /// LogLevel. public override LogLevel LogLevel() { return Event.LogLevel.DebugLevel; diff --git a/src/core/Akka/Event/DefaultLogMessageFormatter.cs b/src/core/Akka/Event/DefaultLogMessageFormatter.cs index 730fb83ab59..8f9610b6e15 100644 --- a/src/core/Akka/Event/DefaultLogMessageFormatter.cs +++ b/src/core/Akka/Event/DefaultLogMessageFormatter.cs @@ -7,8 +7,17 @@ namespace Akka.Event { + /// + /// Default implementation of the ILogMessageFormatter that uses string.Format to format a log message. 
+ /// public class DefaultLogMessageFormatter : ILogMessageFormatter { + /// + /// Formats the log message using string.Format providing the format and specified args. + /// + /// The format string of the message. + /// The arguments used to format the message. + /// public string Format(string format, params object[] args) { return string.Format(format, args); diff --git a/src/core/Akka/Event/DefaultLogger.cs b/src/core/Akka/Event/DefaultLogger.cs index 07b3bf37a32..cc7370a6e2c 100644 --- a/src/core/Akka/Event/DefaultLogger.cs +++ b/src/core/Akka/Event/DefaultLogger.cs @@ -10,14 +10,10 @@ namespace Akka.Event { /// - /// Class DefaultLogger. + /// Default logger implementation that outputs logs to the Console. /// public class DefaultLogger : ActorBase { - /// - /// Processor for user defined messages. - /// - /// The message. protected override bool Receive(object message) { if(message is InitializeLogger) @@ -26,14 +22,17 @@ protected override bool Receive(object message) return true; } var logEvent = message as LogEvent; - if(logEvent != null) - { - Print(logEvent); - return true; - } - return false; + if (logEvent == null) + return false; + + Print(logEvent); + return true; } + /// + /// Print the specified log event. + /// + /// The log event that is to be output. protected virtual void Print(LogEvent logEvent) { StandardOutLogger.PrintLogEvent(logEvent); diff --git a/src/core/Akka/Event/Error.cs b/src/core/Akka/Event/Error.cs index 80ecf16b67d..bcaa1cd3954 100644 --- a/src/core/Akka/Event/Error.cs +++ b/src/core/Akka/Event/Error.cs @@ -9,14 +9,13 @@ namespace Akka.Event { - /// - /// Class Error. + /// Represents an Error log event. /// public class Error : LogEvent { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// The cause. /// The log source. @@ -31,15 +30,11 @@ public Error(Exception cause, string logSource, Type logClass, object message) } /// - /// Gets the cause. + /// Gets the cause of the error. /// /// The cause. public Exception Cause { get; private set; } - /// - /// Logs the level. - /// - /// LogLevel. public override LogLevel LogLevel() { return Event.LogLevel.ErrorLevel; diff --git a/src/core/Akka/Event/EventBus.cs b/src/core/Akka/Event/EventBus.cs index 874c02dc943..cb4002339fd 100644 --- a/src/core/Akka/Event/EventBus.cs +++ b/src/core/Akka/Event/EventBus.cs @@ -13,23 +13,17 @@ namespace Akka.Event { /// - /// Class EventBus. + /// Represents the base event bus, internally manages subscriptions using the event type, classifier type and subscriber type. /// - /// The type of the t event. - /// The type of the t classifier. - /// The type of the t subscriber. + /// The type of the event. + /// The type of the classifier. + /// The type of the subscriber. public abstract class EventBus { - /// - /// The classifiers - /// - private readonly Dictionary>> classifiers = + private readonly Dictionary>> _classifiers = new Dictionary>>(); - /// - /// The cache - /// - private volatile ConcurrentDictionary> cache = + private volatile ConcurrentDictionary> _cache = new ConcurrentDictionary>(); /// @@ -60,21 +54,22 @@ protected string SimpleName(Type source) /// true if XXXX, false otherwise. 
public virtual bool Subscribe(TSubscriber subscriber, TClassifier classifier) { - lock (classifiers) + lock (_classifiers) { List> subscribers; - if (!classifiers.TryGetValue(classifier, out subscribers)) + if (!_classifiers.TryGetValue(classifier, out subscribers)) { subscribers = new List>(); - classifiers.Add(classifier, subscribers); + _classifiers.Add(classifier, subscribers); } + //already subscribed if (subscribers.Any(s => s.Subscriber.Equals(subscriber))) return false; var subscription = new Subscription(subscriber); - subscribers.Add(subscription); + ClearCache(); return true; } @@ -87,18 +82,20 @@ public virtual bool Subscribe(TSubscriber subscriber, TClassifier classifier) /// true if XXXX, false otherwise. public virtual bool Unsubscribe(TSubscriber subscriber) { - lock (classifiers) + lock (_classifiers) { - bool res = false; - List> subscribers; - foreach (TClassifier classifier in classifiers.Keys) + var res = false; + + foreach (var classifier in _classifiers.Keys) { - if (classifiers.TryGetValue(classifier, out subscribers)) - { - if (subscribers.RemoveAll(s => s.Subscriber.Equals(subscriber)) > 0) - res = true; - } + List> subscribers; + if (!_classifiers.TryGetValue(classifier, out subscribers)) + continue; + + if (subscribers.RemoveAll(s => s.Subscriber.Equals(subscriber)) > 0) + res = true; } + ClearCache(); return res; } @@ -112,32 +109,32 @@ public virtual bool Unsubscribe(TSubscriber subscriber) /// true if XXXX, false otherwise. public virtual bool Unsubscribe(TSubscriber subscriber, TClassifier classifier) { - lock (classifiers) + lock (_classifiers) { - bool res = false; + var res = false; + List> subscribers; - if (classifiers.TryGetValue(classifier, out subscribers)) + if (_classifiers.TryGetValue(classifier, out subscribers)) { if (subscribers.RemoveAll(s => s.Subscriber.Equals(subscriber)) > 0) res = true; } else { - foreach (var kvp in classifiers) + foreach (var kvp in _classifiers) { - if (IsSubClassification(kvp.Key, classifier)) + if (!IsSubClassification(kvp.Key, classifier)) + continue; + + var subscriptions = kvp.Value.Where(ss => ss.Subscriber.Equals(subscriber)).ToList(); + foreach (var existingSubscriber in subscriptions) { - List> s = kvp.Value; - List> subscriptions = - s.Where(ss => ss.Subscriber.Equals(subscriber)).ToList(); - foreach (var existingSubscriber in subscriptions) - { - existingSubscriber.Unsubscriptions.Add(classifier); - res = true; - } + existingSubscriber.Unsubscriptions.Add(classifier); + res = true; } } } + ClearCache(); return res; } @@ -148,7 +145,7 @@ public virtual bool Unsubscribe(TSubscriber subscriber, TClassifier classifier) /// private void ClearCache() { - cache = new ConcurrentDictionary>(); + _cache = new ConcurrentDictionary>(); } /// @@ -187,10 +184,10 @@ private void ClearCache() /// The event. public virtual void Publish(TEvent @event) { - TClassifier eventClass = GetClassifier(@event); + var eventClass = GetClassifier(@event); List cachedSubscribers; - if (cache.TryGetValue(eventClass, out cachedSubscribers)) + if (_cache.TryGetValue(eventClass, out cachedSubscribers)) { PublishToSubscribers(@event, cachedSubscribers); } @@ -208,7 +205,7 @@ public virtual void Publish(TEvent @event) /// The cached subscribers. 
private void PublishToSubscribers(TEvent @event, List cachedSubscribers) { - foreach (TSubscriber subscriber in cachedSubscribers) + foreach (var subscriber in cachedSubscribers) { Publish(@event, subscriber); } @@ -222,27 +219,31 @@ private void PublishToSubscribers(TEvent @event, List cachedSubscri /// List{`2}. private List UpdateCacheForEventClassifier(TEvent @event, TClassifier eventClass) { - lock (classifiers) + lock (_classifiers) { var cachedSubscribers = new HashSet(); - foreach (var kvp in classifiers) + + foreach (var kvp in _classifiers) { - TClassifier classifier = kvp.Key; - List> set = kvp.Value; - if (Classify(@event, classifier)) + var classifier = kvp.Key; + var set = kvp.Value; + + if (!Classify(@event, classifier)) + continue; + + foreach (var subscriber in set) { - foreach (var subscriber in set) - { - if (subscriber.Unsubscriptions.Any(u => IsSubClassification(u, eventClass))) - continue; + if (subscriber.Unsubscriptions.Any(u => IsSubClassification(u, eventClass))) + continue; - cachedSubscribers.Add(subscriber.Subscriber); - } + cachedSubscribers.Add(subscriber.Subscriber); } } + //finds a distinct list of subscribers for the given event type - List list = cachedSubscribers.ToList(); - cache[eventClass] = list; + var list = cachedSubscribers.ToList(); + _cache[eventClass] = list; + return list; } } diff --git a/src/core/Akka/Event/EventStream.cs b/src/core/Akka/Event/EventStream.cs index cf00abea456..91f6bf6cbb9 100644 --- a/src/core/Akka/Event/EventStream.cs +++ b/src/core/Akka/Event/EventStream.cs @@ -11,14 +11,19 @@ namespace Akka.Event { /// - /// Class EventStream. + /// The EventStream is a pub-sub stream of events that can be both system and user generated. + /// + /// The subscribers are IActorRef instances and events can be any object. Subscriptions are hierarchical meaning that if you listen to + /// an event for a particular type you will receive events for that type and any sub types. + /// + /// If the debug flag is activated any operations on the event stream will be published as debug level events. /// public class EventStream : LoggingBus { /// - /// Determines if subscription logging is enabled + /// Determines if subscription logging is enabled. /// - private readonly bool debug; + private readonly bool _debug; /// /// Initializes a new instance of the class. @@ -26,7 +31,7 @@ public class EventStream : LoggingBus /// if set to true [debug]. public EventStream(bool debug) { - this.debug = debug; + _debug = debug; } /// @@ -34,15 +39,18 @@ public EventStream(bool debug) /// /// The subscriber. /// The channel. - /// true if XXXX, false otherwise. + /// true if subscription was successful, false otherwise. /// subscriber public override bool Subscribe(IActorRef subscriber, Type channel) { if (subscriber == null) throw new ArgumentNullException("subscriber"); - if (debug) + if (_debug) + { Publish(new Debug(SimpleName(this), GetType(), "subscribing " + subscriber + " to channel " + channel)); + } + return base.Subscribe(subscriber, channel); } @@ -51,35 +59,38 @@ public override bool Subscribe(IActorRef subscriber, Type channel) /// /// The subscriber. /// The channel. - /// true if XXXX, false otherwise. + /// true if unsubscription was successful, false otherwise. 
/// subscriber public override bool Unsubscribe(IActorRef subscriber, Type channel) { if (subscriber == null) throw new ArgumentNullException("subscriber"); - bool res = base.Unsubscribe(subscriber, channel); - if (debug) - Publish(new Debug(SimpleName(this), GetType(), - "unsubscribing " + subscriber + " from channel " + channel)); - return res; + if (_debug) + { + Publish(new Debug(SimpleName(this), GetType(), "unsubscribing " + subscriber + " from channel " + channel)); + } + + return base.Unsubscribe(subscriber, channel); } /// /// Unsubscribes the specified subscriber. /// /// The subscriber. - /// true if XXXX, false otherwise. + /// true if unsubscription was successful, false otherwise. /// subscriber public override bool Unsubscribe(IActorRef subscriber) { if (subscriber == null) throw new ArgumentNullException("subscriber"); - bool res = base.Unsubscribe(subscriber); - if (debug) + if (_debug) + { Publish(new Debug(SimpleName(this), GetType(), "unsubscribing " + subscriber + " from all channels")); - return res; + } + + return base.Unsubscribe(subscriber); } } } diff --git a/src/core/Akka/Event/ILogMessageFormatter.cs b/src/core/Akka/Event/ILogMessageFormatter.cs index b5b7327156a..3bb63c29f2b 100644 --- a/src/core/Akka/Event/ILogMessageFormatter.cs +++ b/src/core/Akka/Event/ILogMessageFormatter.cs @@ -7,8 +7,17 @@ namespace Akka.Event { + /// + /// Represents a log message formatter, these are used to format log messages based on a string format and an array of format args. + /// public interface ILogMessageFormatter { + /// + /// Format the specified format string using the format args. + /// + /// The format string of the message. + /// The format args used to format the message. + /// string Format(string format, params object[] args); } } diff --git a/src/core/Akka/Event/ILoggingAdapter.cs b/src/core/Akka/Event/ILoggingAdapter.cs index d7d95fa96e0..1493f072f5f 100644 --- a/src/core/Akka/Event/ILoggingAdapter.cs +++ b/src/core/Akka/Event/ILoggingAdapter.cs @@ -10,9 +10,8 @@ namespace Akka.Event { /// - /// Capable of logging + /// Provides a logging adapter used to log events within the system. /// - // ReSharper disable once InconsistentNaming public interface ILoggingAdapter { /// Returns true if Debug level is enabled. @@ -30,7 +29,6 @@ public interface ILoggingAdapter /// Returns true if the specified level is enabled. bool IsEnabled(LogLevel logLevel); - /// Logs a message with the Debug level. /// The format. /// The arguments. @@ -46,10 +44,6 @@ public interface ILoggingAdapter /// The arguments. void Warning(string format, params object[] args); - [Obsolete("Use Warning instead!")] - void Warn(string format, params object[] args); - - /// Logs a message with the Error level. /// The format. /// The arguments. diff --git a/src/core/Akka/Event/Info.cs b/src/core/Akka/Event/Info.cs index 814d3b6db5a..2363a7b84a7 100644 --- a/src/core/Akka/Event/Info.cs +++ b/src/core/Akka/Event/Info.cs @@ -10,12 +10,12 @@ namespace Akka.Event { /// - /// Class Info. + /// Represents an Info log event. /// public class Info : LogEvent { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// The log source. /// The log class. @@ -27,10 +27,6 @@ public Info(string logSource, Type logClass, object message) Message = message; } - /// - /// Logs the level. - /// - /// LogLevel. 
public override LogLevel LogLevel() { return Event.LogLevel.InfoLevel; diff --git a/src/core/Akka/Event/InitializeLogger.cs b/src/core/Akka/Event/InitializeLogger.cs index 24808dafe95..390b6f123f7 100644 --- a/src/core/Akka/Event/InitializeLogger.cs +++ b/src/core/Akka/Event/InitializeLogger.cs @@ -10,12 +10,12 @@ namespace Akka.Event { /// - /// Class InitializeLogger. + /// Message used to initialize a logger. /// public class InitializeLogger : INoSerializationVerificationNeeded { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the message. /// /// The logging bus. public InitializeLogger(LoggingBus loggingBus) @@ -24,9 +24,9 @@ public InitializeLogger(LoggingBus loggingBus) } /// - /// Gets the logging bus. + /// Gets the logging bus instance. /// - /// The logging bus. + /// The logging bus instance. public LoggingBus LoggingBus { get; private set; } } } diff --git a/src/core/Akka/Event/LogEvent.cs b/src/core/Akka/Event/LogEvent.cs index 9a6c8cb345f..65c6c00e8ea 100644 --- a/src/core/Akka/Event/LogEvent.cs +++ b/src/core/Akka/Event/LogEvent.cs @@ -12,59 +12,59 @@ namespace Akka.Event { /// - /// Class LogEvent. + /// Represents a LogEvent in the system. /// public abstract class LogEvent : INoSerializationVerificationNeeded { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// - public LogEvent() + protected LogEvent() { Timestamp = DateTime.Now; Thread = Thread.CurrentThread; } /// - /// Gets the timestamp. + /// Gets the timestamp of this LogEvent. /// /// The timestamp. public DateTime Timestamp { get; private set; } /// - /// Gets the thread. + /// Gets the thread of this LogEvent. /// /// The thread. public Thread Thread { get; private set; } /// - /// Gets or sets the log source. + /// Gets the log source of this LogEvent. /// /// The log source. public string LogSource { get; protected set; } /// - /// Gets or sets the log class. + /// Gets the log class of this LogEvent. /// /// The log class. public Type LogClass { get; protected set; } /// - /// Gets or sets the message. + /// Gets the message of this LogEvent. /// /// The message. public object Message { get; protected set; } /// - /// Logs the level. + /// Gets the specified LogLevel for this LogEvent. /// /// LogLevel. public abstract LogLevel LogLevel(); /// - /// Returns a that represents this instance. + /// Returns a that represents this LogEvent. /// - /// A that represents this instance. + /// A that represents this LogEvent. public override string ToString() { return string.Format("[{0}][{1}][Thread {2}][{3}] {4}", LogLevel().ToString().Replace("Level", "").ToUpperInvariant(), Timestamp, Thread.ManagedThreadId.ToString().PadLeft(4, '0'), LogSource, Message); diff --git a/src/core/Akka/Event/LogLevel.cs b/src/core/Akka/Event/LogLevel.cs index 867daffa530..d502ccfc41c 100644 --- a/src/core/Akka/Event/LogLevel.cs +++ b/src/core/Akka/Event/LogLevel.cs @@ -8,27 +8,27 @@ namespace Akka.Event { /// - /// Enum LogLevel + /// Enumeration representing the various log levels in the system. /// public enum LogLevel { /// - /// The debug level + /// The debug log level. /// DebugLevel, /// - /// The information level + /// The information log level. /// InfoLevel, /// - /// The warning level + /// The warning log level. /// WarningLevel, /// - /// The error level + /// The error log level. 
/// ErrorLevel, } diff --git a/src/core/Akka/Event/LogMessage.cs b/src/core/Akka/Event/LogMessage.cs index bd5bfd4ea1d..729a4091d20 100644 --- a/src/core/Akka/Event/LogMessage.cs +++ b/src/core/Akka/Event/LogMessage.cs @@ -7,13 +7,29 @@ namespace Akka.Event { + /// + /// Represents a log message which is composed of a format string and format args. + /// public class LogMessage { private readonly ILogMessageFormatter _formatter; + /// + /// Gets the format string of this log message. + /// public string Format { get; private set; } + + /// + /// Gets the format args of this log message. + /// public object[] Args { get; private set; } + /// + /// Initializes an instance of the LogMessage with the specified formatter, format and args. + /// + /// The formatter for the LogMessage. + /// The string format of the LogMessage. + /// The format args of the LogMessage. public LogMessage(ILogMessageFormatter formatter, string format, params object[] args) { _formatter = formatter; diff --git a/src/core/Akka/Event/LoggerInitialized.cs b/src/core/Akka/Event/LoggerInitialized.cs index ca5b77ab727..80b5379efb5 100644 --- a/src/core/Akka/Event/LoggerInitialized.cs +++ b/src/core/Akka/Event/LoggerInitialized.cs @@ -10,7 +10,7 @@ namespace Akka.Event { /// - /// Class LoggerInitialized. + /// Message used to notify that a logger has been initialized. /// public class LoggerInitialized : INoSerializationVerificationNeeded { diff --git a/src/core/Akka/Event/Logging.cs b/src/core/Akka/Event/Logging.cs index 985353f7caf..6b50c0ea32a 100644 --- a/src/core/Akka/Event/Logging.cs +++ b/src/core/Akka/Event/Logging.cs @@ -17,7 +17,7 @@ namespace Akka.Event public class DummyClassForStringSources { } /// - /// Class Logging. + /// Provides the functionality for creating logger instances and helpers for converting to/from LogLevel values. /// public static class Logging { @@ -27,12 +27,12 @@ public static class Logging private const string Error = "ERROR"; /// - /// The standard out logger + /// Returns an instance of the standard out logger. /// public static readonly StandardOutLogger StandardOutLogger = new StandardOutLogger(); /// - /// Classes for. + /// Classes for. /// /// The log level. /// Type. @@ -54,9 +54,14 @@ public static Type ClassFor(this LogLevel logLevel) } } + /// + /// Returns the string representation for a particular LogLevel. + /// + /// The log level to get the string representation for. + /// + /// public static string StringFor(this LogLevel logLevel) { - switch (logLevel) { case LogLevel.DebugLevel: @@ -73,11 +78,11 @@ public static string StringFor(this LogLevel logLevel) } /// - /// Gets the logger. + /// Gets an instance of the logger. /// - /// The cell. + /// The context. /// The log message formatter. - /// ILoggingAdapter. + /// A logging adapter instance. public static ILoggingAdapter GetLogger(this IActorContext context, ILogMessageFormatter logMessageFormatter = null) { var logSource = context.Self.ToString(); @@ -87,17 +92,24 @@ public static ILoggingAdapter GetLogger(this IActorContext context, ILogMessageF } /// - /// Gets the logger. + /// Gets an instance of the logger. /// - /// The system. + /// The actor system. /// The log source object. /// The log message formatter. - /// ILoggingAdapter. + /// A logging adapter instance. public static ILoggingAdapter GetLogger(ActorSystem system, object logSourceObj, ILogMessageFormatter logMessageFormatter = null) { return GetLogger(system.EventStream, logSourceObj, logMessageFormatter); } + /// + /// Gets an instance of the logger. 
+ /// + /// The logging bus this logger will write to. + /// The log source object. + /// The log message formatter. + /// A logging adapter instance. public static ILoggingAdapter GetLogger(LoggingBus loggingBus, object logSourceObj, ILogMessageFormatter logMessageFormatter = null) { //TODO: refine this @@ -120,10 +132,10 @@ public static ILoggingAdapter GetLogger(LoggingBus loggingBus, object logSourceO } /// - /// Logs the level for. + /// Gets the LogLevel for a particular log level string. /// - /// The log level. - /// LogLevel. + /// The log level string. + /// The LogLevel mapping to the string based log level. /// Unknown LogLevel;logLevel public static LogLevel LogLevelFor(string logLevel) { diff --git a/src/core/Akka/Event/LoggingAdapterBase.cs b/src/core/Akka/Event/LoggingAdapterBase.cs index 266bdaad74a..bc46e1049ed 100644 --- a/src/core/Akka/Event/LoggingAdapterBase.cs +++ b/src/core/Akka/Event/LoggingAdapterBase.cs @@ -9,37 +9,29 @@ namespace Akka.Event { + /// + /// Represents a base logging adapter implementation which can be used by logging adapter implementations. + /// public abstract class LoggingAdapterBase : ILoggingAdapter { private readonly ILogMessageFormatter _logMessageFormatter; - public abstract bool IsDebugEnabled { get; } - - public abstract bool IsErrorEnabled { get; } - - public abstract bool IsInfoEnabled { get; } - - public abstract bool IsWarningEnabled { get; } - protected abstract void NotifyError(object message); - - protected abstract void NotifyError(Exception cause, object message); - - protected abstract void NotifyWarning(object message); - - protected abstract void NotifyInfo(object message); - - protected abstract void NotifyDebug(object message); + /// + /// Creates an instance of the LoggingAdapterBase. + /// + /// The log message formatter used by this logging adapter. + /// protected LoggingAdapterBase(ILogMessageFormatter logMessageFormatter) { if(logMessageFormatter == null) @@ -47,8 +39,7 @@ protected LoggingAdapterBase(ILogMessageFormatter logMessageFormatter) _logMessageFormatter = logMessageFormatter; } - - + public bool IsEnabled(LogLevel logLevel) { switch(logLevel) @@ -66,7 +57,12 @@ public bool IsEnabled(LogLevel logLevel) } } - + /// + /// Handles logging a log event for a particular level if that level is enabled. + /// + /// The log level of the log event. + /// The log message of the log event. 
+ /// protected void NotifyLog(LogLevel logLevel, object message) { switch(logLevel) @@ -87,67 +83,92 @@ protected void NotifyLog(LogLevel logLevel, object message) throw new NotSupportedException("Unknown LogLevel " + logLevel); } } - - + public void Debug(string format, params object[] args) { - if(IsDebugEnabled) + if (!IsDebugEnabled) + return; + + if (args == null || args.Length == 0) { - if(args == null || args.Length == 0) - NotifyDebug(format); - else - NotifyDebug(new LogMessage(_logMessageFormatter, format, args)); + NotifyDebug(format); + } + else + { + NotifyDebug(new LogMessage(_logMessageFormatter, format, args)); } - } - - [Obsolete("Use Warning instead")] - public void Warn(string format, params object[] args) - { - Warning(format, args); } public void Warning(string format, params object[] args) { - if(IsWarningEnabled) - if(args == null || args.Length == 0) - NotifyWarning(format); - else - NotifyWarning(new LogMessage(_logMessageFormatter, format, args)); + if (!IsWarningEnabled) + return; + + if (args == null || args.Length == 0) + { + NotifyWarning(format); + } + else + { + NotifyWarning(new LogMessage(_logMessageFormatter, format, args)); + } } public void Error(Exception cause, string format, params object[] args) { - if(IsErrorEnabled) - if(args == null || args.Length == 0) - NotifyError(cause, format); - else - NotifyError(cause, new LogMessage(_logMessageFormatter, format, args)); + if (!IsErrorEnabled) + return; + + if (args == null || args.Length == 0) + { + NotifyError(cause, format); + } + else + { + NotifyError(cause, new LogMessage(_logMessageFormatter, format, args)); + } } public void Error(string format, params object[] args) { - if(IsErrorEnabled) - if(args == null || args.Length == 0) - NotifyError(format); - else - NotifyError(new LogMessage(_logMessageFormatter, format, args)); + if (!IsErrorEnabled) + return; + + if (args == null || args.Length == 0) + { + NotifyError(format); + } + else + { + NotifyError(new LogMessage(_logMessageFormatter, format, args)); + } } public void Info(string format, params object[] args) { - if(IsInfoEnabled) - if(args == null || args.Length == 0) - NotifyInfo(format); - else - NotifyInfo(new LogMessage(_logMessageFormatter, format, args)); + if (!IsInfoEnabled) + return; + + if (args == null || args.Length == 0) + { + NotifyInfo(format); + } + else + { + NotifyInfo(new LogMessage(_logMessageFormatter, format, args)); + } } public void Log(LogLevel logLevel, string format, params object[] args) { - if(args == null || args.Length == 0) + if (args == null || args.Length == 0) + { NotifyLog(logLevel, format); + } else + { NotifyLog(logLevel, new LogMessage(_logMessageFormatter, format, args)); + } } } } diff --git a/src/core/Akka/Event/LoggingBus.cs b/src/core/Akka/Event/LoggingBus.cs index 0f401bbc37c..92fc636a155 100644 --- a/src/core/Akka/Event/LoggingBus.cs +++ b/src/core/Akka/Event/LoggingBus.cs @@ -17,24 +17,23 @@ namespace Akka.Event { /// - /// Class LoggingBus. + /// Represents a logging bus which subscribes loggers to the system LogEvents for the desired minimum level. 
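// A minimal usage sketch (hypothetical actor, not taken from this changeset): the level-guarded
// logging methods above mean callers can log unconditionally. Warning(...) returns immediately
// unless IsWarningEnabled is true; otherwise it wraps the format string and args in a LogMessage
// and hands it to NotifyWarning, so formatting cost is only paid when the level is enabled.
using Akka.Actor;
using Akka.Event;

public class AuditActor : ReceiveActor
{
    private readonly ILoggingAdapter _log = Context.GetLogger();

    public AuditActor()
    {
        // Format string and args are deferred into a LogMessage; the attached logger formats them.
        Receive<string>(msg => _log.Warning("Unexpected string message: {0}", msg));
    }
}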
/// public class LoggingBus : ActorEventBus { - private static int _loggerId = 0; - private static readonly LogLevel[] _allLogLevels = Enum.GetValues(typeof(LogLevel)).Cast().ToArray(); - private readonly List _loggers = new List(); + private static readonly LogLevel[] AllLogLevels = Enum.GetValues(typeof(LogLevel)).Cast().ToArray(); - private LogLevel _logLevel; + private static int _loggerId; + private readonly List _loggers = new List(); /// - /// Gets the log level. + /// Gets the minimum log level that this LoggingBus will subscribe to, any LogEvents with a log level below will not be subscribed to. /// /// The log level. - public LogLevel LogLevel { get { return _logLevel; } } + public LogLevel LogLevel { get; private set; } /// - /// Determines whether [is sub classification] [the specified parent]. + /// Determines whether [is sub classification] [the specified parent]. /// /// The parent. /// The child. @@ -45,7 +44,7 @@ protected override bool IsSubClassification(Type parent, Type child) } /// - /// Publishes the specified event. + /// Publishes the specified event. /// /// The event. /// The subscriber. @@ -55,7 +54,7 @@ protected override void Publish(object @event, IActorRef subscriber) } /// - /// Classifies the specified event. + /// Classifies the specified event. /// /// The event. /// The classifier. @@ -66,7 +65,7 @@ protected override bool Classify(object @event, Type classifier) } /// - /// Gets the classifier. + /// Gets the classifier for the LogEvent. /// /// The event. /// Type. @@ -76,30 +75,32 @@ protected override Type GetClassifier(object @event) } /// - /// Starts the default loggers. + /// Starts the default loggers. /// /// The system. /// Can not use logger of type: + loggerType - public void StartDefaultLoggers(ActorSystemImpl system) //TODO: Should be internal + internal void StartDefaultLoggers(ActorSystemImpl system) { var logName = SimpleName(this) + "(" + system.Name + ")"; var logLevel = Logging.LogLevelFor(system.Settings.LogLevel); var loggerTypes = system.Settings.Loggers; var timeout = system.Settings.LoggerStartTimeout; var shouldRemoveStandardOutLogger = true; + foreach (var strLoggerType in loggerTypes) { var loggerType = Type.GetType(strLoggerType); - if (loggerType == null) { throw new ConfigurationException("Logger specified in config cannot be found: \"" + strLoggerType + "\""); } + if (loggerType == typeof(StandardOutLogger)) { shouldRemoveStandardOutLogger = false; continue; } + try { AddLogger(system, loggerType, logLevel, logName, timeout); @@ -109,7 +110,8 @@ public void StartDefaultLoggers(ActorSystemImpl system) //TODO: Should be intern throw new ConfigurationException(string.Format("Logger [{0}] specified in config cannot be loaded: {1}", strLoggerType, e),e); } } - _logLevel = logLevel; + + LogLevel = logLevel; if (system.Settings.DebugUnhandledMessage) { @@ -122,6 +124,7 @@ public void StartDefaultLoggers(ActorSystemImpl system) //TODO: Should be intern Publish(new Debug(logName, GetType(), "StandardOutLogger being removed")); Unsubscribe(Logging.StandardOutLogger); } + Publish(new Debug(logName, GetType(), "Default Loggers started")); } @@ -134,7 +137,6 @@ private void AddLogger(ActorSystemImpl system, Type loggerType, LogLevel logLeve { var loggerName = CreateLoggerName(loggerType); var logger = system.SystemActorOf(Props.Create(loggerType), loggerName); - var askTask = logger.Ask(new InitializeLogger(this)); if (!askTask.Wait(timeout)) @@ -149,6 +151,7 @@ private void AddLogger(ActorSystemImpl system, Type loggerType, LogLevel 
logLeve { throw new LoggerInitializationException(string.Format("Logger {0} [{2}] did not respond with LoggerInitialized, sent instead {1}", loggerName, response, loggerType.FullName)); } + _loggers.Add(logger); SubscribeLogLevelAndAbove(logLevel, logger); Publish(new Debug(loggingBusName, GetType(), string.Format("Logger {0} [{1}] started", loggerName, loggerType.Name))); @@ -163,7 +166,7 @@ private string CreateLoggerName(Type actorClass) } /// - /// Starts the stdout logger. + /// Starts the StandardOutLogger logger. /// /// The configuration. public void StartStdoutLogger(Settings config) @@ -173,7 +176,7 @@ public void StartStdoutLogger(Settings config) } /// - /// Sets up stdout logger. + /// Sets up StandardOutLogger logger. /// /// The configuration. private void SetUpStdoutLogger(Settings config) @@ -183,21 +186,22 @@ private void SetUpStdoutLogger(Settings config) } /// - /// Sets the log level. + /// Sets the minimum log level for the LoggingBus, any LogEvents below this level will not be listened to. /// /// The log level. public void SetLogLevel(LogLevel logLevel) { - _logLevel = logLevel; - foreach (IActorRef logger in _loggers) + LogLevel = logLevel; + + foreach (var logger in _loggers) { //subscribe to given log level and above SubscribeLogLevelAndAbove(logLevel, logger); //unsubscribe to all levels below loglevel - foreach (LogLevel level in _allLogLevels.Where(l => l < logLevel)) + foreach (var level in AllLogLevels.Where(l => l < logLevel)) { - Unsubscribe(logger, Logging.ClassFor(level)); + Unsubscribe(logger, level.ClassFor()); } } } @@ -205,9 +209,9 @@ public void SetLogLevel(LogLevel logLevel) private void SubscribeLogLevelAndAbove(LogLevel logLevel, IActorRef logger) { //subscribe to given log level and above - foreach (LogLevel level in _allLogLevels.Where(l => l >= logLevel)) + foreach (var level in AllLogLevels.Where(l => l >= logLevel)) { - Subscribe(logger, Logging.ClassFor(level)); + Subscribe(logger, level.ClassFor()); } } @@ -216,22 +220,22 @@ private class UnhandledMessageForwarder : ActorBase protected override bool Receive(object message) { var msg = message as UnhandledMessage; - if (msg != null) - { - Context.System.EventStream.Publish(ToDebug(msg)); - return true; - } + if (msg == null) + return false; - return false; + Context.System.EventStream.Publish(ToDebug(msg)); + return true; } private static Debug ToDebug(UnhandledMessage message) { - var msg = string.Format(CultureInfo.InvariantCulture, "Unhandled message from {0} : {1}", - message.Sender.Path, message.Message); + var msg = string.Format( + CultureInfo.InvariantCulture, "Unhandled message from {0} : {1}", + message.Sender.Path, + message.Message + ); - return new Debug(message.Recipient.Path.ToString(), message.Recipient.GetType(), - msg); + return new Debug(message.Recipient.Path.ToString(), message.Recipient.GetType(), msg); } } } diff --git a/src/core/Akka/Event/StandardOutLogger.cs b/src/core/Akka/Event/StandardOutLogger.cs index 5a0611c56d6..5ab854177f1 100644 --- a/src/core/Akka/Event/StandardOutLogger.cs +++ b/src/core/Akka/Event/StandardOutLogger.cs @@ -12,7 +12,8 @@ namespace Akka.Event { /// - /// Class StandardOutLogger. + /// Represents a logger that logs using the StandardOutWriter. + /// The logger can also be configured to use colors for the various log event types. /// public class StandardOutLogger : MinimalActorRef { @@ -26,11 +27,9 @@ static StandardOutLogger() ErrorColor = ConsoleColor.Red; UseColors = true; } - - - + /// - /// Gets the provider. + /// Gets the provider. 
/// /// The provider. /// StandardOutLogged does not provide @@ -39,13 +38,16 @@ public override IActorRefProvider Provider get { throw new Exception("StandardOutLogger does not provide"); } } + /// + /// Gets the path of this actor. + /// public override ActorPath Path { get { return _path; } } /// - /// Tells the internal. + /// Handles log events printing them to the Console. /// /// The message. /// The sender. @@ -54,25 +56,51 @@ protected override void TellInternal(object message, IActorRef sender) { if(message == null) throw new ArgumentNullException("message"); + var logEvent = message as LogEvent; - if(logEvent != null) + if (logEvent != null) + { PrintLogEvent(logEvent); + } else + { Console.WriteLine(message); + } } - - - - + + /// + /// Gets or Sets the color of Debug events. + /// public static ConsoleColor DebugColor { get; set; } + + /// + /// Gets or Sets the color of Info events. + /// public static ConsoleColor InfoColor { get; set; } + + /// + /// Gets or Sets the color of Warning events. + /// public static ConsoleColor WarningColor { get; set; } + + /// + /// Gets or Sets the color of Error events. + /// public static ConsoleColor ErrorColor { get; set; } + + /// + /// Gets or Sets whether or not to use colors when printing events. + /// public static bool UseColors { get; set; } + /// + /// Prints the LogEvent using the StandardOutWriter. + /// + /// public static void PrintLogEvent(LogEvent logEvent) { ConsoleColor? color = null; + if(UseColors) { var logLevel = logEvent.LogLevel(); @@ -92,8 +120,8 @@ public static void PrintLogEvent(LogEvent logEvent) break; } } + StandardOutWriter.WriteLine(logEvent.ToString(), color); } } -} - +} \ No newline at end of file diff --git a/src/core/Akka/Event/Subscription.cs b/src/core/Akka/Event/Subscription.cs index 89ab422647f..afdbe82b890 100644 --- a/src/core/Akka/Event/Subscription.cs +++ b/src/core/Akka/Event/Subscription.cs @@ -10,10 +10,10 @@ namespace Akka.Event { /// - /// Class Subscription. + /// Represents a Subscription to the EventBus. /// - /// The type of the t subscriber. - /// The type of the t classifier. + /// The type of the subscriber. + /// The type of the classifier. public class Subscription { /// @@ -38,12 +38,13 @@ public Subscription(TSubscriber subscriber) } /// - /// Gets the subscriber. + /// Gets the subscriber attached to this subscription. /// /// The subscriber. public TSubscriber Subscriber { get; private set; } + /// - /// Gets the unsubscriptions. + /// Gets the unsubscriptions of this particular subscription. /// /// The unsubscriptions. 
public ISet Unsubscriptions { get; private set; } diff --git a/src/core/Akka/Event/TraceLogger.cs b/src/core/Akka/Event/TraceLogger.cs index 34b688048f8..8f1a0c165d0 100644 --- a/src/core/Akka/Event/TraceLogger.cs +++ b/src/core/Akka/Event/TraceLogger.cs @@ -19,13 +19,12 @@ public class TraceLogger : UntypedActor { protected override void OnReceive(object message) { - PatternMatch.Match(message) + message.Match() .With(m => Sender.Tell(new LoggerInitialized())) - .With(m => - Trace.TraceError(m.ToString())) + .With(m => Trace.TraceError(m.ToString())) .With(m => Trace.TraceWarning(m.ToString())) .With(m => Trace.TraceWarning(string.Format("Deadletter - unable to send message {0} from {1} to {2}", m.Message, m.Sender, m.Sender), typeof(DeadLetter).ToString())) - .With(m => Trace.TraceWarning(string.Format("Unhandled message!"), typeof(UnhandledMessage).ToString())) + .With(m => Trace.TraceWarning("Unhandled message!")) .Default(m => { if (m != null) diff --git a/src/core/Akka/Event/UnhandledMessage.cs b/src/core/Akka/Event/UnhandledMessage.cs index 7e9321c5d41..c42c674b9ee 100644 --- a/src/core/Akka/Event/UnhandledMessage.cs +++ b/src/core/Akka/Event/UnhandledMessage.cs @@ -10,12 +10,12 @@ namespace Akka.Event { /// - /// Class UnhandledMessage. + /// Represents an UnhandledMessage that was not handled by the Recipient. /// public class UnhandledMessage { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// The message. /// The sender. @@ -28,21 +28,21 @@ internal UnhandledMessage(object message, IActorRef sender, IActorRef recipient) } /// - /// Gets the message. + /// Gets the original message that could not be handled. /// /// The message. public object Message { get; private set; } /// - /// Gets the sender. + /// Gets the sender of the message. /// - /// The sender. + /// The sender of the message. public IActorRef Sender { get; private set; } /// - /// Gets the recipient. + /// Gets the recipient of the message. /// - /// The recipient. + /// The recipient of the message. public IActorRef Recipient { get; private set; } } } diff --git a/src/core/Akka/Event/Warning.cs b/src/core/Akka/Event/Warning.cs index 97605a6f27f..90342a81e8c 100644 --- a/src/core/Akka/Event/Warning.cs +++ b/src/core/Akka/Event/Warning.cs @@ -10,12 +10,12 @@ namespace Akka.Event { /// - /// Class Warning. + /// Represents an Warning log event. /// public class Warning : LogEvent { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// /// The log source. /// The log class. @@ -27,10 +27,6 @@ public Warning(string logSource, Type logClass, object message) Message = message; } - /// - /// Logs the level. - /// - /// LogLevel. 
public override LogLevel LogLevel() { return Event.LogLevel.WarningLevel; From 32d2e2779fd252d100c8bf03e52e836ef3771cf7 Mon Sep 17 00:00:00 2001 From: dsmith Date: Fri, 1 May 2015 16:14:51 +0100 Subject: [PATCH 16/66] Implement RemotingTerminator and add RemoteNodeShutdownAndComeBackSpec --- .../Akka.Cluster.Tests.csproj | 1 - .../Akka.Remote.TestKit.csproj | 5 + src/core/Akka.Remote.TestKit/MsgEncoder.cs | 1 + .../MultiNodeFact.cs | 2 +- src/core/Akka.Remote.TestKit/Player.cs | 27 ++- src/core/Akka.Remote.TestKit/packages.config | 1 + .../Akka.Remote.Tests.csproj | 8 + .../RemoteNodeShutdownAndComeBackSpec.cs | 221 ++++++++++++++++++ src/core/Akka.Remote.Tests/packages.config | 1 + src/core/Akka.Remote/EndpointManager.cs | 2 +- .../Akka.Remote/RemoteActorRefProvider.cs | 76 +++++- src/core/Akka.Remote/RemoteDaemon.cs | 35 ++- 12 files changed, 355 insertions(+), 25 deletions(-) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.Remote.TestKit}/MultiNodeFact.cs (96%) create mode 100644 src/core/Akka.Remote.Tests/MultiNode/RemoteNodeShutdownAndComeBackSpec.cs diff --git a/src/core/Akka.Cluster.Tests/Akka.Cluster.Tests.csproj b/src/core/Akka.Cluster.Tests/Akka.Cluster.Tests.csproj index d3dc6f40770..53b01ba87e6 100644 --- a/src/core/Akka.Cluster.Tests/Akka.Cluster.Tests.csproj +++ b/src/core/Akka.Cluster.Tests/Akka.Cluster.Tests.csproj @@ -77,7 +77,6 @@ - diff --git a/src/core/Akka.Remote.TestKit/Akka.Remote.TestKit.csproj b/src/core/Akka.Remote.TestKit/Akka.Remote.TestKit.csproj index e2038c39a8a..190c95b18e7 100644 --- a/src/core/Akka.Remote.TestKit/Akka.Remote.TestKit.csproj +++ b/src/core/Akka.Remote.TestKit/Akka.Remote.TestKit.csproj @@ -44,6 +44,10 @@ ..\..\packages\Newtonsoft.Json.6.0.8\lib\net45\Newtonsoft.Json.dll + + False + ..\..\packages\xunit.1.9.2\lib\net20\xunit.dll + @@ -55,6 +59,7 @@ + diff --git a/src/core/Akka.Remote.TestKit/MsgEncoder.cs b/src/core/Akka.Remote.TestKit/MsgEncoder.cs index 84ff4a85c77..f62f214b1b8 100644 --- a/src/core/Akka.Remote.TestKit/MsgEncoder.cs +++ b/src/core/Akka.Remote.TestKit/MsgEncoder.cs @@ -77,6 +77,7 @@ public void Encode(IConnection connection, object message, out List en w.SetFailure( InjectFailure.CreateBuilder() .SetAddress(Address2Proto(throttle.Target)) + .SetFailure(FailType.Throttle) .SetDirection(Direction2Proto(throttle.Direction)) .SetRateMBit(throttle.RateMBit))) .With( diff --git a/src/core/Akka.Cluster.Tests/MultiNode/MultiNodeFact.cs b/src/core/Akka.Remote.TestKit/MultiNodeFact.cs similarity index 96% rename from src/core/Akka.Cluster.Tests/MultiNode/MultiNodeFact.cs rename to src/core/Akka.Remote.TestKit/MultiNodeFact.cs index 5ad137ad853..a3bcc71bb1c 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/MultiNodeFact.cs +++ b/src/core/Akka.Remote.TestKit/MultiNodeFact.cs @@ -8,7 +8,7 @@ using System; using Xunit; -namespace Akka.Cluster.Tests.MultiNode +namespace Akka.Remote.TestKit { public class MultiNodeFactAttribute : FactAttribute { diff --git a/src/core/Akka.Remote.TestKit/Player.cs b/src/core/Akka.Remote.TestKit/Player.cs index ab420ff4157..ea1e351e085 100644 --- a/src/core/Akka.Remote.TestKit/Player.cs +++ b/src/core/Akka.Remote.TestKit/Player.cs @@ -441,18 +441,18 @@ public void InitFSM() else if (throttleMsg.RateMBit < 0.0f) mode = Blackhole.Instance; else mode = new TokenBucket(1000, throttleMsg.RateMBit*125000, 0, 0); - var cmdTask = - TestConductor.Get(Context.System) - .Transport.ManagementCommand(new SetThrottle(throttleMsg.Target, throttleMsg.Direction, - mode)); + TestConductor.Get(Context.System) + 
.Transport.ManagementCommand(new SetThrottle(throttleMsg.Target, throttleMsg.Direction, + mode)) + .ContinueWith(t => + { + if (t.IsFaulted) + throw new Exception("Throttle was requested from the TestConductor, but no transport " + + "adapters available that support throttling. Specify 'testTransport(on=true)' in your MultiNodeConfig"); + return new ToServer(Done.Instance); + }, TaskContinuationOptions.ExecuteSynchronously & TaskContinuationOptions.AttachedToParent) + .PipeTo(Self); - cmdTask.ContinueWith(t => - { - if (t.IsFaulted) - throw new Exception("Throttle was requested from the TestConductor, but no transport " + - "adapters available that support throttling. Specify 'testTransport(on=true)' in your MultiNodeConfig"); - Self.Tell(new ToServer(Done.Instance)); - }); return Stay(); } if (@event.FsmEvent is DisconnectMsg) @@ -573,7 +573,10 @@ public void OnDisconnect(HeliosConnectionException cause, IConnection closedChan } _fsm.Tell(PoisonPill.Instance); //TODO: Some logic here in JVM version to execute this on a different pool to the Netty IO pool - RemoteConnection.Shutdown(closedChannel); + Task.Run(() => + { + RemoteConnection.Shutdown(closedChannel); + }); } public void OnMessage(object message, IConnection responseChannel) diff --git a/src/core/Akka.Remote.TestKit/packages.config b/src/core/Akka.Remote.TestKit/packages.config index 3c738d71211..65aa900f7f1 100644 --- a/src/core/Akka.Remote.TestKit/packages.config +++ b/src/core/Akka.Remote.TestKit/packages.config @@ -4,4 +4,5 @@ + \ No newline at end of file diff --git a/src/core/Akka.Remote.Tests/Akka.Remote.Tests.csproj b/src/core/Akka.Remote.Tests/Akka.Remote.Tests.csproj index 36d10259cbe..9f63ec3879e 100644 --- a/src/core/Akka.Remote.Tests/Akka.Remote.Tests.csproj +++ b/src/core/Akka.Remote.Tests/Akka.Remote.Tests.csproj @@ -53,6 +53,9 @@ + + ..\..\packages\Microsoft.Bcl.Immutable.1.0.34\lib\portable-net45+win8+wp8+wpa81\System.Collections.Immutable.dll + ..\..\packages\xunit.1.9.2\lib\net20\xunit.dll @@ -77,6 +80,7 @@ + @@ -98,6 +102,10 @@ {11F4D4B8-7E07-4457-ABF2-609B3E7B2649} Akka.TestKit.Xunit + + {e5957c3e-2b1e-469f-a680-7953b4dea31b} + Akka.Remote.TestKit + {EA4FF8FD-7C53-49C8-B9AA-02E458B3E6A7} Akka.Remote diff --git a/src/core/Akka.Remote.Tests/MultiNode/RemoteNodeShutdownAndComeBackSpec.cs b/src/core/Akka.Remote.Tests/MultiNode/RemoteNodeShutdownAndComeBackSpec.cs new file mode 100644 index 00000000000..d9aa100d6ba --- /dev/null +++ b/src/core/Akka.Remote.Tests/MultiNode/RemoteNodeShutdownAndComeBackSpec.cs @@ -0,0 +1,221 @@ +using System; +using Akka.Actor; +using Akka.Configuration; +using Akka.Event; +using Akka.Remote.TestKit; +using Akka.Remote.Transport; +using Akka.Util.Internal; + +namespace Akka.Remote.Tests.MultiNode +{ + public class RemoteNodeShutdownAndComeBackSpecConfig : MultiNodeConfig + { + readonly RoleName _first; + public RoleName First { get { return _first; } } + readonly RoleName _second; + public RoleName Second { get { return _second; } } + + public RemoteNodeShutdownAndComeBackSpecConfig(int port = 0) + { + _first = Role("first"); + _second = Role("second"); + + CommonConfig = + ConfigurationFactory.ParseString(String.Format(@"akka.remote.helios.tcp.port = {0}", port)).WithFallback( + DebugConfig(true).WithFallback( + ConfigurationFactory.ParseString(@" + akka.loglevel = DEBUG + akka.remote.log-remote-lifecycle-events = Debug + ## Keep it tight, otherwise reestablishing a connection takes too much time + akka.remote.transport-failure-detector.heartbeat-interval = 1 s + 
akka.remote.transport-failure-detector.acceptable-heartbeat-pause = 3 s + akka.remote.watch-failure-detector.acceptable-heartbeat-pause = 60 s + akka.remote.gate-invalid-addresses-for = 0.5 s + akka.loglevel = DEBUG + akka.remote { + log-received-messages = on + log-sent-messages = on + } + akka.actor.debug { + receive = on + fsm = on + } + akka.remote.log-remote-lifecycle-events = on + akka.log-dead-letters = on + akka.loggers = [""Akka.Logger.NLog.NLogLogger, Akka.Logger.NLog""] + )"))); + + TestTransport = true; + } + + public class RemoteNodeShutdownAndComesBackMultiNode1 : RemoteNodeShutdownAndComeBackSpec + { + public RemoteNodeShutdownAndComesBackMultiNode1() : base(10000) + { + } + } + + public class RemoteNodeShutdownAndComesBackMultiNode2 : RemoteNodeShutdownAndComeBackSpec + { + public RemoteNodeShutdownAndComesBackMultiNode2() : base(10001) + { + } + } + + public class RemoteNodeShutdownAndComeBackSpec : MultiNodeSpec + { + readonly RemoteNodeShutdownAndComeBackSpecConfig _config; + + protected RemoteNodeShutdownAndComeBackSpec() + : this(new RemoteNodeShutdownAndComeBackSpecConfig()) + { + } + + protected RemoteNodeShutdownAndComeBackSpec(int port) + : this(new RemoteNodeShutdownAndComeBackSpecConfig(port)) + { + } + + protected override int InitialParticipantsValueFactory + { + get { return Roles.Count; } + } + + private RemoteNodeShutdownAndComeBackSpec(RemoteNodeShutdownAndComeBackSpecConfig config) + : base(config) + { + _config = config; + } + + public IActorRef Identify(RoleName role, string actorName) + { + Sys.ActorSelection(Node(role) / "user" / actorName).Tell(new Identify(actorName)); + return ExpectMsg().Subject; + } + + [MultiNodeFact] + public void + RemoteNodeShutDownAndComesBackMustProperlyResetSystemMessageBufferStateWhenNewSystemWithSameAddressComesUp() + { + RunOn(() => + { + var secondAddress = Node(_config.Second).Address; + Sys.ActorOf(Props.Create(() => new Subject()), "subject1"); + EnterBarrier("actors-started"); + + var subject = Identify(_config.Second, "subject"); + var sysMsgBarrier = Identify(_config.Second, "sysmsgBarrier"); + + //Prime up the system message buffer + Watch(subject); + EnterBarrier("watch-established"); + + // Wait for proper system message propagation + // (Using a helper actor to ensure that all previous system messages arrived) + Watch(sysMsgBarrier); + Sys.Stop(sysMsgBarrier); + ExpectTerminated(sysMsgBarrier); + + // Drop all messages from this point so no SHUTDOWN is ever received + TestConductor.Blackhole(_config.Second, _config.First, ThrottleTransportAdapter.Direction.Send) + .Wait(); + // Shut down all existing connections so that the system can enter recovery mode (association attempts) + RARP.For(Sys) + .Provider.Transport.ManagementCommand(new ForceDisassociate(Node(_config.Second).Address)) + .Wait(TimeSpan.FromSeconds(3)); + + // Trigger reconnect attempt and also queue up a system message to be in limbo state (UID of remote system + // is unknown, and system message is pending) + Sys.Stop(subject); + + Log.Info("Shutting down second"); + // Get rid of old system -- now SHUTDOWN is lost + TestConductor.Shutdown(_config.Second).Wait(); + + // At this point the second node is restarting, while the first node is trying to reconnect without resetting + // the system message send state + + // Now wait until second system becomes alive again + AwaitAssert(() => + { + var p = CreateTestProbe(); + Sys.ActorSelection(new RootActorPath(secondAddress) / "user" / "subject").Tell(new Identify("subject"), p.Ref); + p.ExpectMsg(m 
=> (string)m.MessageId == "subject" && m.Subject != null, + TimeSpan.FromSeconds(1)); + }, TimeSpan.FromSeconds(30)); + + ExpectTerminated(subject); + + // Establish watch with the new system. This triggers additional system message traffic. If buffers are out + // of synch the remote system will be quarantined and the rest of the test will fail (or even in earlier + // stages depending on circumstances). + Sys.ActorSelection(new RootActorPath(secondAddress)/"user"/"subject").Tell(new Identify("subject")); + var subjectNew = ExpectMsg().Subject; + Watch(subjectNew); + + subjectNew.Tell("shutdown"); + FishForMessage(m => + { + var terminated = m as Terminated; + if (terminated != null && terminated.ActorRef.Equals(subjectNew)) return true; + return false; + }); + }, _config.First); + + RunOn(() => + { + var addr = Sys.AsInstanceOf().Provider.DefaultAddress; + Sys.ActorOf(Props.Create(() => new Subject()), "subject"); + Sys.ActorOf(Props.Create(() => new Subject()), "sysmsgBarrier"); + var path = Node(_config.First); + EnterBarrier("actors-started"); + + EnterBarrier("watch-established"); + + Sys.AwaitTermination(TimeSpan.FromSeconds(30)); + + var config = String.Format(@" + akka.remote.helios.tcp {{ + hostname = {0} + port = {1} + }} + ", addr.Host, addr.Port); + + var freshSystem = ActorSystem.Create(Sys.Name, ConfigurationFactory.ParseString(config) + .WithFallback(Sys.Settings.Config)); + + var b = freshSystem.ActorOf(Props.Create(() => new Blah())); + + freshSystem.EventStream.Subscribe(b, typeof (DeadLetter)); + + freshSystem.AwaitTermination(TimeSpan.FromSeconds(30)); + }, _config.Second); + } + + class Blah : ReceiveActor + { + private ILoggingAdapter _log = Context.GetLogger(); + + public Blah() + { + Receive(l => _log.Warning("DeadLetter of {0}", l.Message.GetType())); + } + } + + class Subject : UntypedActor + { + protected override void OnReceive(object message) + { + var @string = message as string; + if (@string == "shutdown") + { + Context.System.Shutdown(); + return; + } + + Sender.Tell(message); + } + } + } + } +} diff --git a/src/core/Akka.Remote.Tests/packages.config b/src/core/Akka.Remote.Tests/packages.config index d2eaaa8ea58..cb48ad7e435 100644 --- a/src/core/Akka.Remote.Tests/packages.config +++ b/src/core/Akka.Remote.Tests/packages.config @@ -1,5 +1,6 @@  + \ No newline at end of file diff --git a/src/core/Akka.Remote/EndpointManager.cs b/src/core/Akka.Remote/EndpointManager.cs index 993bd18f030..6a6f1421083 100644 --- a/src/core/Akka.Remote/EndpointManager.cs +++ b/src/core/Akka.Remote/EndpointManager.cs @@ -680,7 +680,7 @@ private Task new RemotingTerminator(_local.SystemGuardian))), + "remoting-terminator"); + + _remotingTerminator.Tell(RemoteInternals); Transport.Start(); _remoteWatcher = CreateRemoteWatcher(system); @@ -442,29 +447,86 @@ enum TerminatorState private class RemotingTerminator : FSM { private readonly IActorRef _systemGuardian; + private readonly ILoggingAdapter _log; public RemotingTerminator(IActorRef systemGuardian) { _systemGuardian = systemGuardian; + _log = Context.GetLogger(); InitFSM(); } private void InitFSM() { - When(TerminatorState.Uninitialized, @event => { - var internals = @event.StateData; + var internals = @event.FsmEvent as Internals; if (internals != null) { - //TODO: add a termination hook to the system guardian + _systemGuardian.Tell(RegisterTerminationHook.Instance); return GoTo(TerminatorState.Idle).Using(internals); } return null; }); + When(TerminatorState.Idle, @event => + { + if (@event.StateData != null && 
@event.FsmEvent is TerminationHook) + { + _log.Info("Shutting down remote daemon."); + @event.StateData.RemoteDaemon.Tell(TerminationHook.Instance); + return GoTo(TerminatorState.WaitDaemonShutdown); + } + return null; + }); + + When(TerminatorState.WaitDaemonShutdown, @event => + { + if (@event.StateData != null && @event.FsmEvent is TerminationHookDone) + { + _log.Info("Remote daemon shut down; proceeding with flushing remote transports."); + @event.StateData.Transport.Shutdown() + .ContinueWith(t => TransportShutdown.Instance, + TaskContinuationOptions.ExecuteSynchronously & TaskContinuationOptions.AttachedToParent) + .PipeTo(Self); + return GoTo(TerminatorState.WaitTransportShutdown); + } + + return null; + }); + + When(TerminatorState.WaitTransportShutdown, @event => + { + if (@event.FsmEvent is TransportShutdown) + { + _log.Info("Remoting shut down."); + _systemGuardian.Tell(TerminationHookDone.Instance); + Stop(); + return GoTo(TerminatorState.Finished); + } + return null; + }); + StartWith(TerminatorState.Uninitialized, null); } + + public sealed class TransportShutdown + { + private TransportShutdown() { } + private static readonly TransportShutdown _instance = new TransportShutdown(); + public static TransportShutdown Instance + { + get + { + return _instance; + } + } + + public override string ToString() + { + return ""; + } + } } #endregion diff --git a/src/core/Akka.Remote/RemoteDaemon.cs b/src/core/Akka.Remote/RemoteDaemon.cs index 412117cdc28..0fa0c0ac917 100644 --- a/src/core/Akka.Remote/RemoteDaemon.cs +++ b/src/core/Akka.Remote/RemoteDaemon.cs @@ -5,12 +5,14 @@ // //----------------------------------------------------------------------- +using System; using System.Collections.Generic; using System.Linq; using Akka.Actor; using Akka.Actor.Internals; using Akka.Dispatch.SysMsg; using Akka.Event; +using Akka.Util; using Akka.Util.Internal; namespace Akka.Remote @@ -75,6 +77,8 @@ public DaemonMsgCreate(Props props, Deploy deploy, string path, IActorRef superv internal class RemoteDaemon : VirtualPathContainer { private readonly ActorSystemImpl _system; + private readonly Switch _terminating; + private readonly IActorRef _terminator; /// /// Initializes a new instance of the class. @@ -82,11 +86,14 @@ internal class RemoteDaemon : VirtualPathContainer /// The system. /// The path. /// The parent. + /// The remoting terminator. /// - public RemoteDaemon(ActorSystemImpl system, ActorPath path, IInternalActorRef parent, ILoggingAdapter log) + public RemoteDaemon(ActorSystemImpl system, ActorPath path, IInternalActorRef parent, IActorRef terminator, ILoggingAdapter log) : base(system.Provider, path, parent, log) { _system = system; + _terminating = new Switch(false); + _terminator = terminator; AddressTerminatedTopic.Get(system).Subscribe(this); } @@ -102,11 +109,25 @@ protected void OnReceive(object message) { Log.Debug("Received command [{0}] to RemoteSystemDaemon on [{1}]", message, Path.Address); if (message is DaemonMsgCreate) HandleDaemonMsgCreate((DaemonMsgCreate)message); + return; + } + + if (message is TerminationHook) + { + _terminating.SwitchOn(() => + { + TerminationHookDoneWhenNoChildren(); + ForEachChild(c => + { + _system.Stop(c); + }); + }); + return; } //Remote ActorSystem on another process / machine has died. //Need to clean up any references to remote deployments here. 
- else if (message is AddressTerminated) + if (message is AddressTerminated) { var addressTerminated = (AddressTerminated) message; //stop any remote actors that belong to this address @@ -114,6 +135,7 @@ protected void OnReceive(object message) { if(@ref.Parent.Path.Address == addressTerminated.Address) _system.Stop(@ref); }); + return; } } @@ -183,6 +205,13 @@ public override IActorRef GetChild(IEnumerable name) } return ActorRefs.Nobody; } + + public void TerminationHookDoneWhenNoChildren() + { + _terminating.WhileOn(() => + { + if (!HasChildren) _terminator.Tell(TerminationHookDone.Instance, this); + }); + } } } - From f66c26224b4481c579bddea037808d8294be006a Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Tue, 5 May 2015 17:02:38 -0700 Subject: [PATCH 17/66] bump commit --- RELEASE_NOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 5e09caa130d..3acce04a66d 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -1,5 +1,6 @@ #### 1.0.2 May 1 2015 + #### 1.0.1 Apr 28 2015 **Bugfix release for Akka.NET v1.0.** From 2834d913bb19ede6726f1b054a4ae1ec6f7d865b Mon Sep 17 00:00:00 2001 From: Joshua Benjamin Date: Thu, 16 Apr 2015 23:28:52 -0700 Subject: [PATCH 18/66] Upgraded to XUnit 2.0 RTM added XUnit2 testkit fixed up the ResizerSpec tests used the actual deathwatch to make the DeathWatchSpec better and not racy adjusting teamcity output and exiting with 0 even if tests fail in multinode runner removed the extra copy of Akka.Cluster.Tests so the fake script doesn't pick up three Moved multinode tests to their own assembly so we don't have skipped tests and it matches up with what akka has a bit. Fixed the ThrottlerTransportAdapterSpec Updated the MultiNodeTests to support xunit 2.0 Fixed ActorRefSpec test to handle race condition. test An_ActoRef_should_return_EmptyLocalActorRef_on_deserialize_if_not_present_in_actor_hierarchy_and_remoting_is_not_enabled RoutingSpec.Router_in_general_must_evict_terminated_routees will now use poison pill and wait for the terminated msg. fixed up how project gets required nugets and removed the repositories.config. 
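The poison-pill-and-await-Terminated change mentioned above for the routing and death-watch specs follows a standard TestKit pattern; a rough sketch (actor names are placeholders, not the actual spec code):

    var routee = Sys.ActorOf(Props.Create(() => new EchoActor()), "routee"); // EchoActor is hypothetical
    Watch(routee);
    routee.Tell(PoisonPill.Instance);
    ExpectTerminated(routee);
    // only after the Terminated message arrives is it safe to assert that the router evicted the routee

Waiting for the Terminated message before asserting removes the race between stopping the routee and inspecting the router's routee set.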
updated fake and used xunit2helper in fake script --- .gitignore | 1 + build.cmd | 8 +- build.fsx | 33 ++- src/.nuget/NuGet.Config | 3 + src/Akka.sln | 22 ++ .../Akka.TestKit.Xunit.csproj | 12 +- .../Akka.TestKit.Xunit.nuspec | 2 +- .../Internals/AkkaEqualException.cs | 5 +- .../Akka.TestKit.Xunit2.csproj | 89 +++++++ .../Akka.TestKit.Xunit2.nuspec | 20 ++ .../Internals/AkkaAssertEqualityComparer.cs | 111 +++++++++ .../AkkaAssertEqualityComparerAdapter.cs | 47 ++++ .../Internals/AkkaEqualException.cs | 51 ++++ .../Properties/AssemblyInfo.cs | 20 ++ .../testkits/Akka.TestKit.Xunit2/TestKit.cs | 125 ++++++++++ .../Akka.TestKit.Xunit2/XunitAssertions.cs | 50 ++++ .../Akka.TestKit.Xunit2/packages.config | 8 + .../Akka.Cluster.Tests.csproj | 42 ++-- src/core/Akka.Cluster.Tests/ClusterSpec.cs | 2 +- .../Properties/AssemblyInfo.cs | 3 + .../Akka.Cluster.Tests/ReachabilitySpec.cs | 20 +- src/core/Akka.Cluster.Tests/packages.config | 7 +- .../Akka.Cluster/Properties/AssemblyInfo.cs | 1 + .../Akka.FSharp.Tests.fsproj | 18 +- src/core/Akka.FSharp.Tests/packages.config | 6 +- .../Akka.FSharp/Properties/AssemblyInfo.fs | 6 +- ...ka.MultiNodeTestRunner.Shared.Tests.csproj | 6 +- .../Akka.MultiNodeTestRunner.csproj | 18 +- .../Akka.MultiNodeTestRunner/Discovery.cs | 11 +- src/core/Akka.MultiNodeTestRunner/Program.cs | 2 +- .../Akka.MultiNodeTestRunner/packages.config | 4 +- .../Akka.MultiNodeTests.csproj | 121 ++++++++++ .../ClusterDeathWatchSpec.cs | 2 +- .../ConvergenceSpec.cs | 5 +- .../FailureDetectorPuppet.cs | 2 +- .../InitialHeartbeatSpec.cs | 3 +- .../JoinInProgressSpec.cs | 2 +- .../JoinSeedNodeSpec.cs | 2 +- .../LeaderLeavingSpec.cs | 3 +- .../MultiNodeClusterSpec.cs | 9 +- .../MultiNodeFact.cs | 2 +- .../MultiNodeLoggingConfig.cs | 2 +- .../Properties/AssemblyInfo.cs | 36 +++ .../ClusterConsistentHashingGroupSpec.cs | 2 +- .../ClusterConsistentHashingRouterSpec.cs | 2 +- src/core/Akka.MultiNodeTests/packages.config | 9 + .../Akka.NodeTestRunner.csproj | 17 +- src/core/Akka.NodeTestRunner/Discovery.cs | 37 +++ src/core/Akka.NodeTestRunner/Program.cs | 64 ++--- src/core/Akka.NodeTestRunner/Sink.cs | 2 +- src/core/Akka.NodeTestRunner/packages.config | 4 +- .../Akka.Persistence.TestKit.Tests.csproj | 14 +- .../Properties/AssemblyInfo.cs | 3 + .../packages.config | 6 +- .../Akka.Persistence.TestKit.csproj | 20 +- .../Akka.Persistence.TestKit/PluginSpec.cs | 2 +- .../Akka.Persistence.TestKit/packages.config | 6 +- .../Akka.Persistence.Tests.csproj | 22 +- .../Properties/AssemblyInfo.cs | 3 + .../Akka.Persistence.Tests/packages.config | 6 +- .../Akka.Remote.TestKit.Tests.csproj | 21 +- .../Properties/AssemblyInfo.cs | 3 + .../Akka.Remote.TestKit.Tests/packages.config | 6 +- .../Akka.Remote.TestKit.csproj | 7 +- src/core/Akka.Remote.TestKit/MultiNodeSpec.cs | 2 +- .../Akka.Remote.Tests.csproj | 26 +- .../Properties/AssemblyInfo.cs | 4 +- .../Akka.Remote.Tests/RemoteRouterSpec.cs | 1 + .../DaemonMsgCreateSerializerSpec.cs | 2 + .../Transport/GenericTransportSpec.cs | 6 +- .../ThrottlerTransportAdapterSpec.cs | 15 +- src/core/Akka.Remote.Tests/packages.config | 6 +- .../Akka.Remote/Properties/AssemblyInfo.cs | 2 +- .../Akka.TestKit.Tests.csproj | 42 ++-- .../Properties/AssemblyInfo.cs | 3 + .../AllTestForEventFilterBase.cs | 224 ++++++++++++++++++ .../AllTestForEventFilterBase_Instances.cs | 52 ++++ .../TestEventListenerTests/ConfigTests.cs | 24 ++ .../CustomEventFilterTests.cs | 43 ++++ .../DeadLettersEventFilterTests.cs | 45 ++++ .../EventFilterTestBase.cs | 54 +++++ 
.../TestKitBaseTests/AwaitAssertTests.cs | 37 +++ .../Xunit2/TestKitBaseTests/RemainingTests.cs | 22 ++ .../Xunit2/TestKit_Config_Tests.cs | 32 +++ src/core/Akka.TestKit.Tests/packages.config | 6 +- .../Akka.Tests.Shared.Internals.csproj | 21 +- .../Akka.Tests.Shared.Internals/AkkaSpec.cs | 2 +- .../Helpers/XAssert.cs | 2 +- .../packages.config | 6 +- src/core/Akka.Tests/Actor/ActorRefSpec.cs | 31 ++- src/core/Akka.Tests/Actor/DeathWatchSpec.cs | 24 +- src/core/Akka.Tests/Akka.Tests.csproj | 37 ++- .../Akka.Tests/Properties/AssemblyInfo.cs | 3 + src/core/Akka.Tests/Routing/ResizerSpec.cs | 30 +-- src/core/Akka.Tests/Routing/RoundRobinSpec.cs | 16 +- src/core/Akka.Tests/Routing/RoutingSpec.cs | 10 +- .../Akka.Tests/Routing/SmallestMailboxSpec.cs | 16 +- src/core/Akka.Tests/packages.config | 11 +- src/core/Akka/Actor/ActorCell.DeathWatch.cs | 18 +- src/core/Akka/Properties/AssemblyInfo.cs | 2 +- .../Chat/ChatMessages/ChatMessages.csproj | 17 ++ .../Chat/ChatMessages/packages.config | 6 +- src/packages/repositories.config | 84 ------- 103 files changed, 1791 insertions(+), 391 deletions(-) create mode 100644 src/contrib/testkits/Akka.TestKit.Xunit2/Akka.TestKit.Xunit2.csproj create mode 100644 src/contrib/testkits/Akka.TestKit.Xunit2/Akka.TestKit.Xunit2.nuspec create mode 100644 src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaAssertEqualityComparer.cs create mode 100644 src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaAssertEqualityComparerAdapter.cs create mode 100644 src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaEqualException.cs create mode 100644 src/contrib/testkits/Akka.TestKit.Xunit2/Properties/AssemblyInfo.cs create mode 100644 src/contrib/testkits/Akka.TestKit.Xunit2/TestKit.cs create mode 100644 src/contrib/testkits/Akka.TestKit.Xunit2/XunitAssertions.cs create mode 100644 src/contrib/testkits/Akka.TestKit.Xunit2/packages.config create mode 100644 src/core/Akka.MultiNodeTests/Akka.MultiNodeTests.csproj rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/ClusterDeathWatchSpec.cs (99%) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/ConvergenceSpec.cs (98%) rename src/core/{Akka.Cluster.Tests => Akka.MultiNodeTests}/FailureDetectorPuppet.cs (98%) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/InitialHeartbeatSpec.cs (99%) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/JoinInProgressSpec.cs (98%) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/JoinSeedNodeSpec.cs (98%) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/LeaderLeavingSpec.cs (99%) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/MultiNodeClusterSpec.cs (98%) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/MultiNodeFact.cs (96%) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/MultiNodeLoggingConfig.cs (96%) create mode 100644 src/core/Akka.MultiNodeTests/Properties/AssemblyInfo.cs rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/Routing/ClusterConsistentHashingGroupSpec.cs (99%) rename src/core/{Akka.Cluster.Tests/MultiNode => Akka.MultiNodeTests}/Routing/ClusterConsistentHashingRouterSpec.cs (99%) create mode 100644 src/core/Akka.MultiNodeTests/packages.config create mode 100644 src/core/Akka.NodeTestRunner/Discovery.cs create mode 100644 src/core/Akka.TestKit.Tests/Xunit2/TestEventListenerTests/AllTestForEventFilterBase.cs create mode 100644 
src/core/Akka.TestKit.Tests/Xunit2/TestEventListenerTests/AllTestForEventFilterBase_Instances.cs create mode 100644 src/core/Akka.TestKit.Tests/Xunit2/TestEventListenerTests/ConfigTests.cs create mode 100644 src/core/Akka.TestKit.Tests/Xunit2/TestEventListenerTests/CustomEventFilterTests.cs create mode 100644 src/core/Akka.TestKit.Tests/Xunit2/TestEventListenerTests/DeadLettersEventFilterTests.cs create mode 100644 src/core/Akka.TestKit.Tests/Xunit2/TestEventListenerTests/EventFilterTestBase.cs create mode 100644 src/core/Akka.TestKit.Tests/Xunit2/TestKitBaseTests/AwaitAssertTests.cs create mode 100644 src/core/Akka.TestKit.Tests/Xunit2/TestKitBaseTests/RemainingTests.cs create mode 100644 src/core/Akka.TestKit.Tests/Xunit2/TestKit_Config_Tests.cs delete mode 100644 src/packages/repositories.config diff --git a/.gitignore b/.gitignore index abd2a25c953..ea053aabd96 100644 --- a/.gitignore +++ b/.gitignore @@ -210,3 +210,4 @@ FakesAssemblies/ /src/.Akka.boltdata/Settings.json /src/.Akka.boltdata/TestResults.json resetdev.bat +/src/packages/repositories.config diff --git a/build.cmd b/build.cmd index 2293572349c..1ab7b81ac20 100644 --- a/build.cmd +++ b/build.cmd @@ -4,9 +4,9 @@ pushd %~dp0 src\.nuget\NuGet.exe update -self -src\.nuget\NuGet.exe install FAKE -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages -ExcludeVersion -Version 3.4.1 +src\.nuget\NuGet.exe install FAKE -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages -ExcludeVersion -Version 3.28.8 -src\.nuget\NuGet.exe install xunit.runners -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages\FAKE -ExcludeVersion -Version 1.9.2 +src\.nuget\NuGet.exe install xunit.runner.console -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages\FAKE -ExcludeVersion -Version 2.0.0 src\.nuget\NuGet.exe install nunit.runners -ConfigFile src\.nuget\Nuget.Config -OutputDirectory src\packages\FAKE -ExcludeVersion -Version 2.6.4 if not exist src\packages\SourceLink.Fake\tools\SourceLink.fsx ( @@ -17,6 +17,4 @@ rem cls set encoding=utf-8 src\packages\FAKE\tools\FAKE.exe build.fsx %* -popd - - +popd \ No newline at end of file diff --git a/build.fsx b/build.fsx index 9ac6cf7715d..dd315258a94 100644 --- a/build.fsx +++ b/build.fsx @@ -4,6 +4,7 @@ open System open System.IO +open System.Text open Fake open Fake.FileUtils open Fake.MSTest @@ -58,6 +59,14 @@ let nugetExe = FullName @"src\.nuget\NuGet.exe" let docDir = "bin" @@ "doc" +Target "RestorePackages" (fun _ -> + "./src/Akka.sln" + |> RestoreMSSolutionPackages (fun p -> + { p with + OutputPath = "./src/packages" + Retries = 4 }) + ) + //-------------------------------------------------------------------------------- // Clean build results @@ -67,7 +76,6 @@ Target "Clean" <| fun _ -> //-------------------------------------------------------------------------------- // Generate AssemblyInfo files with the version for release notes - open AssemblyInfoFile Target "AssemblyInfo" <| fun _ -> for file in !! "src/**/AssemblyInfo.fs" do @@ -182,6 +190,7 @@ Target "CopyOutput" <| fun _ -> "contrib/dependencyinjection/Akka.DI.Ninject" "contrib/testkits/Akka.TestKit.Xunit" "contrib/testkits/Akka.TestKit.NUnit" + "contrib/testkits/Akka.TestKit.Xunit2" ] |> List.iter copyOutput @@ -201,7 +210,7 @@ Target "CleanTests" <| fun _ -> //-------------------------------------------------------------------------------- // Run tests -open XUnitHelper +open XUnit2Helper Target "RunTests" <| fun _ -> let msTestAssemblies = !! 
"src/**/bin/Release/Akka.TestKit.VsTest.Tests.dll" let nunitTestAssemblies = !! "src/**/bin/Release/Akka.TestKit.NUnit.Tests.dll" @@ -219,9 +228,9 @@ Target "RunTests" <| fun _ -> DisableShadowCopy = true; OutputFile = testOutput + @"\NUnitTestResults.xml"}) - let xunitToolPath = findToolInSubPath "xunit.console.clr4.exe" "src/packages/xunit.runners*" + let xunitToolPath = findToolInSubPath "xunit.console.exe" "src/packages/xunit.runner.console*/tools" printfn "Using XUnit runner: %s" xunitToolPath - xUnit + xUnit2 (fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath }) xunitTestAssemblies @@ -230,9 +239,9 @@ Target "RunTestsMono" <| fun _ -> mkdir testOutput - let xunitToolPath = findToolInSubPath "xunit.console.clr4.exe" "src/packages/xunit.runners*" + let xunitToolPath = findToolInSubPath "xunit.console.exe" "src/packages/xunit.runner.console*/tools" printfn "Using XUnit runner: %s" xunitToolPath - xUnit + xUnit2 (fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath }) xunitTestAssemblies @@ -240,8 +249,14 @@ Target "MultiNodeTests" <| fun _ -> let multiNodeTestPath = findToolInSubPath "Akka.MultiNodeTestRunner.exe" "bin/core/Akka.MultiNodeTestRunner*" printfn "Using MultiNodeTestRunner: %s" multiNodeTestPath + let spec = getBuildParam "spec" + + let args = new StringBuilder() + |> append "Akka.MultiNodeTests.dll" + |> append "-Dmultinode.enable-filesink=on" + |> appendIfNotNullOrEmpty spec "-Dmultinode.test-spec=" + |> toText - let args = "Akka.Cluster.Tests.dll -Dmultinode.enable-filesink=on" let result = ExecProcess(fun info -> info.FileName <- multiNodeTestPath info.WorkingDirectory <- (Path.GetDirectoryName (FullName multiNodeTestPath)) @@ -308,7 +323,7 @@ let createNugetPackages _ = let projectDir = Path.GetDirectoryName nuspec let projectFile = (!! 
(projectDir @@ project + ".*sproj")) |> Seq.head let releaseDir = projectDir @@ @"bin\Release" - let packages = projectDir @@ "packages.config" + let packages = projectDir @@ "packages.config" let packageDependencies = if (fileExists packages) then (getDependencies packages) else [] let dependencies = packageDependencies @ getAkkaDependency project let releaseVersion = getProjectVersion project @@ -511,7 +526,7 @@ Target "HelpDocs" <| fun _ -> //-------------------------------------------------------------------------------- // build dependencies -"Clean" ==> "AssemblyInfo" ==> "Build" ==> "CopyOutput" ==> "BuildRelease" +"Clean" ==> "AssemblyInfo" ==> "RestorePackages" ==> "Build" ==> "CopyOutput" ==> "BuildRelease" // tests dependencies "CleanTests" ==> "RunTests" diff --git a/src/.nuget/NuGet.Config b/src/.nuget/NuGet.Config index 67f8ea046ef..58b08e6dd81 100644 --- a/src/.nuget/NuGet.Config +++ b/src/.nuget/NuGet.Config @@ -3,4 +3,7 @@ + + + \ No newline at end of file diff --git a/src/Akka.sln b/src/Akka.sln index 3f8ddf238e8..7b3be2db2f2 100644 --- a/src/Akka.sln +++ b/src/Akka.sln @@ -194,6 +194,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.SqlServer" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.SqlServer.Tests", "contrib\persistence\Akka.Persistence.SqlServer.Tests\Akka.Persistence.SqlServer.Tests.csproj", "{5A3C24D7-0D1C-4974-BBB4-22AC792666DE}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.TestKit.Xunit2", "contrib\testkits\Akka.TestKit.Xunit2\Akka.TestKit.Xunit2.csproj", "{7DBD5C17-5E9D-40C4-9201-D092751532A7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.MultiNodeTests", "core\Akka.MultiNodeTests\Akka.MultiNodeTests.csproj", "{F0781BEA-5BA0-4AF0-BB15-E3F209B681F5}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug Mono|Any CPU = Debug Mono|Any CPU @@ -703,6 +707,22 @@ Global {5A3C24D7-0D1C-4974-BBB4-22AC792666DE}.Release Mono|Any CPU.Build.0 = Release|Any CPU {5A3C24D7-0D1C-4974-BBB4-22AC792666DE}.Release|Any CPU.ActiveCfg = Release|Any CPU {5A3C24D7-0D1C-4974-BBB4-22AC792666DE}.Release|Any CPU.Build.0 = Release|Any CPU + {7DBD5C17-5E9D-40C4-9201-D092751532A7}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {7DBD5C17-5E9D-40C4-9201-D092751532A7}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {7DBD5C17-5E9D-40C4-9201-D092751532A7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7DBD5C17-5E9D-40C4-9201-D092751532A7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7DBD5C17-5E9D-40C4-9201-D092751532A7}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU + {7DBD5C17-5E9D-40C4-9201-D092751532A7}.Release Mono|Any CPU.Build.0 = Release|Any CPU + {7DBD5C17-5E9D-40C4-9201-D092751532A7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7DBD5C17-5E9D-40C4-9201-D092751532A7}.Release|Any CPU.Build.0 = Release|Any CPU + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5}.Release Mono|Any CPU.Build.0 = Release|Any CPU + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5}.Release|Any CPU.Build.0 = 
Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -789,5 +809,7 @@ Global {264C22A4-CAFC-41F6-B82C-4DDC5C196767} = {588C1513-FAB6-42C3-B6FC-3485F13620CF} {BAC85686-AFC4-413E-98DC-5ED8F468BC63} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} {5A3C24D7-0D1C-4974-BBB4-22AC792666DE} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} + {7DBD5C17-5E9D-40C4-9201-D092751532A7} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5} = {01167D3C-49C4-4CDE-9787-C176D139ACDD} EndGlobalSection EndGlobal diff --git a/src/contrib/testkits/Akka.TestKit.Xunit/Akka.TestKit.Xunit.csproj b/src/contrib/testkits/Akka.TestKit.Xunit/Akka.TestKit.Xunit.csproj index 2bba3427fcd..646d9db3556 100644 --- a/src/contrib/testkits/Akka.TestKit.Xunit/Akka.TestKit.Xunit.csproj +++ b/src/contrib/testkits/Akka.TestKit.Xunit/Akka.TestKit.Xunit.csproj @@ -13,6 +13,7 @@ 512 ..\..\..\ true + a4af550d true @@ -38,22 +39,20 @@ ..\..\..\packages\xunit.1.9.2\lib\net20\xunit.dll + True Properties\SharedAssemblyInfo.cs - + - + - - - {0D3CBAD0-BBDB-43E5-AFC4-ED1D3ECDC224} @@ -64,6 +63,9 @@ Akka + + + diff --git a/src/contrib/testkits/Akka.TestKit.Xunit/Akka.TestKit.Xunit.nuspec b/src/contrib/testkits/Akka.TestKit.Xunit/Akka.TestKit.Xunit.nuspec index 968416fb406..c945c208d74 100644 --- a/src/contrib/testkits/Akka.TestKit.Xunit/Akka.TestKit.Xunit.nuspec +++ b/src/contrib/testkits/Akka.TestKit.Xunit/Akka.TestKit.Xunit.nuspec @@ -6,7 +6,7 @@ @build.number@ @authors@ @authors@ - TestKit for writing tests for Akka.NET using xUnit. + TestKit for writing tests for Akka.NET using xUnit 1.9.2. https://github.com/akkadotnet/akka.net/blob/master/LICENSE https://github.com/akkadotnet/akka.net http://getakka.net/images/AkkaNetLogo.Normal.png diff --git a/src/contrib/testkits/Akka.TestKit.Xunit/Internals/AkkaEqualException.cs b/src/contrib/testkits/Akka.TestKit.Xunit/Internals/AkkaEqualException.cs index 7a728ef48ca..156a0a41815 100644 --- a/src/contrib/testkits/Akka.TestKit.Xunit/Internals/AkkaEqualException.cs +++ b/src/contrib/testkits/Akka.TestKit.Xunit/Internals/AkkaEqualException.cs @@ -23,11 +23,12 @@ public AkkaEqualException(object expected, object actual, string format = "", pa _args = args; } - public AkkaEqualException(object expected, object actual, bool skipPositionCheck, string format = "", params object[] args) + public AkkaEqualException(object expected, object actual, bool skipPositionCheck, string format = "", + params object[] args) : base(expected, actual, skipPositionCheck) { - _format = format; _args = args; + _format = format; } protected AkkaEqualException(SerializationInfo info, StreamingContext context) diff --git a/src/contrib/testkits/Akka.TestKit.Xunit2/Akka.TestKit.Xunit2.csproj b/src/contrib/testkits/Akka.TestKit.Xunit2/Akka.TestKit.Xunit2.csproj new file mode 100644 index 00000000000..3bea9ece7d3 --- /dev/null +++ b/src/contrib/testkits/Akka.TestKit.Xunit2/Akka.TestKit.Xunit2.csproj @@ -0,0 +1,89 @@ + + + + + + Debug + AnyCPU + {7DBD5C17-5E9D-40C4-9201-D092751532A7} + Library + Properties + Akka.TestKit.Xunit2 + Akka.TestKit.Xunit2 + v4.5 + 512 + ..\..\..\ + true + 6577902d + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + + + ..\..\..\packages\xunit.abstractions.2.0.0\lib\net35\xunit.abstractions.dll + + + ..\..\..\packages\xunit.assert.2.0.0\lib\portable-net45+win+wpa81+wp80+monotouch+monoandroid+Xamarin.iOS\xunit.assert.dll + + + 
..\..\..\packages\xunit.extensibility.core.2.0.0\lib\portable-net45+win+wpa81+wp80+monotouch+monoandroid+Xamarin.iOS\xunit.core.dll + + + + + Properties\SharedAssemblyInfo.cs + + + + + + + + + + + + + + {0d3cbad0-bbdb-43e5-afc4-ed1d3ecdc224} + Akka.TestKit + + + {5deddf90-37f0-48d3-a0b0-a5cbd8a7e377} + Akka + + + + + + + This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + + + + + \ No newline at end of file diff --git a/src/contrib/testkits/Akka.TestKit.Xunit2/Akka.TestKit.Xunit2.nuspec b/src/contrib/testkits/Akka.TestKit.Xunit2/Akka.TestKit.Xunit2.nuspec new file mode 100644 index 00000000000..fab4cd1608d --- /dev/null +++ b/src/contrib/testkits/Akka.TestKit.Xunit2/Akka.TestKit.Xunit2.nuspec @@ -0,0 +1,20 @@ + + + + @project@ + @project@@title@ + @build.number@ + @authors@ + @authors@ + TestKit for writing tests for Akka.NET using xUnit 2.0. + https://github.com/akkadotnet/akka.net/blob/master/LICENSE + https://github.com/akkadotnet/akka.net + http://getakka.net/images/AkkaNetLogo.Normal.png + false + @releaseNotes@ + @copyright@ + @tags@ xUnit + @dependencies@ + @references@ + + diff --git a/src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaAssertEqualityComparer.cs b/src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaAssertEqualityComparer.cs new file mode 100644 index 00000000000..b58aea32d81 --- /dev/null +++ b/src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaAssertEqualityComparer.cs @@ -0,0 +1,111 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Reflection; + +namespace Akka.TestKit.Xunit2.Internals +{ + /// + /// Default implementation of used by the Akka's xUnit.net equality assertions. + /// Copy of xUnits code + /// https://github.com/xunit/xunit/blob/3e6ab94ca231a6d8c86e90d6e724631a0faa33b7/src/xunit.assert/Asserts/Sdk/AssertEqualityComparer.cs + /// Note! Part of internal API. Breaking changes may occur without notice. Use at own risk. + /// + /// The type that is being compared. + public class AkkaAssertEqualityComparer : IEqualityComparer + { + static readonly IEqualityComparer DefaultInnerComparer = new AkkaAssertEqualityComparerAdapter(new AkkaAssertEqualityComparer()); + static readonly TypeInfo NullableTypeInfo = typeof(Nullable<>).GetTypeInfo(); + + readonly Func innerComparerFactory; + readonly bool skipTypeCheck; + + /// + /// Initializes a new instance of the class. + /// + /// Set to true to skip type equality checks. + /// The inner comparer to be used when the compared objects are enumerable. + public AkkaAssertEqualityComparer(bool skipTypeCheck = false, IEqualityComparer innerComparer = null) + { + this.skipTypeCheck = skipTypeCheck; + + // Use a thunk to delay evaluation of DefaultInnerComparer + innerComparerFactory = () => innerComparer ?? DefaultInnerComparer; + } + + /// + public bool Equals(T x, T y) + { + var typeInfo = typeof(T).GetTypeInfo(); + + // Null? 
+ if(!typeInfo.IsValueType || (typeInfo.IsGenericType && typeInfo.GetGenericTypeDefinition().GetTypeInfo().IsAssignableFrom(NullableTypeInfo))) + { + if(Object.Equals(x, default(T))) + return Object.Equals(y, default(T)); + + if(Object.Equals(y, default(T))) + return false; + } + + // Same type? + if(!skipTypeCheck && x.GetType() != y.GetType()) + return false; + + // Implements IEquatable? + var equatable = x as IEquatable; + if(equatable != null) + return equatable.Equals(y); + + // Implements IComparable? + var comparableGeneric = x as IComparable; + if(comparableGeneric != null) + return comparableGeneric.CompareTo(y) == 0; + + // Implements IComparable? + var comparable = x as IComparable; + if(comparable != null) + return comparable.CompareTo(y) == 0; + + // Enumerable? + var enumerableX = x as IEnumerable; + var enumerableY = y as IEnumerable; + + if(enumerableX != null && enumerableY != null) + { + var enumeratorX = enumerableX.GetEnumerator(); + var enumeratorY = enumerableY.GetEnumerator(); + var equalityComparer = innerComparerFactory(); + + while(true) + { + bool hasNextX = enumeratorX.MoveNext(); + bool hasNextY = enumeratorY.MoveNext(); + + if(!hasNextX || !hasNextY) + return (hasNextX == hasNextY); + + if(!equalityComparer.Equals(enumeratorX.Current, enumeratorY.Current)) + return false; + } + } + + // Last case, rely on Object.Equals + return Object.Equals(x, y); + } + + /// + public int GetHashCode(T obj) + { + throw new NotImplementedException(); + } + } +} + diff --git a/src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaAssertEqualityComparerAdapter.cs b/src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaAssertEqualityComparerAdapter.cs new file mode 100644 index 00000000000..beebeed97f9 --- /dev/null +++ b/src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaAssertEqualityComparerAdapter.cs @@ -0,0 +1,47 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System; +using System.Collections; +using System.Collections.Generic; + +namespace Akka.TestKit.Xunit2.Internals +{ + /// + /// A class that wraps to create . + /// Copy of xUnits class: + /// https://github.com/xunit/xunit/blob/3e6ab94ca231a6d8c86e90d6e724631a0faa33b7/src/xunit.assert/Asserts/Sdk/AssertEqualityComparerAdapter.cs + /// Note! Part of internal API. Breaking changes may occur without notice. Use at own risk. + /// + /// The type that is being compared. + internal class AkkaAssertEqualityComparerAdapter : IEqualityComparer + { + readonly IEqualityComparer innerComparer; + + /// + /// Initializes a new instance of the class. + /// + /// The comparer that is being adapted. 
+ public AkkaAssertEqualityComparerAdapter(IEqualityComparer innerComparer) + { + this.innerComparer = innerComparer; + } + + /// + public new bool Equals(object x, object y) + { + return innerComparer.Equals((T)x, (T)y); + } + + /// + public int GetHashCode(object obj) + { + throw new NotImplementedException(); + } + } +} + diff --git a/src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaEqualException.cs b/src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaEqualException.cs new file mode 100644 index 00000000000..e8055edc668 --- /dev/null +++ b/src/contrib/testkits/Akka.TestKit.Xunit2/Internals/AkkaEqualException.cs @@ -0,0 +1,51 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System; +using System.Runtime.Serialization; +using Xunit.Sdk; + +namespace Akka.TestKit.Xunit2.Internals +{ + public class AkkaEqualException : EqualException + { + private readonly string _format; + private readonly object[] _args; + + public AkkaEqualException(object expected, object actual, string format = "", params object[] args) + : base(expected, actual) + { + _format = format; + _args = args; + } + + protected AkkaEqualException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + public override string Message + { + get + { + if(string.IsNullOrEmpty(_format)) + return base.Message; + string message; + try + { + message = string.Format(_format, _args); + } + catch(Exception) + { + message = "[Could not string.Format(\"" + _format + "\", " + string.Join(", ", _args) + ")]"; + } + return base.Message + " " + message; + } + } + } +} + diff --git a/src/contrib/testkits/Akka.TestKit.Xunit2/Properties/AssemblyInfo.cs b/src/contrib/testkits/Akka.TestKit.Xunit2/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..9a3dba8dd8b --- /dev/null +++ b/src/contrib/testkits/Akka.TestKit.Xunit2/Properties/AssemblyInfo.cs @@ -0,0 +1,20 @@ +using System.Reflection; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Akka.TestKit.Xunit2")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyProduct("Akka.TestKit.Xunit2")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("a48eb1da-be56-4078-a5f7-29d8e2bcd590")] + diff --git a/src/contrib/testkits/Akka.TestKit.Xunit2/TestKit.cs b/src/contrib/testkits/Akka.TestKit.Xunit2/TestKit.cs new file mode 100644 index 00000000000..63b5e604ceb --- /dev/null +++ b/src/contrib/testkits/Akka.TestKit.Xunit2/TestKit.cs @@ -0,0 +1,125 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. 
+// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System; +using Akka.Actor; +using Akka.Configuration; + +namespace Akka.TestKit.Xunit2 +{ + /// + /// TestKit for xUnit. + /// + public class TestKit : TestKitBase , IDisposable + { + private static readonly XunitAssertions _assertions=new XunitAssertions(); + private bool _isDisposed; //Automatically initialized to false; + + /// + /// Create a new instance of the for xUnit class. + /// If no is passed in, a new system + /// with will be created. + /// + /// Optional: The actor system. + public TestKit(ActorSystem system = null) + : base(_assertions, system) + { + //Intentionally left blank + } + + /// + /// Create a new instance of the for xUnit class. + /// A new system with the specified configuration will be created. + /// + /// The configuration to use for the system. + /// Optional: the name of the system. Default: "test" + public TestKit(Config config, string actorSystemName=null) + : base(_assertions, config, actorSystemName) + { + //Intentionally left blank + } + + + /// + /// Create a new instance of the for xUnit class. + /// A new system with the specified configuration will be created. + /// + /// The configuration to use for the system. + public TestKit(string config): base(_assertions, ConfigurationFactory.ParseString(config)) + { + //Intentionally left blank + } + + public new static Config DefaultConfig { get { return TestKitBase.DefaultConfig; } } + public new static Config FullDebugConfig { get { return TestKitBase.FullDebugConfig; } } + + protected static XunitAssertions Assertions { get { return _assertions; } } + + + /// + /// This method is called when a test ends. + /// If you override this, make sure you either call + /// base.AfterTest() or TestKitBase.Shutdown to shut down + /// the system. Otherwise you'll leak memory. + /// + /// + protected virtual void AfterAll() + { + Shutdown(); + } + + + // Dispose ------------------------------------------------------------ + + //Destructor: + //~TestKit() + //{ + // // Finalizer calls Dispose(false) + // Dispose(false); + //} + + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + public void Dispose() + { + Dispose(true); + //Take this object off the finalization queue and prevent finalization code for this object + //from executing a second time. + GC.SuppressFinalize(this); + } + + + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// if set to true the method has been called directly or indirectly by a + /// user's code. Managed and unmanaged resources will be disposed.
+ /// if set to false the method has been called by the runtime from inside the finalizer and only + /// unmanaged resources can be disposed. + protected virtual void Dispose(bool disposing) + { + // If disposing equals false, the method has been called by the + // runtime from inside the finalizer and you should not reference + // other objects. Only unmanaged resources can be disposed. + + try + { + //Make sure Dispose does not get called more than once, by checking the disposed field + if(!_isDisposed) + { + if(disposing) + { + AfterAll(); + } + } + _isDisposed = true; + } + finally + { + // base.dispose(disposing); + } + } + } +} + diff --git a/src/contrib/testkits/Akka.TestKit.Xunit2/XunitAssertions.cs b/src/contrib/testkits/Akka.TestKit.Xunit2/XunitAssertions.cs new file mode 100644 index 00000000000..32068b88d43 --- /dev/null +++ b/src/contrib/testkits/Akka.TestKit.Xunit2/XunitAssertions.cs @@ -0,0 +1,50 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System; +using Akka.TestKit.Xunit2.Internals; +using Xunit; + +namespace Akka.TestKit.Xunit2 +{ + /// + /// Assertions for xUnit + /// + public class XunitAssertions : ITestKitAssertions + { + public void Fail(string format = "", params object[] args) + { + Assert.True(false, string.Format(format, args)); + } + + public void AssertTrue(bool condition, string format = "", params object[] args) + { + Assert.True(condition, string.Format(format, args)); + } + + public void AssertFalse(bool condition, string format = "", params object[] args) + { + + Assert.False(condition, string.Format(format, args)); + } + + public void AssertEqual(T expected, T actual, string format = "", params object[] args) + { + var comparer = new AkkaAssertEqualityComparer(); + if(!comparer.Equals(expected, actual)) + throw new AkkaEqualException(expected, actual, format, args); + } + + public void AssertEqual(T expected, T actual, Func comparer, string format = "", params object[] args) + { + if(!comparer(expected, actual)) + throw new AkkaEqualException(expected, actual, format, args); + } + + } +} + diff --git a/src/contrib/testkits/Akka.TestKit.Xunit2/packages.config b/src/contrib/testkits/Akka.TestKit.Xunit2/packages.config new file mode 100644 index 00000000000..3109db107fb --- /dev/null +++ b/src/contrib/testkits/Akka.TestKit.Xunit2/packages.config @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/src/core/Akka.Cluster.Tests/Akka.Cluster.Tests.csproj b/src/core/Akka.Cluster.Tests/Akka.Cluster.Tests.csproj index d3dc6f40770..3caab4879d0 100644 --- a/src/core/Akka.Cluster.Tests/Akka.Cluster.Tests.csproj +++ b/src/core/Akka.Cluster.Tests/Akka.Cluster.Tests.csproj @@ -1,5 +1,6 @@  + Debug AnyCPU @@ -12,6 +13,7 @@ 512 ..\..\ true + d8ea7d64 true @@ -31,19 +33,33 @@ 4 + + ..\..\packages\FluentAssertions.3.3.0\lib\net45\FluentAssertions.dll + + + ..\..\packages\FluentAssertions.3.3.0\lib\net45\FluentAssertions.Core.dll + ..\..\packages\Google.ProtocolBuffers.2.4.1.521\lib\net40\Google.ProtocolBuffers.Serialization.dll - - ..\..\packages\xunit.1.9.2\lib\net20\xunit.dll - + + ..\..\packages\Google.ProtocolBuffers.2.4.1.521\lib\net40\Google.ProtocolBuffers.dll ..\..\packages\Microsoft.Bcl.Immutable.1.0.34\lib\portable-net45+win8+wp8+wpa81\System.Collections.Immutable.dll + + 
..\..\packages\xunit.abstractions.2.0.0\lib\net35\xunit.abstractions.dll + + + ..\..\packages\xunit.assert.2.0.0\lib\portable-net45+win+wpa81+wp80+monotouch+monoandroid+Xamarin.iOS\xunit.assert.dll + + + ..\..\packages\xunit.extensibility.core.2.0.0\lib\portable-net45+win+wpa81+wp80+monotouch+monoandroid+Xamarin.iOS\xunit.core.dll + @@ -63,24 +79,12 @@ - - - - - - - - - - - - @@ -91,9 +95,9 @@ - - {11F4D4B8-7E07-4457-ABF2-609B3E7B2649} - Akka.TestKit.Xunit + + {7dbd5c17-5e9d-40c4-9201-d092751532a7} + Akka.TestKit.Xunit2 {6AB00F61-269A-4501-B06A-026707F000A7} @@ -126,6 +130,7 @@ + @@ -152,6 +157,7 @@ This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + \ No newline at end of file diff --git a/src/core/Akka.Cluster.Tests/MultiNode/ClusterDeathWatchSpec.cs b/src/core/Akka.MultiNodeTests/ClusterDeathWatchSpec.cs similarity index 99% rename from src/core/Akka.Cluster.Tests/MultiNode/ClusterDeathWatchSpec.cs rename to src/core/Akka.MultiNodeTests/ClusterDeathWatchSpec.cs index c2532ed09a5..a34aec837a0 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/ClusterDeathWatchSpec.cs +++ b/src/core/Akka.MultiNodeTests/ClusterDeathWatchSpec.cs @@ -15,7 +15,7 @@ using Akka.TestKit.TestActors; using Xunit; -namespace Akka.Cluster.Tests.MultiNode +namespace Akka.MultiNodeTests { public class ClusterDeathWatchSpecConfig : MultiNodeConfig { diff --git a/src/core/Akka.Cluster.Tests/MultiNode/ConvergenceSpec.cs b/src/core/Akka.MultiNodeTests/ConvergenceSpec.cs similarity index 98% rename from src/core/Akka.Cluster.Tests/MultiNode/ConvergenceSpec.cs rename to src/core/Akka.MultiNodeTests/ConvergenceSpec.cs index d48a5eb4e6e..d2818efba11 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/ConvergenceSpec.cs +++ b/src/core/Akka.MultiNodeTests/ConvergenceSpec.cs @@ -8,13 +8,14 @@ using System; using System.Linq; using System.Threading; +using Akka.Actor; +using Akka.Cluster; using Akka.Configuration; using Akka.Remote.TestKit; using Akka.TestKit; using Xunit; -using Address = Akka.Actor.Address; -namespace Akka.Cluster.Tests.MultiNode +namespace Akka.MultiNodeTests { public class ConvergenceSpecConfig : MultiNodeConfig { diff --git a/src/core/Akka.Cluster.Tests/FailureDetectorPuppet.cs b/src/core/Akka.MultiNodeTests/FailureDetectorPuppet.cs similarity index 98% rename from src/core/Akka.Cluster.Tests/FailureDetectorPuppet.cs rename to src/core/Akka.MultiNodeTests/FailureDetectorPuppet.cs index 76a4c191528..4af8d961398 100644 --- a/src/core/Akka.Cluster.Tests/FailureDetectorPuppet.cs +++ b/src/core/Akka.MultiNodeTests/FailureDetectorPuppet.cs @@ -10,7 +10,7 @@ using Akka.Remote; using Akka.Util; -namespace Akka.Cluster.Tests +namespace Akka.MultiNodeTests { /// /// User controllable "puppet" failure detector. 
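For orientation, a minimal sketch of how a test might consume the new Akka.TestKit.Xunit2 base class added in this patch; EchoSpec and EchoActor are hypothetical illustration-only names, while TestKit, TestActor, Sys and ExpectMsg come from the new TestKit class and the shared TestKitBase it derives from:

    using Akka.Actor;
    using Akka.TestKit.Xunit2;
    using Xunit;

    // Illustrative spec built on the xUnit 2 flavour of the TestKit.
    public class EchoSpec : TestKit
    {
        [Fact]
        public void Echo_actor_should_reply_with_the_same_message()
        {
            // Sys and TestActor are inherited from TestKitBase.
            var echo = Sys.ActorOf(Props.Create(() => new EchoActor()));
            echo.Tell("ping", TestActor);
            ExpectMsg("ping");
        }

        // Hypothetical actor used only to exercise the assertion path.
        private class EchoActor : ReceiveActor
        {
            public EchoActor()
            {
                ReceiveAny(msg => Sender.Tell(msg));
            }
        }
    }

The same shape applies to the specs moved into Akka.MultiNodeTests below, which now reference Akka.TestKit.Xunit2 instead of Akka.TestKit.Xunit.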
diff --git a/src/core/Akka.Cluster.Tests/MultiNode/InitialHeartbeatSpec.cs b/src/core/Akka.MultiNodeTests/InitialHeartbeatSpec.cs similarity index 99% rename from src/core/Akka.Cluster.Tests/MultiNode/InitialHeartbeatSpec.cs rename to src/core/Akka.MultiNodeTests/InitialHeartbeatSpec.cs index 855f01b7b3e..7f8a4d4a0fe 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/InitialHeartbeatSpec.cs +++ b/src/core/Akka.MultiNodeTests/InitialHeartbeatSpec.cs @@ -7,12 +7,13 @@ using System; using System.Linq; +using Akka.Cluster; using Akka.Configuration; using Akka.Remote.TestKit; using Akka.Remote.Transport; using Xunit; -namespace Akka.Cluster.Tests.MultiNode +namespace Akka.MultiNodeTests { public class InitialHeartbeatMultiNodeConfig : MultiNodeConfig { diff --git a/src/core/Akka.Cluster.Tests/MultiNode/JoinInProgressSpec.cs b/src/core/Akka.MultiNodeTests/JoinInProgressSpec.cs similarity index 98% rename from src/core/Akka.Cluster.Tests/MultiNode/JoinInProgressSpec.cs rename to src/core/Akka.MultiNodeTests/JoinInProgressSpec.cs index 189d249e5f0..6b2e81d3f38 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/JoinInProgressSpec.cs +++ b/src/core/Akka.MultiNodeTests/JoinInProgressSpec.cs @@ -12,7 +12,7 @@ using Akka.Remote.TestKit; using Xunit; -namespace Akka.Cluster.Tests.MultiNode +namespace Akka.MultiNodeTests { public class JoinInProgressMultiNodeConfig : MultiNodeConfig { diff --git a/src/core/Akka.Cluster.Tests/MultiNode/JoinSeedNodeSpec.cs b/src/core/Akka.MultiNodeTests/JoinSeedNodeSpec.cs similarity index 98% rename from src/core/Akka.Cluster.Tests/MultiNode/JoinSeedNodeSpec.cs rename to src/core/Akka.MultiNodeTests/JoinSeedNodeSpec.cs index 88e95a71f8f..51b9eb33cb3 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/JoinSeedNodeSpec.cs +++ b/src/core/Akka.MultiNodeTests/JoinSeedNodeSpec.cs @@ -11,7 +11,7 @@ using Akka.Configuration; using Akka.Remote.TestKit; -namespace Akka.Cluster.Tests.MultiNode +namespace Akka.MultiNodeTests { public class JoinSeedNodeConfig : MultiNodeConfig { diff --git a/src/core/Akka.Cluster.Tests/MultiNode/LeaderLeavingSpec.cs b/src/core/Akka.MultiNodeTests/LeaderLeavingSpec.cs similarity index 99% rename from src/core/Akka.Cluster.Tests/MultiNode/LeaderLeavingSpec.cs rename to src/core/Akka.MultiNodeTests/LeaderLeavingSpec.cs index 5d26c160341..4b29c58ac02 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/LeaderLeavingSpec.cs +++ b/src/core/Akka.MultiNodeTests/LeaderLeavingSpec.cs @@ -8,10 +8,11 @@ using System; using System.Linq; using Akka.Actor; +using Akka.Cluster; using Akka.Remote.TestKit; using Akka.TestKit; -namespace Akka.Cluster.Tests.MultiNode +namespace Akka.MultiNodeTests { public class LeaderLeavingSpecConfig : MultiNodeConfig { diff --git a/src/core/Akka.Cluster.Tests/MultiNode/MultiNodeClusterSpec.cs b/src/core/Akka.MultiNodeTests/MultiNodeClusterSpec.cs similarity index 98% rename from src/core/Akka.Cluster.Tests/MultiNode/MultiNodeClusterSpec.cs rename to src/core/Akka.MultiNodeTests/MultiNodeClusterSpec.cs index d2d42c303e2..a5af205496c 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/MultiNodeClusterSpec.cs +++ b/src/core/Akka.MultiNodeTests/MultiNodeClusterSpec.cs @@ -12,15 +12,16 @@ using System.Linq; using System.Text.RegularExpressions; using Akka.Actor; +using Akka.Cluster; using Akka.Configuration; using Akka.Dispatch.SysMsg; using Akka.Remote.TestKit; using Akka.Remote.Transport; using Akka.TestKit; -using Akka.TestKit.Xunit; +using Akka.TestKit.Xunit2; using Xunit; -namespace Akka.Cluster.Tests.MultiNode +namespace 
Akka.MultiNodeTests { //TODO: WatchedByCoroner? //@Aaronontheweb: Coroner is a JVM-specific instrument used to report deadlocks and other fun stuff. @@ -30,7 +31,7 @@ public abstract class MultiNodeClusterSpec : MultiNodeSpec public static Config ClusterConfigWithFailureDetectorPuppet() { return ConfigurationFactory.ParseString( - @"akka.cluster.failure-detector.implementation-class = ""Akka.Cluster.Tests.FailureDetectorPuppet, Akka.Cluster.Tests""") + @"akka.cluster.failure-detector.implementation-class = ""Akka.MultiNodeTests.FailureDetectorPuppet, Akka.MultiNodeTests""") .WithFallback(ClusterConfig()); } @@ -229,7 +230,7 @@ public Address GetAddress(RoleName role) /// /// Get the cluster node to use. /// - public Cluster Cluster { get { return Cluster.Get(Sys); } } + public Cluster.Cluster Cluster { get { return Akka.Cluster.Cluster.Get(Sys); } } /// /// Use this method for the initial startup of the cluster node diff --git a/src/core/Akka.Cluster.Tests/MultiNode/MultiNodeFact.cs b/src/core/Akka.MultiNodeTests/MultiNodeFact.cs similarity index 96% rename from src/core/Akka.Cluster.Tests/MultiNode/MultiNodeFact.cs rename to src/core/Akka.MultiNodeTests/MultiNodeFact.cs index 5ad137ad853..d8a0d8cbb82 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/MultiNodeFact.cs +++ b/src/core/Akka.MultiNodeTests/MultiNodeFact.cs @@ -8,7 +8,7 @@ using System; using Xunit; -namespace Akka.Cluster.Tests.MultiNode +namespace Akka.MultiNodeTests { public class MultiNodeFactAttribute : FactAttribute { diff --git a/src/core/Akka.Cluster.Tests/MultiNode/MultiNodeLoggingConfig.cs b/src/core/Akka.MultiNodeTests/MultiNodeLoggingConfig.cs similarity index 96% rename from src/core/Akka.Cluster.Tests/MultiNode/MultiNodeLoggingConfig.cs rename to src/core/Akka.MultiNodeTests/MultiNodeLoggingConfig.cs index 7e63e548c78..f25adab5e8e 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/MultiNodeLoggingConfig.cs +++ b/src/core/Akka.MultiNodeTests/MultiNodeLoggingConfig.cs @@ -8,7 +8,7 @@ using Akka.Configuration; using Akka.Remote.TestKit; -namespace Akka.Cluster.Tests.MultiNode +namespace Akka.MultiNodeTests { /// /// Static provider that allows toggleable logging diff --git a/src/core/Akka.MultiNodeTests/Properties/AssemblyInfo.cs b/src/core/Akka.MultiNodeTests/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..a064f2d3bfb --- /dev/null +++ b/src/core/Akka.MultiNodeTests/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Akka.MultiNodeTests")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Akka.MultiNodeTests")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("f44c16ff-d622-4c69-bb17-8da38e8aa2f4")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/src/core/Akka.Cluster.Tests/MultiNode/Routing/ClusterConsistentHashingGroupSpec.cs b/src/core/Akka.MultiNodeTests/Routing/ClusterConsistentHashingGroupSpec.cs similarity index 99% rename from src/core/Akka.Cluster.Tests/MultiNode/Routing/ClusterConsistentHashingGroupSpec.cs rename to src/core/Akka.MultiNodeTests/Routing/ClusterConsistentHashingGroupSpec.cs index 83399350164..be5e95ac8bb 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/Routing/ClusterConsistentHashingGroupSpec.cs +++ b/src/core/Akka.MultiNodeTests/Routing/ClusterConsistentHashingGroupSpec.cs @@ -15,7 +15,7 @@ using Akka.Routing; using Akka.TestKit; -namespace Akka.Cluster.Tests.MultiNode.Routing +namespace Akka.MultiNodeTests.Routing { public class ClusterConsistentHashingGroupSpecConfig : MultiNodeConfig { diff --git a/src/core/Akka.Cluster.Tests/MultiNode/Routing/ClusterConsistentHashingRouterSpec.cs b/src/core/Akka.MultiNodeTests/Routing/ClusterConsistentHashingRouterSpec.cs similarity index 99% rename from src/core/Akka.Cluster.Tests/MultiNode/Routing/ClusterConsistentHashingRouterSpec.cs rename to src/core/Akka.MultiNodeTests/Routing/ClusterConsistentHashingRouterSpec.cs index 81f8cae0ae7..176348ec7d5 100644 --- a/src/core/Akka.Cluster.Tests/MultiNode/Routing/ClusterConsistentHashingRouterSpec.cs +++ b/src/core/Akka.MultiNodeTests/Routing/ClusterConsistentHashingRouterSpec.cs @@ -15,7 +15,7 @@ using Akka.TestKit; using Xunit; -namespace Akka.Cluster.Tests.MultiNode.Routing +namespace Akka.MultiNodeTests.Routing { public class ConsistentHashingRouterMultiNodeConfig : MultiNodeConfig { diff --git a/src/core/Akka.MultiNodeTests/packages.config b/src/core/Akka.MultiNodeTests/packages.config new file mode 100644 index 00000000000..597b89ce463 --- /dev/null +++ b/src/core/Akka.MultiNodeTests/packages.config @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/src/core/Akka.NodeTestRunner/Akka.NodeTestRunner.csproj b/src/core/Akka.NodeTestRunner/Akka.NodeTestRunner.csproj index 199352f22ce..9c4b778ca46 100644 --- a/src/core/Akka.NodeTestRunner/Akka.NodeTestRunner.csproj +++ b/src/core/Akka.NodeTestRunner/Akka.NodeTestRunner.csproj @@ -51,14 +51,17 @@ - - ..\..\packages\xunit.abstractions.2.0.0-beta4-build2738\lib\net35\xunit.abstractions.dll + + False + ..\..\packages\xunit.abstractions.2.0.0\lib\net35\xunit.abstractions.dll - - ..\..\packages\xunit.runner.utility.2.0.0-beta4-build2738\lib\net35\xunit.runner.utility.dll + + False + ..\..\packages\xunit.runner.utility.2.1.0-beta2-build2981\lib\net35\xunit.runner.utility.desktop.dll + @@ -72,9 +75,9 @@ {A8AA2D7E-3D35-44DF-AF92-80A2C39C1F4D} Akka.Logger.NLog - - {C8D6A95C-50BF-4416-A212-86B18B87220D} - Akka.Cluster.Tests + + {f0781bea-5ba0-4af0-bb15-e3f209b681f5} + Akka.MultiNodeTests {E5957C3E-2B1E-469F-A680-7953B4DEA31B} diff --git a/src/core/Akka.NodeTestRunner/Discovery.cs b/src/core/Akka.NodeTestRunner/Discovery.cs new file mode 100644 index 00000000000..d9eb1ec9fa7 --- 
/dev/null +++ b/src/core/Akka.NodeTestRunner/Discovery.cs @@ -0,0 +1,37 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Xunit; +using Xunit.Abstractions; + +namespace Akka.NodeTestRunner +{ + [Serializable] + public class Discovery : TestMessageVisitor + { + private readonly string _assemblyName; + private readonly string _className; + public List TestCases { get; private set; } + + public Discovery(string assemblyName, string className) + { + _assemblyName = assemblyName; + _className = className; + TestCases = new List(); + } + + protected override bool Visit(ITestCaseDiscoveryMessage discovery) + { + var name = discovery.TestAssembly.Assembly.AssemblyPath.Split('\\').Last(); + if (!name.Equals(_assemblyName, StringComparison.OrdinalIgnoreCase)) + return true; + + var testName = discovery.TestClass.Class.Name; + if (testName.Equals(_className, StringComparison.OrdinalIgnoreCase)) + { + TestCases.Add(discovery.TestCase); + } + return true; + } + } +} \ No newline at end of file diff --git a/src/core/Akka.NodeTestRunner/Program.cs b/src/core/Akka.NodeTestRunner/Program.cs index 241c1c16665..f7fb2967899 100644 --- a/src/core/Akka.NodeTestRunner/Program.cs +++ b/src/core/Akka.NodeTestRunner/Program.cs @@ -26,44 +26,44 @@ static void Main(string[] args) using (var controller = new XunitFrontController(assemblyName)) { - using (var sink = new Sink(nodeIndex)) + using (var discovery = new Discovery(assemblyName, typeName)) { - Thread.Sleep(10000); - try + using (var sink = new Sink(nodeIndex)) { - controller.RunTests( - new[] + Thread.Sleep(10000); + try + { + controller.Find(true, discovery, TestFrameworkOptions.ForDiscovery()); + discovery.Finished.WaitOne(); + controller.RunTests(discovery.TestCases, sink, TestFrameworkOptions.ForExecution()); + } + catch (AggregateException ex) + { + var specFail = new SpecFail(nodeIndex, displayName); + specFail.FailureExceptionTypes.Add(ex.GetType().ToString()); + specFail.FailureMessages.Add(ex.Message); + specFail.FailureStackTraces.Add(ex.StackTrace); + foreach (var innerEx in ex.Flatten().InnerExceptions) { - new Xunit1TestCase(assemblyName, null, typeName, testName, displayName, null, - "MultiNodeTest") - }, sink, new TestFrameworkOptions()); - } - catch (AggregateException ex) - { - var specFail = new SpecFail(nodeIndex, displayName); - specFail.FailureExceptionTypes.Add(ex.GetType().ToString()); - specFail.FailureMessages.Add(ex.Message); - specFail.FailureStackTraces.Add(ex.StackTrace); - foreach (var innerEx in ex.Flatten().InnerExceptions) + specFail.FailureExceptionTypes.Add(innerEx.GetType().ToString()); + specFail.FailureMessages.Add(innerEx.Message); + specFail.FailureStackTraces.Add(innerEx.StackTrace); + } + Console.WriteLine(specFail); + Environment.Exit(1); //signal failure + } + catch (Exception ex) { - specFail.FailureExceptionTypes.Add(innerEx.GetType().ToString()); - specFail.FailureMessages.Add(innerEx.Message); - specFail.FailureStackTraces.Add(innerEx.StackTrace); + var specFail = new SpecFail(nodeIndex, displayName); + specFail.FailureExceptionTypes.Add(ex.GetType().ToString()); + specFail.FailureMessages.Add(ex.Message); + specFail.FailureStackTraces.Add(ex.StackTrace); + Console.WriteLine(specFail); + Environment.Exit(1); //signal failure } - Console.WriteLine(specFail); - Environment.Exit(1); //signal failure - } - catch (Exception ex) - { - var specFail = new SpecFail(nodeIndex, displayName); - specFail.FailureExceptionTypes.Add(ex.GetType().ToString()); - 
specFail.FailureMessages.Add(ex.Message); - specFail.FailureStackTraces.Add(ex.StackTrace); - Console.WriteLine(specFail); - Environment.Exit(1); //signal failure + sink.Finished.WaitOne(); + Environment.Exit(sink.Passed ? 0 : 1); } - sink.Finished.WaitOne(); - Environment.Exit(sink.Passed ? 0 : 1); } } } diff --git a/src/core/Akka.NodeTestRunner/Sink.cs b/src/core/Akka.NodeTestRunner/Sink.cs index 697152328ca..e85c6095ebd 100644 --- a/src/core/Akka.NodeTestRunner/Sink.cs +++ b/src/core/Akka.NodeTestRunner/Sink.cs @@ -14,7 +14,7 @@ namespace Akka.NodeTestRunner { - class Sink : IMessageSink, IDisposable + class Sink : MarshalByRefObject, IMessageSink, IDisposable { public bool Passed { get; private set; } public ManualResetEvent Finished { get; private set; } diff --git a/src/core/Akka.NodeTestRunner/packages.config b/src/core/Akka.NodeTestRunner/packages.config index ddcee7391e6..b4605234264 100644 --- a/src/core/Akka.NodeTestRunner/packages.config +++ b/src/core/Akka.NodeTestRunner/packages.config @@ -1,5 +1,5 @@  - - + + \ No newline at end of file diff --git a/src/core/Akka.Persistence.TestKit.Tests/Akka.Persistence.TestKit.Tests.csproj b/src/core/Akka.Persistence.TestKit.Tests/Akka.Persistence.TestKit.Tests.csproj index bf1e17fe6c8..0935f5ae3a2 100644 --- a/src/core/Akka.Persistence.TestKit.Tests/Akka.Persistence.TestKit.Tests.csproj +++ b/src/core/Akka.Persistence.TestKit.Tests/Akka.Persistence.TestKit.Tests.csproj @@ -1,5 +1,6 @@  + Debug @@ -13,6 +14,7 @@ 512 ..\..\ true + b3f6fdc7 true @@ -37,8 +39,15 @@ - - ..\..\packages\xunit.1.9.2\lib\net20\xunit.dll + + + ..\..\packages\xunit.abstractions.2.0.0\lib\net35\xunit.abstractions.dll + + + ..\..\packages\xunit.assert.2.0.0\lib\portable-net45+win+wpa81+wp80+monotouch+monoandroid+Xamarin.iOS\xunit.assert.dll + + + ..\..\packages\xunit.extensibility.core.2.0.0\lib\portable-net45+win+wpa81+wp80+monotouch+monoandroid+Xamarin.iOS\xunit.core.dll @@ -75,6 +84,7 @@ This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Sql.Common/InternalExtensions.cs b/src/contrib/persistence/Akka.Persistence.Sql.Common/InternalExtensions.cs new file mode 100644 index 00000000000..b4608d841b3 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Sql.Common/InternalExtensions.cs @@ -0,0 +1,12 @@ +using System; + +namespace Akka.Persistence.Sql.Common +{ + internal static class InternalExtensions + { + public static string QualifiedTypeName(this Type type) + { + return type.FullName + ", " + type.Assembly.GetName().Name; + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Sql.Common/Journal/JournalDbEngine.cs b/src/contrib/persistence/Akka.Persistence.Sql.Common/Journal/JournalDbEngine.cs new file mode 100644 index 00000000000..bba3c9dc620 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Sql.Common/Journal/JournalDbEngine.cs @@ -0,0 +1,335 @@ +using System; +using System.Collections.Generic; +using System.Data; +using System.Data.Common; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Akka.Actor; + +namespace Akka.Persistence.Sql.Common.Journal +{ + /// + /// Class used for storing intermediate result of the + /// in form which is ready to be stored directly in the SQL table. 
+ /// + public class JournalEntry + { + public readonly string PersistenceId; + public readonly long SequenceNr; + public readonly bool IsDeleted; + public readonly string PayloadType; + public readonly byte[] Payload; + + public JournalEntry(string persistenceId, long sequenceNr, bool isDeleted, string payloadType, byte[] payload) + { + PersistenceId = persistenceId; + SequenceNr = sequenceNr; + IsDeleted = isDeleted; + PayloadType = payloadType; + Payload = payload; + } + } + + /// + /// Class used to abstract SQL persistence capabilities for concrete implementation of actor journal. + /// + public abstract class JournalDbEngine : IDisposable + { + /// + /// Settings applied to journal mapped from HOCON config file. + /// + public readonly JournalSettings Settings; + + /// + /// List of cancellation tokens for each of the currently pending database operations. + /// + protected readonly LinkedList PendingOperations; + + private readonly Akka.Serialization.Serialization _serialization; + private DbConnection _dbConnection; + + protected JournalDbEngine(JournalSettings settings, Akka.Serialization.Serialization serialization) + { + Settings = settings; + _serialization = serialization; + + QueryMapper = new DefaultJournalQueryMapper(serialization); + + PendingOperations = new LinkedList(); + } + + /// + /// Initializes a database connection. + /// + protected abstract DbConnection CreateDbConnection(); + + /// + /// Copies values from entities to database command. + /// + /// + /// + protected abstract void CopyParamsToCommand(DbCommand sqlCommand, JournalEntry entry); + + /// + /// Gets database connection. + /// + public IDbConnection DbConnection { get { return _dbConnection; } } + + /// + /// Used for generating SQL commands for journal-related database operations. + /// + public IJournalQueryBuilder QueryBuilder { get; protected set; } + + /// + /// Used for mapping results returned from database into objects. + /// + public IJournalQueryMapper QueryMapper { get; protected set; } + + /// + /// Initializes and opens a database connection. + /// + public void Open() + { + // close connection if it was open + Close(); + + _dbConnection = CreateDbConnection(); + _dbConnection.Open(); + } + + /// + /// Closes database connection if exists. + /// + public void Close() + { + if (_dbConnection != null) + { + StopPendingOperations(); + + _dbConnection.Dispose(); + _dbConnection = null; + } + } + + /// + /// Stops all currently executing database operations. + /// + protected void StopPendingOperations() + { + // stop all operations executed in the background + var node = PendingOperations.First; + while (node != null) + { + var curr = node; + node = node.Next; + + curr.Value.Cancel(); + PendingOperations.Remove(curr); + } + } + + void IDisposable.Dispose() + { + Close(); + } + + /// + /// Asynchronously replays all requested messages related to provided , + /// using provided sequence ranges (inclusive) with number of messages replayed + /// (counting from the beginning). Replay callback is invoked for each replayed message. + /// + /// Identifier of persistent messages stream to be replayed. + /// Lower inclusive sequence number bound. Unbound by default. + /// Upper inclusive sequence number bound. Unbound by default. + /// Maximum number of messages to be replayed. Unbound by default. + /// Action invoked for each replayed message. 
+ public Task ReplayMessagesAsync(string persistenceId, long fromSequenceNr, long toSequenceNr, long max, IActorRef sender, Action replayCallback) + { + var sqlCommand = QueryBuilder.SelectMessages(persistenceId, fromSequenceNr, toSequenceNr, max); + CompleteCommand(sqlCommand); + + var tokenSource = GetCancellationTokenSource(); + + return sqlCommand + .ExecuteReaderAsync(tokenSource.Token) + .ContinueWith(task => + { + var reader = task.Result; + try + { + while (reader.Read()) + { + var persistent = QueryMapper.Map(reader, sender); + if (persistent != null) + { + replayCallback(persistent); + } + } + } + finally + { + PendingOperations.Remove(tokenSource); + reader.Close(); + } + }, TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent); + } + + /// + /// Asynchronously reads a highest sequence number of the event stream related with provided . + /// + public Task ReadHighestSequenceNrAsync(string persistenceId, long fromSequenceNr) + { + var sqlCommand = QueryBuilder.SelectHighestSequenceNr(persistenceId); + CompleteCommand(sqlCommand); + + var tokenSource = GetCancellationTokenSource(); + + return sqlCommand + .ExecuteScalarAsync(tokenSource.Token) + .ContinueWith(task => + { + PendingOperations.Remove(tokenSource); + var result = task.Result; + return result is long ? Convert.ToInt64(task.Result) : 0L; + }, TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent); + } + + /// + /// Synchronously writes all persistent inside SQL Server database. + /// + /// Specific table used for message persistence may be defined through configuration within + /// 'akka.persistence.journal.sql-server' scope with 'schema-name' and 'table-name' keys. + /// + public void WriteMessages(IEnumerable messages) + { + var persistentMessages = messages.ToArray(); + var sqlCommand = QueryBuilder.InsertBatchMessages(persistentMessages); + CompleteCommand(sqlCommand); + + var journalEntires = persistentMessages.Select(ToJournalEntry).ToList(); + + InsertInTransaction(sqlCommand, journalEntires); + } + + /// + /// Synchronously deletes all persisted messages identified by provided + /// up to provided message sequence number (inclusive). If flag is cleared, + /// messages will still reside inside database, but will be logically counted as deleted. + /// + public void DeleteMessagesTo(string persistenceId, long toSequenceNr, bool isPermanent) + { + var sqlCommand = QueryBuilder.DeleteBatchMessages(persistenceId, toSequenceNr, isPermanent); + CompleteCommand(sqlCommand); + + sqlCommand.ExecuteNonQuery(); + } + + /// + /// Asynchronously writes all persistent inside SQL Server database. + /// + /// Specific table used for message persistence may be defined through configuration within + /// 'akka.persistence.journal.sql-server' scope with 'schema-name' and 'table-name' keys. + /// + public async Task WriteMessagesAsync(IEnumerable messages) + { + var persistentMessages = messages.ToArray(); + var sqlCommand = QueryBuilder.InsertBatchMessages(persistentMessages); + CompleteCommand(sqlCommand); + + var journalEntires = persistentMessages.Select(ToJournalEntry).ToList(); + + await InsertInTransactionAsync(sqlCommand, journalEntires); + } + + /// + /// Asynchronously deletes all persisted messages identified by provided + /// up to provided message sequence number (inclusive). If flag is cleared, + /// messages will still reside inside database, but will be logically counted as deleted. 
+ /// + public async Task DeleteMessagesToAsync(string persistenceId, long toSequenceNr, bool isPermanent) + { + var sqlCommand = QueryBuilder.DeleteBatchMessages(persistenceId, toSequenceNr, isPermanent); + CompleteCommand(sqlCommand); + + await sqlCommand.ExecuteNonQueryAsync(); + } + + private void CompleteCommand(DbCommand sqlCommand) + { + sqlCommand.Connection = _dbConnection; + sqlCommand.CommandTimeout = (int)Settings.ConnectionTimeout.TotalMilliseconds; + } + + private CancellationTokenSource GetCancellationTokenSource() + { + var source = new CancellationTokenSource(); + PendingOperations.AddLast(source); + return source; + } + + private JournalEntry ToJournalEntry(IPersistentRepresentation message) + { + var payloadType = message.Payload.GetType(); + var serializer = _serialization.FindSerializerForType(payloadType); + + return new JournalEntry(message.PersistenceId, message.SequenceNr, message.IsDeleted, + payloadType.QualifiedTypeName(), serializer.ToBinary(message.Payload)); + } + + private void InsertInTransaction(DbCommand sqlCommand, IEnumerable journalEntires) + { + using (var tx = _dbConnection.BeginTransaction()) + { + sqlCommand.Transaction = tx; + try + { + foreach (var entry in journalEntires) + { + CopyParamsToCommand(sqlCommand, entry); + + if (sqlCommand.ExecuteNonQuery() != 1) + { + //TODO: something went wrong, ExecuteNonQuery() should return 1 (number of rows added) + } + } + + tx.Commit(); + } + catch (Exception) + { + tx.Rollback(); + throw; + } + } + } + + private async Task InsertInTransactionAsync(DbCommand sqlCommand, IEnumerable journalEntires) + { + using (var tx = _dbConnection.BeginTransaction()) + { + sqlCommand.Transaction = tx; + try + { + foreach (var entry in journalEntires) + { + CopyParamsToCommand(sqlCommand, entry); + + var commandResult = await sqlCommand.ExecuteNonQueryAsync(); + if (commandResult != 1) + { + //TODO: something went wrong, ExecuteNonQuery() should return 1 (number of rows added) + } + } + + tx.Commit(); + } + catch (Exception) + { + tx.Rollback(); + throw; + } + } + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Sql.Common/Journal/QueryBuilder.cs b/src/contrib/persistence/Akka.Persistence.Sql.Common/Journal/QueryBuilder.cs new file mode 100644 index 00000000000..9c7f23d6e5e --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Sql.Common/Journal/QueryBuilder.cs @@ -0,0 +1,31 @@ +using System.Data.Common; + +namespace Akka.Persistence.Sql.Common.Journal +{ + /// + /// SQL query builder used for generating queries required to perform journal's tasks. + /// + public interface IJournalQueryBuilder + { + /// + /// Returns query which should return a frame of messages filtered accordingly to provided parameters. + /// + DbCommand SelectMessages(string persistenceId, long fromSequenceNr, long toSequenceNr, long max); + + /// + /// Returns query returning single number considered as the highest sequence number in current journal. + /// + DbCommand SelectHighestSequenceNr(string persistenceId); + + /// + /// Returns a non-query command used to insert collection of in journal table. + /// + DbCommand InsertBatchMessages(IPersistentRepresentation[] messages); + + /// + /// Depending on flag this method may return either UPDATE or DELETE statement + /// used to alter IsDeleted field or delete rows permanently. 
+ /// + DbCommand DeleteBatchMessages(string persistenceId, long toSequenceNr, bool permanent); + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/QueryMapper.cs b/src/contrib/persistence/Akka.Persistence.Sql.Common/Journal/QueryMapper.cs similarity index 69% rename from src/contrib/persistence/Akka.Persistence.SqlServer/Journal/QueryMapper.cs rename to src/contrib/persistence/Akka.Persistence.Sql.Common/Journal/QueryMapper.cs index fa36665fb38..991258603b4 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/QueryMapper.cs +++ b/src/contrib/persistence/Akka.Persistence.Sql.Common/Journal/QueryMapper.cs @@ -1,7 +1,8 @@ using System; -using System.Data.SqlClient; +using System.Data.Common; +using Akka.Actor; -namespace Akka.Persistence.SqlServer.Journal +namespace Akka.Persistence.Sql.Common.Journal { /// /// Mapper used for generating persistent representations based on SQL query results. @@ -12,9 +13,13 @@ public interface IJournalQueryMapper /// Takes a current row from the SQL data reader and produces a persistent representation object in result. /// It's not supposed to move reader's cursor in any way. /// - IPersistentRepresentation Map(SqlDataReader reader); + IPersistentRepresentation Map(DbDataReader reader, IActorRef sender = null); } + /// + /// Default implementation of used for mapping data + /// returned from ADO.NET data readers back to messages. + /// internal class DefaultJournalQueryMapper : IJournalQueryMapper { private readonly Akka.Serialization.Serialization _serialization; @@ -24,17 +29,17 @@ public DefaultJournalQueryMapper(Akka.Serialization.Serialization serialization) _serialization = serialization; } - public IPersistentRepresentation Map(SqlDataReader reader) + public IPersistentRepresentation Map(DbDataReader reader, IActorRef sender = null) { var persistenceId = reader.GetString(0); var sequenceNr = reader.GetInt64(1); var isDeleted = reader.GetBoolean(2); var payload = GetPayload(reader); - return new Persistent(payload, sequenceNr, persistenceId, isDeleted); + return new Persistent(payload, sequenceNr, persistenceId, isDeleted, sender); } - private object GetPayload(SqlDataReader reader) + private object GetPayload(DbDataReader reader) { var payloadType = reader.GetString(3); var type = Type.GetType(payloadType, true); diff --git a/src/contrib/persistence/Akka.Persistence.Sql.Common/Properties/AssemblyInfo.cs b/src/contrib/persistence/Akka.Persistence.Sql.Common/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..e4be5070d3a --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Sql.Common/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Akka.Persistence.Sql.Common")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Akka.Persistence.Sql.Common")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. 
If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("e438d2c3-1075-4b01-bb84-e9efd3a36691")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/src/contrib/persistence/Akka.Persistence.Sql.Common/Settings.cs b/src/contrib/persistence/Akka.Persistence.Sql.Common/Settings.cs new file mode 100644 index 00000000000..6c4e20fec85 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Sql.Common/Settings.cs @@ -0,0 +1,77 @@ +using System; +using Akka.Configuration; + +namespace Akka.Persistence.Sql.Common +{ + /// + /// Configuration settings representation targeting Sql Server journal actor. + /// + public class JournalSettings + { + /// + /// Connection string used to access a persistent SQL Server instance. + /// + public string ConnectionString { get; private set; } + + /// + /// Connection timeout for SQL Server related operations. + /// + public TimeSpan ConnectionTimeout { get; private set; } + + /// + /// Schema name, where table corresponding to event journal is placed. + /// + public string SchemaName { get; private set; } + + /// + /// Name of the table corresponding to event journal. + /// + public string TableName { get; private set; } + + public JournalSettings(Config config) + { + if (config == null) throw new ArgumentNullException("config", "SqlServer journal settings cannot be initialized, because required HOCON section couldn't been found"); + + ConnectionString = config.GetString("connection-string"); + ConnectionTimeout = config.GetTimeSpan("connection-timeout"); + SchemaName = config.GetString("schema-name"); + TableName = config.GetString("table-name"); + } + } + + /// + /// Configuration settings representation targeting Sql Server snapshot store actor. + /// + public class SnapshotStoreSettings + { + /// + /// Connection string used to access a persistent SQL Server instance. + /// + public string ConnectionString { get; private set; } + + /// + /// Connection timeout for SQL Server related operations. + /// + public TimeSpan ConnectionTimeout { get; private set; } + + /// + /// Schema name, where table corresponding to snapshot store is placed. + /// + public string SchemaName { get; private set; } + + /// + /// Name of the table corresponding to snapshot store. 
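+        /// Read from the 'table-name' key of the HOCON config passed to the constructor.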
+ /// + public string TableName { get; private set; } + + public SnapshotStoreSettings(Config config) + { + if (config == null) throw new ArgumentNullException("config", "SqlServer snapshot store settings cannot be initialized, because required HOCON section couldn't been found"); + + ConnectionString = config.GetString("connection-string"); + ConnectionTimeout = config.GetTimeSpan("connection-timeout"); + SchemaName = config.GetString("schema-name"); + TableName = config.GetString("table-name"); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Sql.Common/Snapshot/DbSnapshotStore.cs b/src/contrib/persistence/Akka.Persistence.Sql.Common/Snapshot/DbSnapshotStore.cs new file mode 100644 index 00000000000..f76389da503 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Sql.Common/Snapshot/DbSnapshotStore.cs @@ -0,0 +1,168 @@ +using System.Collections.Generic; +using System.Data.Common; +using System.Threading; +using System.Threading.Tasks; +using Akka.Persistence.Snapshot; + +namespace Akka.Persistence.Sql.Common.Snapshot +{ + /// + /// Abstract snapshot store implementation, customized to work with SQL-based persistence providers. + /// + public abstract class DbSnapshotStore : SnapshotStore + { + /// + /// List of cancellation tokens for all pending asynchronous database operations. + /// + protected readonly LinkedList PendingOperations; + + private DbConnection _connection; + + protected DbSnapshotStore() + { + QueryMapper = new DefaultSnapshotQueryMapper(Context.System.Serialization); + PendingOperations = new LinkedList(); + } + + /// + /// Returns a new instance of database connection. + /// + protected abstract DbConnection CreateDbConnection(); + + /// + /// Gets settings for the current snapshot store. + /// + protected abstract SnapshotStoreSettings Settings { get; } + + /// + /// Gets current database connection. + /// + public DbConnection DbConnection { get { return _connection; } } + + /// + /// Query builder used to convert snapshot store related operations into corresponding SQL queries. + /// + public ISnapshotQueryBuilder QueryBuilder { get; set; } + + /// + /// Query mapper used to map SQL query results into snapshots. + /// + public ISnapshotQueryMapper QueryMapper { get; set; } + + protected override void PreStart() + { + base.PreStart(); + + _connection = CreateDbConnection(); + _connection.Open(); + } + + protected override void PostStop() + { + base.PostStop(); + + // stop all operations executed in the background + var node = PendingOperations.First; + while (node != null) + { + var curr = node; + node = node.Next; + + curr.Value.Cancel(); + PendingOperations.Remove(curr); + } + + _connection.Close(); + } + + /// + /// Asynchronously loads snapshot with the highest sequence number for a persistent actor/view matching specified criteria. + /// + protected override Task LoadAsync(string persistenceId, SnapshotSelectionCriteria criteria) + { + var sqlCommand = QueryBuilder.SelectSnapshot(persistenceId, criteria.MaxSequenceNr, criteria.MaxTimeStamp); + CompleteCommand(sqlCommand); + + var tokenSource = GetCancellationTokenSource(); + return sqlCommand + .ExecuteReaderAsync(tokenSource.Token) + .ContinueWith(task => + { + var reader = task.Result; + try + { + return reader.Read() ? 
QueryMapper.Map(reader) : null; + } + finally + { + PendingOperations.Remove(tokenSource); + reader.Close(); + } + }, TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent); + } + + /// + /// Asynchronously stores a snapshot with metadata as record in SQL table. + /// + protected override Task SaveAsync(SnapshotMetadata metadata, object snapshot) + { + var entry = ToSnapshotEntry(metadata, snapshot); + var sqlCommand = QueryBuilder.InsertSnapshot(entry); + CompleteCommand(sqlCommand); + + var tokenSource = GetCancellationTokenSource(); + + return sqlCommand.ExecuteNonQueryAsync(tokenSource.Token) + .ContinueWith(task => + { + PendingOperations.Remove(tokenSource); + }, TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent); + } + + protected override void Saved(SnapshotMetadata metadata) { } + + protected override void Delete(SnapshotMetadata metadata) + { + var sqlCommand = QueryBuilder.DeleteOne(metadata.PersistenceId, metadata.SequenceNr, metadata.Timestamp); + CompleteCommand(sqlCommand); + + sqlCommand.ExecuteNonQuery(); + } + + protected override void Delete(string persistenceId, SnapshotSelectionCriteria criteria) + { + var sqlCommand = QueryBuilder.DeleteMany(persistenceId, criteria.MaxSequenceNr, criteria.MaxTimeStamp); + CompleteCommand(sqlCommand); + + sqlCommand.ExecuteNonQuery(); + } + + private void CompleteCommand(DbCommand command) + { + command.Connection = _connection; + command.CommandTimeout = (int)Settings.ConnectionTimeout.TotalMilliseconds; + } + + private CancellationTokenSource GetCancellationTokenSource() + { + var source = new CancellationTokenSource(); + PendingOperations.AddLast(source); + return source; + } + + private SnapshotEntry ToSnapshotEntry(SnapshotMetadata metadata, object snapshot) + { + var snapshotType = snapshot.GetType(); + var serializer = Context.System.Serialization.FindSerializerForType(snapshotType); + + var binary = serializer.ToBinary(snapshot); + + return new SnapshotEntry( + persistenceId: metadata.PersistenceId, + sequenceNr: metadata.SequenceNr, + timestamp: metadata.Timestamp, + snapshotType: snapshotType.QualifiedTypeName(), + snapshot: binary); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Sql.Common/Snapshot/QueryBuilder.cs b/src/contrib/persistence/Akka.Persistence.Sql.Common/Snapshot/QueryBuilder.cs new file mode 100644 index 00000000000..9c2fc5d20ee --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Sql.Common/Snapshot/QueryBuilder.cs @@ -0,0 +1,77 @@ +using System; +using System.Data.Common; + +namespace Akka.Persistence.Sql.Common.Snapshot +{ + /// + /// Flattened and serialized snapshot object used as intermediate representation + /// before saving snapshot with metadata inside SQL Server database. + /// + public class SnapshotEntry + { + /// + /// Persistence identifier of persistent actor, current snapshot relates to. + /// + public readonly string PersistenceId; + + /// + /// Sequence number used to identify snapshot in it's persistent actor scope. + /// + public readonly long SequenceNr; + + /// + /// Timestamp used to specify date, when the snapshot has been made. + /// + public readonly DateTime Timestamp; + + /// + /// Stringified fully qualified CLR type name of the serialized object. + /// + public readonly string SnapshotType; + + /// + /// Serialized object data. 
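+        /// Typically produced by the Akka serializer resolved for the snapshot's runtime type.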
+ /// + public readonly byte[] Snapshot; + + public SnapshotEntry(string persistenceId, long sequenceNr, DateTime timestamp, string snapshotType, byte[] snapshot) + { + PersistenceId = persistenceId; + SequenceNr = sequenceNr; + Timestamp = timestamp; + SnapshotType = snapshotType; + Snapshot = snapshot; + } + } + + /// + /// Query builder used for prepare SQL commands used for snapshot store persistence operations. + /// + public interface ISnapshotQueryBuilder + { + /// + /// Deletes a single snapshot identified by it's persistent actor's , + /// and . + /// + DbCommand DeleteOne(string persistenceId, long sequenceNr, DateTime timestamp); + + /// + /// Deletes all snapshot matching persistent actor's as well as + /// upper (inclusive) bounds of the both and . + /// + DbCommand DeleteMany(string persistenceId, long maxSequenceNr, DateTime maxTimestamp); + + /// + /// Inserts a single snapshot represented by provided instance. + /// + DbCommand InsertSnapshot(SnapshotEntry entry); + + /// + /// Selects a single snapshot identified by persistent actor's , + /// matching upper (inclusive) bounds of both and . + /// In case, when more than one snapshot matches specified criteria, one with the highest sequence number will be selected. + /// + DbCommand SelectSnapshot(string persistenceId, long maxSequenceNr, DateTime maxTimestamp); + } + +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/QueryMapper.cs b/src/contrib/persistence/Akka.Persistence.Sql.Common/Snapshot/QueryMapper.cs similarity index 84% rename from src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/QueryMapper.cs rename to src/contrib/persistence/Akka.Persistence.Sql.Common/Snapshot/QueryMapper.cs index d2547c3208b..1bce34e01fc 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/QueryMapper.cs +++ b/src/contrib/persistence/Akka.Persistence.Sql.Common/Snapshot/QueryMapper.cs @@ -1,7 +1,7 @@ using System; -using System.Data.SqlClient; +using System.Data.Common; -namespace Akka.Persistence.SqlServer.Snapshot +namespace Akka.Persistence.Sql.Common.Snapshot { /// /// Mapper used to map results of snapshot SELECT queries into valid snapshot objects. @@ -11,7 +11,7 @@ public interface ISnapshotQueryMapper /// /// Map data found under current cursor pointed by SQL data reader into instance. 
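+        /// The default mapper expects columns in the order: PersistenceId, SequenceNr, Timestamp, SnapshotType, Snapshot.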
/// - SelectedSnapshot Map(SqlDataReader reader); + SelectedSnapshot Map(DbDataReader reader); } internal class DefaultSnapshotQueryMapper : ISnapshotQueryMapper @@ -23,7 +23,7 @@ public DefaultSnapshotQueryMapper(Akka.Serialization.Serialization serialization _serialization = serialization; } - public SelectedSnapshot Map(SqlDataReader reader) + public SelectedSnapshot Map(DbDataReader reader) { var persistenceId = reader.GetString(0); var sequenceNr = reader.GetInt64(1); @@ -35,7 +35,7 @@ public SelectedSnapshot Map(SqlDataReader reader) return new SelectedSnapshot(metadata, snapshot); } - private object GetSnapshot(SqlDataReader reader) + private object GetSnapshot(DbDataReader reader) { var type = Type.GetType(reader.GetString(3), true); var serializer = _serialization.FindSerializerForType(type); diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer.Tests/Akka.Persistence.SqlServer.Tests.csproj b/src/contrib/persistence/Akka.Persistence.SqlServer.Tests/Akka.Persistence.SqlServer.Tests.csproj index 5c19ca2eaaa..215b87e0526 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer.Tests/Akka.Persistence.SqlServer.Tests.csproj +++ b/src/contrib/persistence/Akka.Persistence.SqlServer.Tests/Akka.Persistence.SqlServer.Tests.csproj @@ -85,6 +85,10 @@ {11f4d4b8-7e07-4457-abf2-609b3e7b2649} Akka.TestKit.Xunit + + {3b9e6211-9488-4db5-b714-24248693b38f} + Akka.Persistence.Sql.Common + {bac85686-afc4-413e-98dc-5ed8f468bc63} Akka.Persistence.SqlServer diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer/Akka.Persistence.SqlServer.csproj b/src/contrib/persistence/Akka.Persistence.SqlServer/Akka.Persistence.SqlServer.csproj index 4847156456b..7a53e5b3a79 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer/Akka.Persistence.SqlServer.csproj +++ b/src/contrib/persistence/Akka.Persistence.SqlServer/Akka.Persistence.SqlServer.csproj @@ -41,12 +41,10 @@ - - @@ -59,6 +57,10 @@ {5deddf90-37f0-48d3-a0b0-a5cbd8a7e377} Akka + + {3b9e6211-9488-4db5-b714-24248693b38f} + Akka.Persistence.Sql.Common + diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer/Extension.cs b/src/contrib/persistence/Akka.Persistence.SqlServer/Extension.cs index df961f40972..92333106158 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer/Extension.cs +++ b/src/contrib/persistence/Akka.Persistence.SqlServer/Extension.cs @@ -1,93 +1,37 @@ using System; using Akka.Actor; using Akka.Configuration; +using Akka.Persistence.Sql.Common; namespace Akka.Persistence.SqlServer { - /// - /// Configuration settings representation targeting Sql Server journal actor. - /// - public class JournalSettings + + public class SqlServerJournalSettings : JournalSettings { public const string ConfigPath = "akka.persistence.journal.sql-server"; - /// - /// Connection string used to access a persistent SQL Server instance. - /// - public string ConnectionString { get; private set; } - - /// - /// Connection timeout for SQL Server related operations. - /// - public TimeSpan ConnectionTimeout { get; private set; } - - /// - /// Schema name, where table corresponding to event journal is placed. - /// - public string SchemaName { get; private set; } - - /// - /// Name of the table corresponding to event journal. - /// - public string TableName { get; private set; } - /// /// Flag determining in in case of event journal table missing, it should be automatically initialized. 
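+        /// Read from the 'auto-initialize' key of the journal HOCON section.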
/// public bool AutoInitialize { get; private set; } - public JournalSettings(Config config) + public SqlServerJournalSettings(Config config) : base(config) { - if (config == null) throw new ArgumentNullException("config", "SqlServer journal settings cannot be initialized, because required HOCON section couldn't been found"); - - ConnectionString = config.GetString("connection-string"); - ConnectionTimeout = config.GetTimeSpan("connection-timeout"); - SchemaName = config.GetString("schema-name"); - TableName = config.GetString("table-name"); AutoInitialize = config.GetBoolean("auto-initialize"); } } - /// - /// Configuration settings representation targeting Sql Server snapshot store actor. - /// - public class SnapshotStoreSettings + public class SqlServerSnapshotSettings : SnapshotStoreSettings { public const string ConfigPath = "akka.persistence.snapshot-store.sql-server"; - /// - /// Connection string used to access a persistent SQL Server instance. - /// - public string ConnectionString { get; private set; } - - /// - /// Connection timeout for SQL Server related operations. - /// - public TimeSpan ConnectionTimeout { get; private set; } - - /// - /// Schema name, where table corresponding to snapshot store is placed. - /// - public string SchemaName { get; private set; } - - /// - /// Name of the table corresponding to snapshot store. - /// - public string TableName { get; private set; } - /// /// Flag determining in in case of snapshot store table missing, it should be automatically initialized. /// public bool AutoInitialize { get; private set; } - public SnapshotStoreSettings(Config config) + public SqlServerSnapshotSettings(Config config) : base(config) { - if (config == null) throw new ArgumentNullException("config", "SqlServer snapshot store settings cannot be initialized, because required HOCON section couldn't been found"); - - ConnectionString = config.GetString("connection-string"); - ConnectionTimeout = config.GetTimeSpan("connection-timeout"); - SchemaName = config.GetString("schema-name"); - TableName = config.GetString("table-name"); AutoInitialize = config.GetBoolean("auto-initialize"); } } @@ -100,19 +44,19 @@ public class SqlServerPersistenceExtension : IExtension /// /// Journal-related settings loaded from HOCON configuration. /// - public readonly JournalSettings JournalSettings; + public readonly SqlServerJournalSettings JournalSettings; /// /// Snapshot store related settings loaded from HOCON configuration. 
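+        /// Loaded from the 'akka.persistence.snapshot-store.sql-server' configuration section.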
/// - public readonly SnapshotStoreSettings SnapshotStoreSettings; + public readonly SqlServerSnapshotSettings SnapshotStoreSettings; public SqlServerPersistenceExtension(ExtendedActorSystem system) { system.Settings.InjectTopLevelFallback(SqlServerPersistence.DefaultConfiguration()); - JournalSettings = new JournalSettings(system.Settings.Config.GetConfig(JournalSettings.ConfigPath)); - SnapshotStoreSettings = new SnapshotStoreSettings(system.Settings.Config.GetConfig(SnapshotStoreSettings.ConfigPath)); + JournalSettings = new SqlServerJournalSettings(system.Settings.Config.GetConfig(SqlServerJournalSettings.ConfigPath)); + SnapshotStoreSettings = new SqlServerSnapshotSettings(system.Settings.Config.GetConfig(SqlServerSnapshotSettings.ConfigPath)); if (JournalSettings.AutoInitialize) { diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer/InternalExtensions.cs b/src/contrib/persistence/Akka.Persistence.SqlServer/InternalExtensions.cs index 1fc0293dbc1..23bdb4977c5 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer/InternalExtensions.cs +++ b/src/contrib/persistence/Akka.Persistence.SqlServer/InternalExtensions.cs @@ -5,11 +5,6 @@ namespace Akka.Persistence.SqlServer { internal static class InternalExtensions { - public static string QualifiedTypeName(this Type type) - { - return type.FullName + ", " + type.Assembly.GetName().Name; - } - public static string QuoteSchemaAndTable(this string sqlQuery, string schemaName, string tableName) { var cb = new SqlCommandBuilder(); diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/QueryBuilder.cs b/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/QueryBuilder.cs index 9a4559e23f8..4b1197d8354 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/QueryBuilder.cs +++ b/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/QueryBuilder.cs @@ -1,36 +1,11 @@ using System.Data; +using System.Data.Common; using System.Data.SqlClient; using System.Text; +using Akka.Persistence.Sql.Common.Journal; namespace Akka.Persistence.SqlServer.Journal { - /// - /// SQL query builder used for generating queries required to perform journal's tasks. - /// - public interface IJournalQueryBuilder - { - /// - /// Returns query which should return a frame of messages filtered accordingly to provided parameters. - /// - SqlCommand SelectMessages(string persistenceId, long fromSequenceNr, long toSequenceNr, long max); - - /// - /// Returns query returning single number considered as the highest sequence number in current journal. - /// - SqlCommand SelectHighestSequenceNr(string persistenceId); - - /// - /// Returns a non-query command used to insert collection of in journal table. - /// - SqlCommand InsertBatchMessages(IPersistentRepresentation[] messages); - - /// - /// Depending on flag this method may return either UPDATE or DELETE statement - /// used to alter IsDeleted field or delete rows permanently. 
- /// - SqlCommand DeleteBatchMessages(string persistenceId, long toSequenceNr, bool permanent); - } - internal class DefaultJournalQueryBuilder : IJournalQueryBuilder { private readonly string _schemaName; @@ -49,7 +24,7 @@ public DefaultJournalQueryBuilder(string tableName, string schemaName) _selectHighestSequenceNrSql = @"SELECT MAX(SequenceNr) FROM {0}.{1} WHERE CS_PID = CHECKSUM(@pid)".QuoteSchemaAndTable(_schemaName, _tableName); } - public SqlCommand SelectMessages(string persistenceId, long fromSequenceNr, long toSequenceNr, long max) + public DbCommand SelectMessages(string persistenceId, long fromSequenceNr, long toSequenceNr, long max) { var sql = BuildSelectMessagesSql(fromSequenceNr, toSequenceNr, max); var command = new SqlCommand(sql) @@ -60,7 +35,7 @@ public SqlCommand SelectMessages(string persistenceId, long fromSequenceNr, long return command; } - public SqlCommand SelectHighestSequenceNr(string persistenceId) + public DbCommand SelectHighestSequenceNr(string persistenceId) { var command = new SqlCommand(_selectHighestSequenceNrSql) { @@ -70,7 +45,7 @@ public SqlCommand SelectHighestSequenceNr(string persistenceId) return command; } - public SqlCommand InsertBatchMessages(IPersistentRepresentation[] messages) + public DbCommand InsertBatchMessages(IPersistentRepresentation[] messages) { var command = new SqlCommand(_insertMessagesSql); command.Parameters.Add("@PersistenceId", SqlDbType.NVarChar); @@ -82,7 +57,7 @@ public SqlCommand InsertBatchMessages(IPersistentRepresentation[] messages) return command; } - public SqlCommand DeleteBatchMessages(string persistenceId, long toSequenceNr, bool permanent) + public DbCommand DeleteBatchMessages(string persistenceId, long toSequenceNr, bool permanent) { var sql = BuildDeleteSql(toSequenceNr, permanent); var command = new SqlCommand(sql) diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/SqlServerJournal.cs b/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/SqlServerJournal.cs index ade46f0f734..e5bcd5e7045 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/SqlServerJournal.cs +++ b/src/contrib/persistence/Akka.Persistence.SqlServer/Journal/SqlServerJournal.cs @@ -1,232 +1,154 @@ using System; using System.Collections.Generic; +using System.Data.Common; using System.Data.SqlClient; -using System.Linq; -using System.Threading; using System.Threading.Tasks; using Akka.Persistence.Journal; +using Akka.Persistence.Sql.Common; +using Akka.Persistence.Sql.Common.Journal; namespace Akka.Persistence.SqlServer.Journal { /// - /// Persistent journal actor using SQL Server as persistence layer. It processes write requests - /// one by one in synchronous manner, while reading results asynchronously. + /// Specialization of the which uses SQL Server as it's sql backend database. 
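+    /// It supplies the SqlConnection factory, the default query builder and the parameter binding used for batch inserts.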
/// - public class SqlServerJournal : SyncWriteJournal + public class SqlJournalEngine : JournalDbEngine { - #region journal internal types definitions - - internal class JournalEntry + public SqlJournalEngine(JournalSettings journalSettings, Akka.Serialization.Serialization serialization) + : base(journalSettings, serialization) { - public readonly string PersistenceId; - public readonly long SequenceNr; - public readonly bool IsDeleted; - public readonly string PayloadType; - public readonly byte[] Payload; + QueryBuilder = new DefaultJournalQueryBuilder(journalSettings.TableName, journalSettings.SchemaName); + } - public JournalEntry(string persistenceId, long sequenceNr, bool isDeleted, string payloadType, byte[] payload) - { - PersistenceId = persistenceId; - SequenceNr = sequenceNr; - IsDeleted = isDeleted; - PayloadType = payloadType; - Payload = payload; - } + protected override DbConnection CreateDbConnection() + { + return new SqlConnection(Settings.ConnectionString); } - #endregion + protected override void CopyParamsToCommand(DbCommand sqlCommand, JournalEntry entry) + { + sqlCommand.Parameters["@PersistenceId"].Value = entry.PersistenceId; + sqlCommand.Parameters["@SequenceNr"].Value = entry.SequenceNr; + sqlCommand.Parameters["@IsDeleted"].Value = entry.IsDeleted; + sqlCommand.Parameters["@PayloadType"].Value = entry.PayloadType; + sqlCommand.Parameters["@Payload"].Value = entry.Payload; + } + } + /// + /// Persistent journal actor using SQL Server as persistence layer. It processes write requests + /// one by one in asynchronous manner, while reading results asynchronously. + /// + public class SqlServerJournal : AsyncWriteJournal + { private readonly SqlServerPersistenceExtension _extension; - private SqlConnection _connection; - - protected readonly LinkedList PendingOperations; - - /// - /// Used for generating SQL commands for journal-related database operations. - /// - public IJournalQueryBuilder QueryBuilder { get; protected set; } - /// - /// Used for mapping results returned from database into objects. - /// - public IJournalQueryMapper QueryMapper { get; protected set; } + private JournalDbEngine _engine; public SqlServerJournal() { _extension = SqlServerPersistence.Instance.Apply(Context.System); + } - var settings = _extension.JournalSettings; - QueryBuilder = new DefaultJournalQueryBuilder(settings.TableName, settings.SchemaName); - QueryMapper = new DefaultJournalQueryMapper(Context.System.Serialization); - PendingOperations = new LinkedList(); + /// + /// Gets an engine instance responsible for handling all database-related journal requests. + /// + protected virtual JournalDbEngine Engine + { + get + { + return _engine ?? (_engine = new SqlJournalEngine(_extension.JournalSettings, Context.System.Serialization)); + } } protected override void PreStart() { base.PreStart(); - - _connection = new SqlConnection(_extension.JournalSettings.ConnectionString); - _connection.Open(); + Engine.Open(); } protected override void PostStop() { base.PostStop(); - - // stop all operations executed in the background - var node = PendingOperations.First; - while (node != null) - { - var curr = node; - node = node.Next; - - curr.Value.Cancel(); - PendingOperations.Remove(curr); - } - - _connection.Close(); + Engine.Close(); } - /// - /// Asynchronously replays all requested messages related to provided , - /// using provided sequence ranges (inclusive) with number of messages replayed - /// (counting from the beginning). Replay callback is invoked for each replayed message. 
- /// - /// Identifier of persistent messages stream to be replayed. - /// Lower inclusive sequence number bound. Unbound by default. - /// Upper inclusive sequence number bound. Unbound by default. - /// Maximum number of messages to be replayed. Unbound by default. - /// Action invoked for each replayed message. public override Task ReplayMessagesAsync(string persistenceId, long fromSequenceNr, long toSequenceNr, long max, Action replayCallback) { - var sqlCommand = QueryBuilder.SelectMessages(persistenceId, fromSequenceNr, toSequenceNr, max); - CompleteCommand(sqlCommand); - - var tokenSource = GetCancellationTokenSource(); - - return sqlCommand - .ExecuteReaderAsync(tokenSource.Token) - .ContinueWith(task => - { - var reader = task.Result; - try - { - while (reader.Read()) - { - var persistent = QueryMapper.Map(reader); - if (persistent != null) - replayCallback(persistent); - } - } - finally - { - PendingOperations.Remove(tokenSource); - reader.Close(); - } - }, TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent); + return Engine.ReplayMessagesAsync(persistenceId, fromSequenceNr, toSequenceNr, max, Context.Sender, replayCallback); } - /// - /// Asynchronously reads a highest sequence number of the event stream related with provided . - /// public override Task ReadHighestSequenceNrAsync(string persistenceId, long fromSequenceNr) { - var sqlCommand = QueryBuilder.SelectHighestSequenceNr(persistenceId); - CompleteCommand(sqlCommand); - - var tokenSource = GetCancellationTokenSource(); + return Engine.ReadHighestSequenceNrAsync(persistenceId, fromSequenceNr); + } - return sqlCommand - .ExecuteScalarAsync(tokenSource.Token) - .ContinueWith(task => - { - PendingOperations.Remove(tokenSource); - var result = task.Result; - return result is long ? Convert.ToInt64(task.Result) : 0L; - }, TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent); + protected override Task WriteMessagesAsync(IEnumerable messages) + { + return Engine.WriteMessagesAsync(messages); } - /// - /// Synchronously writes all persistent inside SQL Server database. - /// - /// Specific table used for message persistence may be defined through configuration within - /// 'akka.persistence.journal.sql-server' scope with 'schema-name' and 'table-name' keys. - /// - public override void WriteMessages(IEnumerable messages) + protected override Task DeleteMessagesToAsync(string persistenceId, long toSequenceNr, bool isPermanent) { - var persistentMessages = messages.ToArray(); - var sqlCommand = QueryBuilder.InsertBatchMessages(persistentMessages); - CompleteCommand(sqlCommand); + return Engine.DeleteMessagesToAsync(persistenceId, toSequenceNr, isPermanent); + } + } - var journalEntires = persistentMessages.Select(ToJournalEntry).ToList(); + /// + /// Persistent journal actor using SQL Server as persistence layer. It processes write requests + /// one by one in synchronous manner, while reading results asynchronously. Use for tests only. + /// + public class SyncSqlServerJournal : SyncWriteJournal + { + private readonly SqlServerPersistenceExtension _extension; + private JournalDbEngine _engine; - InsertInTransaction(sqlCommand, journalEntires); + public SyncSqlServerJournal() + { + _extension = SqlServerPersistence.Instance.Apply(Context.System); } /// - /// Synchronously deletes all persisted messages identified by provided - /// up to provided message sequence number (inclusive). 
If flag is cleared, - /// messages will still reside inside database, but will be logically counted as deleted. + /// Gets an engine instance responsible for handling all database-related journal requests. /// - public override void DeleteMessagesTo(string persistenceId, long toSequenceNr, bool isPermanent) + protected virtual JournalDbEngine Engine { - var sqlCommand = QueryBuilder.DeleteBatchMessages(persistenceId, toSequenceNr, isPermanent); - CompleteCommand(sqlCommand); + get + { + return _engine ?? (_engine = new SqlJournalEngine(_extension.JournalSettings, Context.System.Serialization)); + } + } - sqlCommand.ExecuteNonQuery(); + protected override void PreStart() + { + base.PreStart(); + Engine.Open(); } - private void CompleteCommand(SqlCommand sqlCommand) + protected override void PostStop() { - sqlCommand.Connection = _connection; - sqlCommand.CommandTimeout = (int)_extension.JournalSettings.ConnectionTimeout.TotalMilliseconds; + base.PostStop(); + Engine.Close(); } - private CancellationTokenSource GetCancellationTokenSource() + public override Task ReplayMessagesAsync(string persistenceId, long fromSequenceNr, long toSequenceNr, long max, Action replayCallback) { - var source = new CancellationTokenSource(); - PendingOperations.AddLast(source); - return source; + return Engine.ReplayMessagesAsync(persistenceId, fromSequenceNr, toSequenceNr, max, Context.Sender, replayCallback); } - private static JournalEntry ToJournalEntry(IPersistentRepresentation message) + public override Task ReadHighestSequenceNrAsync(string persistenceId, long fromSequenceNr) { - var payloadType = message.Payload.GetType(); - var serializer = Context.System.Serialization.FindSerializerForType(payloadType); + return Engine.ReadHighestSequenceNrAsync(persistenceId, fromSequenceNr); + } - return new JournalEntry(message.PersistenceId, message.SequenceNr, message.IsDeleted, - payloadType.QualifiedTypeName(), serializer.ToBinary(message.Payload)); + public override void WriteMessages(IEnumerable messages) + { + Engine.WriteMessages(messages); } - private void InsertInTransaction(SqlCommand sqlCommand, IEnumerable journalEntires) + public override void DeleteMessagesTo(string persistenceId, long toSequenceNr, bool isPermanent) { - using (var tx = _connection.BeginTransaction()) - { - sqlCommand.Transaction = tx; - try - { - foreach (var entry in journalEntires) - { - sqlCommand.Parameters["@PersistenceId"].Value = entry.PersistenceId; - sqlCommand.Parameters["@SequenceNr"].Value = entry.SequenceNr; - sqlCommand.Parameters["@IsDeleted"].Value = entry.IsDeleted; - sqlCommand.Parameters["@PayloadType"].Value = entry.PayloadType; - sqlCommand.Parameters["@Payload"].Value = entry.Payload; - - if (sqlCommand.ExecuteNonQuery() != 1) - { - //TODO: something went wrong, ExecuteNonQuery() should return 1 (number of rows added) - } - } - - tx.Commit(); - } - catch (Exception) - { - tx.Rollback(); - throw; - } - } + Engine.DeleteMessagesTo(persistenceId, toSequenceNr, isPermanent); } } } \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/QueryBuilder.cs b/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/QueryBuilder.cs index bc03dc85bf7..6376f3a2403 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/QueryBuilder.cs +++ b/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/QueryBuilder.cs @@ -1,81 +1,12 @@ using System; using System.Data; +using System.Data.Common; using System.Data.SqlClient; using System.Text; +using 
Akka.Persistence.Sql.Common.Snapshot; namespace Akka.Persistence.SqlServer.Snapshot { - /// - /// Flattened and serialized snapshot object used as intermediate representation - /// before saving snapshot with metadata inside SQL Server database. - /// - public class SnapshotEntry - { - /// - /// Persistence identifier of persistent actor, current snapshot relates to. - /// - public readonly string PersistenceId; - - /// - /// Sequence number used to identify snapshot in it's persistent actor scope. - /// - public readonly long SequenceNr; - - /// - /// Timestamp used to specify date, when the snapshot has been made. - /// - public readonly DateTime Timestamp; - - /// - /// Stringified fully qualified CLR type name of the serialized object. - /// - public readonly string SnapshotType; - - /// - /// Serialized object data. - /// - public readonly byte[] Snapshot; - - public SnapshotEntry(string persistenceId, long sequenceNr, DateTime timestamp, string snapshotType, byte[] snapshot) - { - PersistenceId = persistenceId; - SequenceNr = sequenceNr; - Timestamp = timestamp; - SnapshotType = snapshotType; - Snapshot = snapshot; - } - } - - /// - /// Query builder used for prepare SQL commands used for snapshot store persistence operations. - /// - public interface ISnapshotQueryBuilder - { - /// - /// Deletes a single snapshot identified by it's persistent actor's , - /// and . - /// - SqlCommand DeleteOne(string persistenceId, long sequenceNr, DateTime timestamp); - - /// - /// Deletes all snapshot matching persistent actor's as well as - /// upper (inclusive) bounds of the both and . - /// - SqlCommand DeleteMany(string persistenceId, long maxSequenceNr, DateTime maxTimestamp); - - /// - /// Inserts a single snapshot represented by provided instance. - /// - SqlCommand InsertSnapshot(SnapshotEntry entry); - - /// - /// Selects a single snapshot identified by persistent actor's , - /// matching upper (inclusive) bounds of both and . - /// In case, when more than one snapshot matches specified criteria, one with the highest sequence number will be selected. 
- /// - SqlCommand SelectSnapshot(string persistenceId, long maxSequenceNr, DateTime maxTimestamp); - } - internal class DefaultSnapshotQueryBuilder : ISnapshotQueryBuilder { private readonly string _deleteSql; @@ -89,7 +20,7 @@ public DefaultSnapshotQueryBuilder(string schemaName, string tableName) _selectSql = @"SELECT PersistenceId, SequenceNr, Timestamp, SnapshotType, Snapshot FROM {0}.{1} WHERE CS_PID = CHECKSUM(@PersistenceId)".QuoteSchemaAndTable(schemaName, tableName); } - public SqlCommand DeleteOne(string persistenceId, long sequenceNr, DateTime timestamp) + public DbCommand DeleteOne(string persistenceId, long sequenceNr, DateTime timestamp) { var sqlCommand = new SqlCommand(); sqlCommand.Parameters.Add(new SqlParameter("@PersistenceId", SqlDbType.NVarChar, persistenceId.Length) { Value = persistenceId }); @@ -112,7 +43,7 @@ public SqlCommand DeleteOne(string persistenceId, long sequenceNr, DateTime time return sqlCommand; } - public SqlCommand DeleteMany(string persistenceId, long maxSequenceNr, DateTime maxTimestamp) + public DbCommand DeleteMany(string persistenceId, long maxSequenceNr, DateTime maxTimestamp) { var sqlCommand = new SqlCommand(); sqlCommand.Parameters.Add(new SqlParameter("@PersistenceId", SqlDbType.NVarChar, persistenceId.Length) { Value = persistenceId }); @@ -135,7 +66,7 @@ public SqlCommand DeleteMany(string persistenceId, long maxSequenceNr, DateTime return sqlCommand; } - public SqlCommand InsertSnapshot(SnapshotEntry entry) + public DbCommand InsertSnapshot(SnapshotEntry entry) { var sqlCommand = new SqlCommand(_insertSql) { @@ -152,7 +83,7 @@ public SqlCommand InsertSnapshot(SnapshotEntry entry) return sqlCommand; } - public SqlCommand SelectSnapshot(string persistenceId, long maxSequenceNr, DateTime maxTimestamp) + public DbCommand SelectSnapshot(string persistenceId, long maxSequenceNr, DateTime maxTimestamp) { var sqlCommand = new SqlCommand(); sqlCommand.Parameters.Add(new SqlParameter("@PersistenceId", SqlDbType.NVarChar, persistenceId.Length) { Value = persistenceId }); diff --git a/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/SqlServerSnapshotStore.cs b/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/SqlServerSnapshotStore.cs index d69f9819e05..21061de6caf 100644 --- a/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/SqlServerSnapshotStore.cs +++ b/src/contrib/persistence/Akka.Persistence.SqlServer/Snapshot/SqlServerSnapshotStore.cs @@ -1,149 +1,28 @@ -using System.Collections.Generic; +using System.Data.Common; using System.Data.SqlClient; -using System.Threading; -using System.Threading.Tasks; -using Akka.Persistence.Snapshot; +using Akka.Persistence.Sql.Common; +using Akka.Persistence.Sql.Common.Snapshot; namespace Akka.Persistence.SqlServer.Snapshot { /// /// Actor used for storing incoming snapshots into persistent snapshot store backed by SQL Server database. 
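+    /// Database handling is inherited from the common DbSnapshotStore; this class only supplies SQL Server specific settings, query builder and connection.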
/// - public class SqlServerSnapshotStore : SnapshotStore + public class SqlServerSnapshotStore : DbSnapshotStore { - private readonly SqlServerPersistenceExtension _extension; - private SqlConnection _connection; + private readonly SqlServerSnapshotSettings _settings; - protected readonly LinkedList PendingOperations; - - public SqlServerSnapshotStore() - { - _extension = SqlServerPersistence.Instance.Apply(Context.System); - - var settings = _extension.SnapshotStoreSettings; - QueryBuilder = new DefaultSnapshotQueryBuilder(settings.SchemaName, settings.TableName); - QueryMapper = new DefaultSnapshotQueryMapper(Context.System.Serialization); - PendingOperations = new LinkedList(); - } - - /// - /// Query builder used to convert snapshot store related operations into corresponding SQL queries. - /// - public ISnapshotQueryBuilder QueryBuilder { get; set; } - - /// - /// Query mapper used to map SQL query results into snapshots. - /// - public ISnapshotQueryMapper QueryMapper { get; set; } - - protected override void PreStart() - { - base.PreStart(); - - _connection = new SqlConnection(_extension.SnapshotStoreSettings.ConnectionString); - _connection.Open(); - } - - protected override void PostStop() + public SqlServerSnapshotStore() : base() { - base.PostStop(); - - // stop all operations executed in the background - var node = PendingOperations.First; - while (node != null) - { - var curr = node; - node = node.Next; - - curr.Value.Cancel(); - PendingOperations.Remove(curr); - } - - _connection.Close(); + _settings = SqlServerPersistence.Instance.Apply(Context.System).SnapshotStoreSettings; + QueryBuilder = new DefaultSnapshotQueryBuilder(_settings.SchemaName, _settings.TableName); } - protected override Task LoadAsync(string persistenceId, SnapshotSelectionCriteria criteria) - { - var sqlCommand = QueryBuilder.SelectSnapshot(persistenceId, criteria.MaxSequenceNr, criteria.MaxTimeStamp); - CompleteCommand(sqlCommand); - - var tokenSource = GetCancellationTokenSource(); - return sqlCommand - .ExecuteReaderAsync(tokenSource.Token) - .ContinueWith(task => - { - var reader = task.Result; - try - { - return reader.Read() ? 
QueryMapper.Map(reader) : null; - } - finally - { - PendingOperations.Remove(tokenSource); - reader.Close(); - } - }, TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent); - } + protected override SnapshotStoreSettings Settings { get { return _settings; } } - protected override Task SaveAsync(SnapshotMetadata metadata, object snapshot) + protected override DbConnection CreateDbConnection() { - var entry = ToSnapshotEntry(metadata, snapshot); - var sqlCommand = QueryBuilder.InsertSnapshot(entry); - CompleteCommand(sqlCommand); - - var tokenSource = GetCancellationTokenSource(); - - return sqlCommand.ExecuteNonQueryAsync(tokenSource.Token) - .ContinueWith(task => - { - PendingOperations.Remove(tokenSource); - }, TaskContinuationOptions.ExecuteSynchronously | TaskContinuationOptions.AttachedToParent); - } - - protected override void Saved(SnapshotMetadata metadata) { } - - protected override void Delete(SnapshotMetadata metadata) - { - var sqlCommand = QueryBuilder.DeleteOne(metadata.PersistenceId, metadata.SequenceNr, metadata.Timestamp); - CompleteCommand(sqlCommand); - - sqlCommand.ExecuteNonQuery(); - } - - protected override void Delete(string persistenceId, SnapshotSelectionCriteria criteria) - { - var sqlCommand = QueryBuilder.DeleteMany(persistenceId, criteria.MaxSequenceNr, criteria.MaxTimeStamp); - CompleteCommand(sqlCommand); - - sqlCommand.ExecuteNonQuery(); - } - - private void CompleteCommand(SqlCommand command) - { - command.Connection = _connection; - command.CommandTimeout = (int)_extension.SnapshotStoreSettings.ConnectionTimeout.TotalMilliseconds; - } - - private CancellationTokenSource GetCancellationTokenSource() - { - var source = new CancellationTokenSource(); - PendingOperations.AddLast(source); - return source; - } - - private SnapshotEntry ToSnapshotEntry(SnapshotMetadata metadata, object snapshot) - { - var snapshotType = snapshot.GetType(); - var serializer = Context.System.Serialization.FindSerializerForType(snapshotType); - - var binary = serializer.ToBinary(snapshot); - - return new SnapshotEntry( - persistenceId: metadata.PersistenceId, - sequenceNr: metadata.SequenceNr, - timestamp: metadata.Timestamp, - snapshotType: snapshotType.QualifiedTypeName(), - snapshot: binary); + return new SqlConnection(Settings.ConnectionString); } } } \ No newline at end of file diff --git a/src/core/Akka.MultiNodeTests/Akka.MultiNodeTests.csproj b/src/core/Akka.MultiNodeTests/Akka.MultiNodeTests.csproj index 8121c482765..855a38b74c6 100644 --- a/src/core/Akka.MultiNodeTests/Akka.MultiNodeTests.csproj +++ b/src/core/Akka.MultiNodeTests/Akka.MultiNodeTests.csproj @@ -102,6 +102,9 @@ + + + diff --git a/src/core/Akka.Persistence/Journal/AsyncWriteJournal.cs b/src/core/Akka.Persistence/Journal/AsyncWriteJournal.cs index c51f2292438..589ca90d1b6 100644 --- a/src/core/Akka.Persistence/Journal/AsyncWriteJournal.cs +++ b/src/core/Akka.Persistence/Journal/AsyncWriteJournal.cs @@ -63,10 +63,11 @@ protected override bool Receive(object message) private void HandleDeleteMessagesTo(DeleteMessagesTo message) { + var eventStream = Context.System.EventStream; DeleteMessagesToAsync(message.PersistenceId, message.ToSequenceNr, message.IsPermanent) .ContinueWith(t => { - if (!t.IsFaulted && CanPublish) Context.System.EventStream.Publish(message); + if (!t.IsFaulted && CanPublish) eventStream.Publish(message); }, _continuationOptions); } From 8ba957af57856b20642cfc53f62c56f1b4b7abc0 Mon Sep 17 00:00:00 2001 From: Thomas Lazar Date: Mon, 11 May 2015 15:44:30 +0200 
Subject: [PATCH 26/66] Added Extensions methods to ActorSystem and ActorContext to make DI more accessible Issue #965 --- .../Akka.DI.Core/Akka.DI.Core.csproj | 1 + .../Akka.DI.Core/DIActorContextAdapter.cs | 6 ++++ .../Akka.DI.Core/DIActorSystemAdapter.cs | 34 +++++++++++++++++++ .../Akka.DI.Core/Extensions.cs | 6 +++- 4 files changed, 46 insertions(+), 1 deletion(-) create mode 100644 src/contrib/dependencyInjection/Akka.DI.Core/DIActorSystemAdapter.cs diff --git a/src/contrib/dependencyInjection/Akka.DI.Core/Akka.DI.Core.csproj b/src/contrib/dependencyInjection/Akka.DI.Core/Akka.DI.Core.csproj index 8d61ce7db5c..2e947ef984e 100644 --- a/src/contrib/dependencyInjection/Akka.DI.Core/Akka.DI.Core.csproj +++ b/src/contrib/dependencyInjection/Akka.DI.Core/Akka.DI.Core.csproj @@ -44,6 +44,7 @@ + diff --git a/src/contrib/dependencyInjection/Akka.DI.Core/DIActorContextAdapter.cs b/src/contrib/dependencyInjection/Akka.DI.Core/DIActorContextAdapter.cs index db0765c01d8..7ae6e2447f3 100644 --- a/src/contrib/dependencyInjection/Akka.DI.Core/DIActorContextAdapter.cs +++ b/src/contrib/dependencyInjection/Akka.DI.Core/DIActorContextAdapter.cs @@ -24,6 +24,12 @@ public IActorRef ActorOf(string name = null) where TActor : ActorBase { return context.ActorOf(producer.Props(typeof(TActor)), name); } + + public Props Props() where TActor : ActorBase + { + return producer.Props(typeof(TActor)); + } + } } diff --git a/src/contrib/dependencyInjection/Akka.DI.Core/DIActorSystemAdapter.cs b/src/contrib/dependencyInjection/Akka.DI.Core/DIActorSystemAdapter.cs new file mode 100644 index 00000000000..be6053c2574 --- /dev/null +++ b/src/contrib/dependencyInjection/Akka.DI.Core/DIActorSystemAdapter.cs @@ -0,0 +1,34 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System; +using Akka.Actor; + +namespace Akka.DI.Core +{ + public class DIActorSystemAdapter + { + readonly DIExt producer; + readonly ActorSystem system; + public DIActorSystemAdapter(ActorSystem system) + { + if (system == null) throw new ArgumentNullException("system"); + this.system = system; + this.producer = system.GetExtension(); + } + public IActorRef ActorOf(string name = null) where TActor : ActorBase + { + return system.ActorOf(producer.Props(typeof(TActor)), name); + } + + public Props Props() where TActor : ActorBase + { + return producer.Props(typeof(TActor)); + } + } +} + diff --git a/src/contrib/dependencyInjection/Akka.DI.Core/Extensions.cs b/src/contrib/dependencyInjection/Akka.DI.Core/Extensions.cs index 246a959ec85..25606611d02 100644 --- a/src/contrib/dependencyInjection/Akka.DI.Core/Extensions.cs +++ b/src/contrib/dependencyInjection/Akka.DI.Core/Extensions.cs @@ -28,7 +28,11 @@ public static void AddDependencyResolver(this ActorSystem system, IDependencyRes system.RegisterExtension(DIExtension.DIExtensionProvider); DIExtension.DIExtensionProvider.Get(system).Initialize(dependencyResolver); } - + + public static DIActorSystemAdapter DI(this ActorSystem system) + { + return new DIActorSystemAdapter(system); + } public static DIActorContextAdapter DI(this IActorContext context) { From 25f595d322b3dc0099060d9c7dfee574cc332207 Mon Sep 17 00:00:00 2001 From: Joshua Benjamin Date: Fri, 8 May 2015 19:19:37 -0700 Subject: [PATCH 27/66] Fixing up cluster leader election to match akka canonical. 
Fixed rejoining node Fixed ClusterDomainEventPublisher spec --- .../ClusterDomainEventPublisherSpec.cs | 45 ++++++++++--- .../ClusterDomainEventSpec.cs | 21 +++--- src/core/Akka.Cluster.Tests/GossipSpec.cs | 8 +-- src/core/Akka.Cluster/ClusterDaemon.cs | 61 +++++++++++++----- src/core/Akka.Cluster/ClusterEvent.cs | 64 ++++++++++++------- src/core/Akka.Cluster/ClusterReadView.cs | 3 +- src/core/Akka.Cluster/Gossip.cs | 58 ++++++++++++----- src/core/Akka.Remote/Endpoint.cs | 12 +++- .../Transport/AkkaProtocolTransport.cs | 3 +- 9 files changed, 190 insertions(+), 85 deletions(-) diff --git a/src/core/Akka.Cluster.Tests/ClusterDomainEventPublisherSpec.cs b/src/core/Akka.Cluster.Tests/ClusterDomainEventPublisherSpec.cs index 45407cff4de..6a63917770c 100644 --- a/src/core/Akka.Cluster.Tests/ClusterDomainEventPublisherSpec.cs +++ b/src/core/Akka.Cluster.Tests/ClusterDomainEventPublisherSpec.cs @@ -15,16 +15,26 @@ namespace Akka.Cluster.Tests { public class ClusterDomainEventPublisherSpec : AkkaSpec { - IActorRef _publisher; + const string Config = @" + akka.cluster { + auto-down-unreachable-after = 0s + periodic-tasks-initial-delay = 120 s // turn off scheduled tasks + publish-stats-interval = 0 s # always, when it happens + failure-detector.implementation-class = ""Akka.MultiNodeTests.FailureDetectorPuppet, Akka.MultiNodeTests"" + } + akka.actor.provider = ""Akka.Cluster.ClusterActorRefProvider, Akka.Cluster"" + akka.remote.helios.tcp.port = 0"; + + readonly IActorRef _publisher; static readonly Member aUp = TestMember.Create(new Address("akka.tcp", "sys", "a", 2552), MemberStatus.Up); - static readonly Member aLeaving = aUp.Copy(status: MemberStatus.Leaving); - static readonly Member aExiting = aLeaving.Copy(status: MemberStatus.Exiting); - static readonly Member aRemoved = aLeaving.Copy(status: MemberStatus.Removed); + static readonly Member aLeaving = aUp.Copy(MemberStatus.Leaving); + static readonly Member aExiting = aLeaving.Copy(MemberStatus.Exiting); + static readonly Member aRemoved = aExiting.Copy(MemberStatus.Removed); static readonly Member bExiting = TestMember.Create(new Address("akka.tcp", "sys", "b", 2552), MemberStatus.Exiting); - static readonly Member bRemoved = bExiting.Copy(status: MemberStatus.Removed); + static readonly Member bRemoved = bExiting.Copy(MemberStatus.Removed); static readonly Member cJoining = TestMember.Create(new Address("akka.tcp", "sys", "c", 2552), MemberStatus.Joining, ImmutableHashSet.Create("GRP")); - static readonly Member cUp = cJoining.Copy(status: MemberStatus.Up); - static readonly Member cRemoved = cUp.Copy(status: MemberStatus.Removed); + static readonly Member cUp = cJoining.Copy(MemberStatus.Up); + static readonly Member cRemoved = cUp.Copy(MemberStatus.Removed); static readonly Member a51Up = TestMember.Create(new Address("akk.tcp", "sys", "a", 2551), MemberStatus.Up); static readonly Member dUp = TestMember.Create(new Address("akka.tcp", "sys", "d", 2552), MemberStatus.Up, ImmutableHashSet.Create("GRP")); @@ -37,14 +47,15 @@ public class ClusterDomainEventPublisherSpec : AkkaSpec static readonly Gossip g6 = new Gossip(ImmutableSortedSet.Create(aLeaving, bExiting, cUp)).Seen(aUp.UniqueAddress); static readonly Gossip g7 = new Gossip(ImmutableSortedSet.Create(aExiting, bExiting, cUp)).Seen(aUp.UniqueAddress); static readonly Gossip g8 = new Gossip(ImmutableSortedSet.Create(aUp, bExiting, cUp, dUp), new GossipOverview(Reachability.Empty.Unreachable(aUp.UniqueAddress, dUp.UniqueAddress))).Seen(aUp.UniqueAddress); - - TestProbe 
_memberSubscriber; - public ClusterDomainEventPublisherSpec() + readonly TestProbe _memberSubscriber; + + public ClusterDomainEventPublisherSpec() : base(Config) { _memberSubscriber = CreateTestProbe(); Sys.EventStream.Subscribe(_memberSubscriber.Ref, typeof(ClusterEvent.IMemberEvent)); Sys.EventStream.Subscribe(_memberSubscriber.Ref, typeof(ClusterEvent.LeaderChanged)); + Sys.EventStream.Subscribe(_memberSubscriber.Ref, typeof(ClusterEvent.ClusterShuttingDown)); _publisher = Sys.ActorOf(Props.Create()); //TODO: If parent told of exception then test should fail (if not expected in some way)? @@ -144,6 +155,19 @@ public void ClusterDomainEventPublisherMustSendEventsCorrespondingToCurrentState subscriber.ExpectNoMsg(TimeSpan.FromMilliseconds(500)); } + [Fact] + public void Should_support_unsubscribe() + { + var subscriber = CreateTestProbe(); + _publisher.Tell(new InternalClusterAction.Subscribe(subscriber.Ref, ClusterEvent.SubscriptionInitialStateMode.InitialStateAsSnapshot, ImmutableHashSet.Create(typeof(ClusterEvent.IMemberEvent)))); + subscriber.ExpectMsg(); + _publisher.Tell(new InternalClusterAction.Unsubscribe(subscriber.Ref, typeof(ClusterEvent.IMemberEvent))); + _publisher.Tell(new InternalClusterAction.PublishChanges(g3)); + subscriber.ExpectNoMsg(TimeSpan.FromMilliseconds(500)); + _memberSubscriber.ExpectMsg(new ClusterEvent.MemberExited(bExiting)); + _memberSubscriber.ExpectMsg(new ClusterEvent.MemberUp(cUp)); + } + [Fact] public void ClusterDomainEventPublisherMustSendPublishSeenChanged() { @@ -162,6 +186,7 @@ public void ClusterDomainEventPublisherMustSendPublishSeenChanged() public void ClusterDomainEventPublisherMustPublishRemovedWhenStopped() { _publisher.Tell(PoisonPill.Instance); + _memberSubscriber.ExpectMsg(ClusterEvent.ClusterShuttingDown.Instance); _memberSubscriber.ExpectMsg(new ClusterEvent.MemberRemoved(aRemoved, MemberStatus.Up)); } diff --git a/src/core/Akka.Cluster.Tests/ClusterDomainEventSpec.cs b/src/core/Akka.Cluster.Tests/ClusterDomainEventSpec.cs index 084c4fbae15..0628b853bc2 100644 --- a/src/core/Akka.Cluster.Tests/ClusterDomainEventSpec.cs +++ b/src/core/Akka.Cluster.Tests/ClusterDomainEventSpec.cs @@ -36,6 +36,9 @@ public class ClusterDomainEventSpec static readonly Member eUp = TestMember.Create(new Address("akka.tcp", "sys", "e", 2552), MemberStatus.Up, eRoles); static readonly Member eDown = TestMember.Create(new Address("akka.tcp", "sys", "e", 2552), MemberStatus.Down, eRoles); + private static readonly UniqueAddress selfDummyAddress = + new UniqueAddress(new Address("akka.tcp", "sys", "selfDummy", 2552), 17); + private static Tuple> Converge(Gossip gossip) { var seed = Tuple.Create(gossip, ImmutableHashSet.Create()); @@ -65,7 +68,7 @@ public void DomainEventsMustBeProducedForNewMembers() Assert.Equal(ImmutableList.Create(new ClusterEvent.MemberUp(bUp)), ClusterEvent.DiffMemberEvents(g1, g2)); Assert.Equal(ImmutableList.Create(), ClusterEvent.DiffUnreachable(g1, g2)); - Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, s2.Select(s => s.Address).ToImmutableHashSet())), ClusterEvent.DiffSeen(g1, g2)); + Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, s2.Select(s => s.Address).ToImmutableHashSet())), ClusterEvent.DiffSeen(g1, g2, selfDummyAddress)); } [Fact] @@ -80,7 +83,7 @@ public void DomainEventMustBeProducedForChangedStatusOfMembers() Assert.Equal(ImmutableList.Create(new ClusterEvent.MemberUp(aUp)), ClusterEvent.DiffMemberEvents(g1, g2)); Assert.Equal(ImmutableList.Create(), ClusterEvent.DiffUnreachable(g1, 
g2)); - Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, s2.Select(s => s.Address).ToImmutableHashSet())), ClusterEvent.DiffSeen(g1, g2)); + Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, s2.Select(s => s.Address).ToImmutableHashSet())), ClusterEvent.DiffSeen(g1, g2, selfDummyAddress)); } [Fact] @@ -95,7 +98,7 @@ public void DomainEventMustBeProducedForMembersInUnreachable() var g2 = new Gossip(ImmutableSortedSet.Create(aUp, cUp, bDown, eDown), new GossipOverview(reachability2)); Assert.Equal(ImmutableList.Create(new ClusterEvent.UnreachableMember(bDown)), ClusterEvent.DiffUnreachable(g1, g2)); - Assert.Equal(ImmutableList.Create(), ClusterEvent.DiffSeen(g1, g2)); + Assert.Equal(ImmutableList.Create(), ClusterEvent.DiffSeen(g1, g2, selfDummyAddress)); } [Fact] @@ -126,10 +129,10 @@ public void DomainEventMustBeProducedForConvergenceChanges() Assert.Equal(ImmutableList.Create(), ClusterEvent.DiffMemberEvents(g1, g2)); Assert.Equal(ImmutableList.Create(), ClusterEvent.DiffUnreachable(g1, g2)); - Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, ImmutableHashSet.Create(aUp.Address, bUp.Address))), ClusterEvent.DiffSeen(g1, g2)); + Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, ImmutableHashSet.Create(aUp.Address, bUp.Address))), ClusterEvent.DiffSeen(g1, g2, selfDummyAddress)); Assert.Equal(ImmutableList.Create(), ClusterEvent.DiffMemberEvents(g2, g1)); Assert.Equal(ImmutableList.Create(), ClusterEvent.DiffUnreachable(g2, g1)); - Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, ImmutableHashSet.Create(aUp.Address, bUp.Address, eJoining.Address))), ClusterEvent.DiffSeen(g2, g1)); + Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, ImmutableHashSet.Create(aUp.Address, bUp.Address, eJoining.Address))), ClusterEvent.DiffSeen(g2, g1, selfDummyAddress)); } [Fact] @@ -144,8 +147,8 @@ public void DomainEventMustBeProducedForLeaderChanges() Assert.Equal(ImmutableList.Create(new ClusterEvent.MemberRemoved(aRemoved, MemberStatus.Up)), ClusterEvent.DiffMemberEvents(g1, g2)); Assert.Equal(ImmutableList.Create(), ClusterEvent.DiffUnreachable(g1, g2)); - Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, s2.Select(a => a.Address).ToImmutableHashSet())), ClusterEvent.DiffSeen(g1, g2)); - Assert.Equal(ImmutableList.Create(new ClusterEvent.LeaderChanged(bUp.Address)), ClusterEvent.DiffLeader(g1, g2)); + Assert.Equal(ImmutableList.Create(new ClusterEvent.SeenChanged(true, s2.Select(a => a.Address).ToImmutableHashSet())), ClusterEvent.DiffSeen(g1, g2, selfDummyAddress)); + Assert.Equal(ImmutableList.Create(new ClusterEvent.LeaderChanged(bUp.Address)), ClusterEvent.DiffLeader(g1, g2, selfDummyAddress)); } [Fact] @@ -162,13 +165,13 @@ public void DomainEventMustBeProducedForRoleLeaderChanges() new ClusterEvent.RoleLeaderChanged("DE", dLeaving.Address), new ClusterEvent.RoleLeaderChanged("EE", eUp.Address) ); - Assert.Equal(expected, ClusterEvent.DiffRolesLeader(g0, g1)); + Assert.Equal(expected, ClusterEvent.DiffRolesLeader(g0, g1, selfDummyAddress)); var expected2 = ImmutableHashSet.Create( new ClusterEvent.RoleLeaderChanged("AA", null), new ClusterEvent.RoleLeaderChanged("AB", bUp.Address), new ClusterEvent.RoleLeaderChanged("DE", eJoining.Address) ); - Assert.Equal(expected2, ClusterEvent.DiffRolesLeader(g1, g2)); + Assert.Equal(expected2, ClusterEvent.DiffRolesLeader(g1, g2, selfDummyAddress)); } } } diff --git a/src/core/Akka.Cluster.Tests/GossipSpec.cs 
b/src/core/Akka.Cluster.Tests/GossipSpec.cs index 41982492c4f..7f63c97a766 100644 --- a/src/core/Akka.Cluster.Tests/GossipSpec.cs +++ b/src/core/Akka.Cluster.Tests/GossipSpec.cs @@ -31,7 +31,7 @@ public class GossipSpec [Fact] public void AGossipMustReachConvergenceWhenItsEmpty() { - Assert.True(Gossip.Empty.Convergence); + Assert.True(Gossip.Empty.Convergence(a1.UniqueAddress)); } [Fact] @@ -91,9 +91,9 @@ public void AGossipMustMergeMembersByRemovingRemovedMembers() [Fact] public void AGossipMustHaveLeaderAsFirstMemberBasedOnOrderingExceptExitingStatus() { - Assert.Equal(c2.UniqueAddress, new Gossip(ImmutableSortedSet.Create(c2, e2)).Leader); - Assert.Equal(e2.UniqueAddress, new Gossip(ImmutableSortedSet.Create(c3, e2)).Leader); - Assert.Equal(c3.UniqueAddress, new Gossip(ImmutableSortedSet.Create(c3)).Leader); + Assert.Equal(c2.UniqueAddress, new Gossip(ImmutableSortedSet.Create(c2, e2)).Leader(c2.UniqueAddress)); + Assert.Equal(e2.UniqueAddress, new Gossip(ImmutableSortedSet.Create(c3, e2)).Leader(e2.UniqueAddress)); + Assert.Equal(c3.UniqueAddress, new Gossip(ImmutableSortedSet.Create(c3)).Leader(c3.UniqueAddress)); } [Fact] diff --git a/src/core/Akka.Cluster/ClusterDaemon.cs b/src/core/Akka.Cluster/ClusterDaemon.cs index c61fd9d8157..2a693a69fc6 100644 --- a/src/core/Akka.Cluster/ClusterDaemon.cs +++ b/src/core/Akka.Cluster/ClusterDaemon.cs @@ -655,6 +655,7 @@ string VclockName(UniqueAddress node) private bool _logInfo; readonly IActorRef _publisher; + private int _leaderActionCounter = 0; public ClusterCoreDaemon(IActorRef publisher) { @@ -1042,14 +1043,23 @@ public void Joining(UniqueAddress node, ImmutableHashSet roles) else { var localMembers = _latestGossip.Members; + var localMember = localMembers.FirstOrDefault(m => m.Address == node.Address); - // check by address without uid to make sure that node with same host:port is not allowed - // to join until previous node with that host:port has been removed from the cluster - var alreadyMember = localMembers.Any(m => m.Address == node.Address); - var isUnreachable = !_latestGossip.Overview.Reachability.IsReachable(node); - - if (alreadyMember) _log.Info("Existing member [{0}] is trying to join, ignoring", node); - else if (isUnreachable) _log.Info("Unreachable member [{0}] is trying to join, ignoring", node); + if (localMember != null && localMember.UniqueAddress == node) + { + _log.Info("Existing member [{0}] is joining again.", node); + if (!node.Equals(SelfUniqueAddress)) + { + Sender.Tell(new InternalClusterAction.Welcome(SelfUniqueAddress, _latestGossip)); + } + } + else if (localMember != null) + { + _log.Info("New incarnation of existing member [{0}] is trying to join. 
" + + "Existing will be removed from the cluster and then new member will be allowed to join.", node); + if (localMember.Status != MemberStatus.Down && localMember.Status != MemberStatus.Leaving && localMember.Status != MemberStatus.Exiting) + Downing(localMember.Address); + } else { // remove the node from the failure detector @@ -1243,11 +1253,6 @@ public ReceiveGossipType ReceiveGossip(GossipEnvelope envelope) from.Address, envelope.To); return ReceiveGossipType.Ignored; } - if (!remoteGossip.Overview.Reachability.IsReachable(SelfUniqueAddress)) - { - _log.Info("Ignoring received gossip with myself as unreachable, from [{0}]", from.Address); - return ReceiveGossipType.Ignored; - } if (!localGossip.Overview.Reachability.IsReachable(SelfUniqueAddress, from)) { _log.Info("Ignoring received gossip from unreachable [{0}]", from); @@ -1266,8 +1271,8 @@ public ReceiveGossipType ReceiveGossip(GossipEnvelope envelope) var comparison = remoteGossip.Version.CompareTo(localGossip.Version); - Gossip winningGossip = null; - bool talkback = false; + Gossip winningGossip; + bool talkback; ReceiveGossipType gossipType; switch (comparison) @@ -1449,11 +1454,33 @@ public double AdjustedGossipDifferentViewProbability /// public void LeaderActions() { - if (_latestGossip.IsLeader(SelfUniqueAddress)) + if (_latestGossip.IsLeader(SelfUniqueAddress, SelfUniqueAddress)) { // only run the leader actions if we are the LEADER - if (_latestGossip.Convergence) + const int firstNotice = 20; + const int periodicNotice = 60; + if (_latestGossip.Convergence(SelfUniqueAddress)) + { + if (_leaderActionCounter >= firstNotice) + _log.Info("Leader can perform its duties again"); + _leaderActionCounter = 0; LeaderActionsOnConvergence(); + } + else + { + _leaderActionCounter += 1; + if (_leaderActionCounter == firstNotice || _leaderActionCounter%periodicNotice == 0) + { + _log.Info( + "Leader can currently not perform its duties, reachability status: [{0}], member status: [{1}]", + _latestGossip.ReachabilityExcludingDownedObservers, + string.Join(", ", _latestGossip.Members + .Select(m => string.Format("${0} ${1} seen=${2}", + m.Address, + m.Status, + _latestGossip.SeenByNode(m.UniqueAddress))))); + } + } } } @@ -1687,7 +1714,7 @@ public void GossipStatusTo(UniqueAddress node, IActorRef destination) public bool ValidNodeForGossip(UniqueAddress node) { return !node.Equals(SelfUniqueAddress) && _latestGossip.HasMember(node) && - _latestGossip.Overview.Reachability.IsReachable(node); + _latestGossip.ReachabilityExcludingDownedObservers.IsReachable(node); } public void UpdateLatestGossip(Gossip newGossip) diff --git a/src/core/Akka.Cluster/ClusterEvent.cs b/src/core/Akka.Cluster/ClusterEvent.cs index 9bf2da3948f..b22a5784bc3 100644 --- a/src/core/Akka.Cluster/ClusterEvent.cs +++ b/src/core/Akka.Cluster/ClusterEvent.cs @@ -358,6 +358,11 @@ public override bool Equals(object obj) } } + public sealed class ClusterShuttingDown : IClusterDomainEvent + { + public static readonly IClusterDomainEvent Instance = new ClusterShuttingDown(); + } + //TODO: xml doc /// /// Marker interface to facilitate subscription of @@ -597,38 +602,43 @@ private static IEnumerable CollectMemberEvents(IEnumerable } } - internal static ImmutableList DiffLeader(Gossip oldGossip, Gossip newGossip) + internal static ImmutableList DiffLeader(Gossip oldGossip, Gossip newGossip, UniqueAddress selfUniqueAddress) { - var newLeader = newGossip.Leader; - if ((newLeader == null && oldGossip.Leader == null) || newLeader != null && newLeader.Equals(oldGossip.Leader)) 
return ImmutableList.Create(); - if (newLeader == null) return ImmutableList.Create(new LeaderChanged(null)); - return ImmutableList.Create(new LeaderChanged(newLeader.Address)); + var newLeader = newGossip.Leader(selfUniqueAddress); + if ((newLeader == null && oldGossip.Leader(selfUniqueAddress) == null) + || newLeader != null && newLeader.Equals(oldGossip.Leader(selfUniqueAddress))) + return ImmutableList.Create(); + + return ImmutableList.Create(newLeader == null + ? new LeaderChanged(null) + : new LeaderChanged(newLeader.Address)); } - internal static ImmutableHashSet DiffRolesLeader(Gossip oldGossip, Gossip newGossip) + internal static ImmutableHashSet DiffRolesLeader(Gossip oldGossip, Gossip newGossip, UniqueAddress selfUniqueAddress) { - return InternalDiffRolesLeader(oldGossip, newGossip).ToImmutableHashSet(); + return InternalDiffRolesLeader(oldGossip, newGossip, selfUniqueAddress).ToImmutableHashSet(); } - private static IEnumerable InternalDiffRolesLeader(Gossip oldGossip, Gossip newGossip) + private static IEnumerable InternalDiffRolesLeader(Gossip oldGossip, Gossip newGossip, UniqueAddress selfUniqueAddress) { foreach (var role in oldGossip.AllRoles.Union(newGossip.AllRoles)) { - var newLeader = newGossip.RoleLeader(role); - if(newLeader == null && oldGossip.RoleLeader(role) != null) + var newLeader = newGossip.RoleLeader(role, selfUniqueAddress); + if(newLeader == null && oldGossip.RoleLeader(role, selfUniqueAddress) != null) yield return new RoleLeaderChanged(role, null); - if(newLeader != null && !newLeader.Equals(oldGossip.RoleLeader(role))) + if(newLeader != null && !newLeader.Equals(oldGossip.RoleLeader(role, selfUniqueAddress))) yield return new RoleLeaderChanged(role, newLeader.Address); } } - internal static ImmutableList DiffSeen(Gossip oldGossip, Gossip newGossip) + internal static ImmutableList DiffSeen(Gossip oldGossip, Gossip newGossip, UniqueAddress selfUniqueAddres) { - if (newGossip.Equals(oldGossip)) return ImmutableList.Create(); + if (newGossip.Equals(oldGossip)) + return ImmutableList.Create(); - var newConvergence = newGossip.Convergence; + var newConvergence = newGossip.Convergence(selfUniqueAddres); var newSeenBy = newGossip.SeenBy; - if (newConvergence != oldGossip.Convergence || newSeenBy != oldGossip.SeenBy) + if (newConvergence != oldGossip.Convergence(selfUniqueAddres) || newSeenBy != oldGossip.SeenBy) return ImmutableList.Create(new SeenChanged(newConvergence, newSeenBy.Select(s => s.Address).ToImmutableHashSet())); return ImmutableList.Create(); @@ -648,10 +658,11 @@ internal static ImmutableList DiffReachability(Gossip oldGo sealed class ClusterDomainEventPublisher : UntypedActor { Gossip _latestGossip; - + private readonly UniqueAddress _selfUniqueAddress = Cluster.Get(Context.System).SelfUniqueAddress; + public ClusterDomainEventPublisher() { - _latestGossip = Gossip.Empty; + _latestGossip = Gossip.Empty; _eventStream = Context.System.EventStream; } @@ -663,6 +674,7 @@ protected override void PreRestart(Exception reason, object message) protected override void PostStop() { // publish the final removed state before shutting down + Publish(ClusterEvent.ClusterShuttingDown.Instance); PublishChanges(Gossip.Empty); } @@ -712,15 +724,19 @@ protected override void OnReceive(object message) /// private void SendCurrentClusterState(IActorRef receiver) { + var unreachable = _latestGossip.Overview.Reachability.AllUnreachableOrTerminated + .Where(node => node != _selfUniqueAddress) + .Select(_latestGossip.GetMember) + .ToImmutableHashSet(); + var state = 
new ClusterEvent.CurrentClusterState( _latestGossip.Members, - _latestGossip.Overview.Reachability.AllUnreachableOrTerminated.Select(_latestGossip.GetMember) - .ToImmutableHashSet(), + unreachable, _latestGossip.SeenBy.Select(s => s.Address).ToImmutableHashSet(), - _latestGossip.Leader == null ? null : _latestGossip.Leader.Address, + _latestGossip.Leader(_selfUniqueAddress) == null ? null : _latestGossip.Leader(_selfUniqueAddress).Address, _latestGossip.AllRoles.ToImmutableDictionary(r => r, r => { - var leader = _latestGossip.RoleLeader(r); + var leader = _latestGossip.RoleLeader(r, _selfUniqueAddress); return leader == null ? null : leader.Address; })); receiver.Tell(state); @@ -766,10 +782,10 @@ private void PublishDiff(Gossip oldGossip, Gossip newGossip, Action pub) foreach (var @event in ClusterEvent.DiffMemberEvents(oldGossip, newGossip)) pub(@event); foreach (var @event in ClusterEvent.DiffUnreachable(oldGossip, newGossip)) pub(@event); foreach (var @event in ClusterEvent.DiffReachable(oldGossip, newGossip)) pub(@event); - foreach (var @event in ClusterEvent.DiffLeader(oldGossip, newGossip)) pub(@event); - foreach (var @event in ClusterEvent.DiffRolesLeader(oldGossip, newGossip)) pub(@event); + foreach (var @event in ClusterEvent.DiffLeader(oldGossip, newGossip, _selfUniqueAddress)) pub(@event); + foreach (var @event in ClusterEvent.DiffRolesLeader(oldGossip, newGossip, _selfUniqueAddress)) pub(@event); // publish internal SeenState for testing purposes - foreach (var @event in ClusterEvent.DiffSeen(oldGossip, newGossip)) pub(@event); + foreach (var @event in ClusterEvent.DiffSeen(oldGossip, newGossip, _selfUniqueAddress)) pub(@event); foreach (var @event in ClusterEvent.DiffReachability(oldGossip, newGossip)) pub(@event); } diff --git a/src/core/Akka.Cluster/ClusterReadView.cs b/src/core/Akka.Cluster/ClusterReadView.cs index f4ce46df3ed..055d5a3c738 100644 --- a/src/core/Akka.Cluster/ClusterReadView.cs +++ b/src/core/Akka.Cluster/ClusterReadView.cs @@ -147,7 +147,8 @@ public EventBusListener(Cluster cluster, ClusterReadView readView) .With(changed => { readView._clusterMetrics = changed.NodeMetrics; - }); + }) + .With(_ => { }); }); Receive(state => diff --git a/src/core/Akka.Cluster/Gossip.cs b/src/core/Akka.Cluster/Gossip.cs index 8c6454960ec..8ec7109ac85 100644 --- a/src/core/Akka.Cluster/Gossip.cs +++ b/src/core/Akka.Cluster/Gossip.cs @@ -6,7 +6,6 @@ //----------------------------------------------------------------------- using System; -using System.Collections.Generic; using System.Collections.Immutable; using System.Linq; using Akka.Remote; @@ -71,10 +70,12 @@ public static Gossip Create(ImmutableSortedSet members) readonly ImmutableSortedSet _members; readonly GossipOverview _overview; readonly VectorClock _version; + private readonly Lazy _reachability; public ImmutableSortedSet Members { get { return _members; } } public GossipOverview Overview { get { return _overview; } } public VectorClock Version { get { return _version; } } + public Reachability ReachabilityExcludingDownedObservers { get { return _reachability.Value; } } public Gossip(ImmutableSortedSet members) : this(members, new GossipOverview(), VectorClock.Create() ) {} @@ -89,6 +90,15 @@ public Gossip(ImmutableSortedSet members, GossipOverview overview, Vecto _membersMap = new Lazy>( () => members.ToImmutableDictionary(m => m.UniqueAddress, m => m)); + _reachability = new Lazy(() => + { + var downed = _members + .Where(m => m.Status == MemberStatus.Down) + .Select(m=>m.UniqueAddress); + + return 
overview.Reachability.Remove(downed); + }); + if (Cluster.IsAssertInvariantsEnabled) AssertInvariants(); } @@ -204,39 +214,53 @@ public Gossip Merge(Gossip that) // status is in the seen table and has the latest vector clock // version /// - public bool Convergence + public bool Convergence(UniqueAddress selfUniqueAddress) { - get - { - var unreachable = _overview.Reachability.AllUnreachableOrTerminated.Select(GetMember); - return unreachable.All(m => ConvergenceSkipUnreachableWithMemberStatus.Contains(m.Status)) && - !_members.Any(m => Gossip.ConvergenceMemberStatus.Contains(m.Status) && !SeenByNode(m.UniqueAddress)); - } + var unreachable = _overview.Reachability.AllUnreachableOrTerminated + .Where(node => node != selfUniqueAddress) + .Select(GetMember); + + var convergedUnreachable = unreachable + .All(m => ConvergenceSkipUnreachableWithMemberStatus.Contains(m.Status)); + + var convergedSeen = + !_members.Any(m => ConvergenceMemberStatus.Contains(m.Status) && !SeenByNode(m.UniqueAddress)); + + return convergedUnreachable && convergedSeen; } - public bool IsLeader(UniqueAddress node) + public bool IsLeader(UniqueAddress node, UniqueAddress selfUniqueAddress) { - return Leader.Equals(node); + return Leader(selfUniqueAddress) == node && node != null; } - public UniqueAddress Leader + public UniqueAddress Leader(UniqueAddress selfUniqueAddress) { - get { return LeaderOf(_members); } + return LeaderOf(_members, selfUniqueAddress); } - public UniqueAddress RoleLeader(string role) + public UniqueAddress RoleLeader(string role, UniqueAddress selfUniqueAddress) { - return LeaderOf(_members.Where(m => m.HasRole(role))); + var roleMembers = _members + .Where(m => m.HasRole(role)) + .ToImmutableSortedSet(); + + return LeaderOf(roleMembers, selfUniqueAddress); } - private UniqueAddress LeaderOf(IEnumerable mbrs) + private UniqueAddress LeaderOf(ImmutableSortedSet mbrs, UniqueAddress selfUniqueAddress) { var reachableMembers = _overview.Reachability.IsAllReachable ? mbrs - : mbrs.Where(m => _overview.Reachability.IsReachable(m.UniqueAddress)); + : mbrs + .Where(m => _overview.Reachability.IsReachable(m.UniqueAddress) || m.UniqueAddress == selfUniqueAddress) + .ToImmutableSortedSet(); + if (!reachableMembers.Any()) return null; - var member = reachableMembers.FirstOrDefault(m => Gossip.LeaderMemberStatus.Contains(m.Status)) ?? + + var member = reachableMembers.FirstOrDefault(m => LeaderMemberStatus.Contains(m.Status)) ?? 
reachableMembers.Min(Member.LeaderStatusOrdering); + return member.UniqueAddress; } diff --git a/src/core/Akka.Remote/Endpoint.cs b/src/core/Akka.Remote/Endpoint.cs index ed36fc9ebbc..dc2c6b79ee9 100644 --- a/src/core/Akka.Remote/Endpoint.cs +++ b/src/core/Akka.Remote/Endpoint.cs @@ -824,7 +824,17 @@ protected override void PreStart() { if (_handle == null) { - Transport.Associate(RemoteAddress, _refuseUid).ContinueWith(x => new Handle(x.Result), + Transport + .Associate(RemoteAddress, _refuseUid) + .ContinueWith(handle => + { + if (handle.IsFaulted) + { + var inner = handle.Exception.Flatten().InnerException; + return (object)new Status.Failure(new InvalidAssociationException("Association failure", inner)); + } + return new Handle(handle.Result); + }, TaskContinuationOptions.ExecuteSynchronously & TaskContinuationOptions.AttachedToParent) .PipeTo(Self); } diff --git a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs index 809c308e4df..4159d964072 100644 --- a/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs +++ b/src/core/Akka.Remote/Transport/AkkaProtocolTransport.cs @@ -118,8 +118,7 @@ public Task Associate(Address remoteAddress, int? refuseUid) manager.Tell(new AssociateUnderlyingRefuseUid(SchemeAugmenter.RemoveScheme(remoteAddress), statusPromise, refuseUid)); - return statusPromise.Task.ContinueWith(result => ((AkkaProtocolHandle) result.Result), - TaskContinuationOptions.AttachedToParent & TaskContinuationOptions.ExecuteSynchronously); + return statusPromise.Task.CastTask(); } #region Static properties From c0818f6266d66aceba1dba8156e1e16b05b0a399 Mon Sep 17 00:00:00 2001 From: Joshua Benjamin Date: Mon, 11 May 2015 19:23:25 -0700 Subject: [PATCH 28/66] fixed assembly info for csharp.. it wasn't getting generated anymore updated prerelease --- build.fsx | 36 ++++++++++++++++++------------------ src/SharedAssemblyInfo.cs | 14 +++----------- 2 files changed, 21 insertions(+), 29 deletions(-) diff --git a/build.fsx b/build.fsx index 0042f15dba8..1428c1cc88d 100644 --- a/build.fsx +++ b/build.fsx @@ -36,14 +36,14 @@ let parsedRelease = File.ReadLines "RELEASE_NOTES.md" |> ReleaseNotesHelper.parseReleaseNotes -//Fake.ReleaseNotesHelper.Parse assumes letters+int in PreRelease.TryParse. -//This means we cannot append the full date yyyMMddHHmmss to prerelease. -//See https://github.com/fsharp/FAKE/issues/522 -//TODO: When this has been fixed, switch to DateTime.UtcNow.ToString("yyyyMMddHHmmss") -let preReleaseVersion = parsedRelease.AssemblyVersion + "-" + (getBuildParamOrDefault "nugetprerelease" "pre") + DateTime.UtcNow.ToString("yyMMddHHmm") +let envBuildNumber = System.Environment.GetEnvironmentVariable("BUILD_NUMBER") +let buildNumber = if String.IsNullOrWhiteSpace(envBuildNumber) then "0" else envBuildNumber + +let version = parsedRelease.AssemblyVersion + "." 
+ buildNumber + let isUnstableDocs = hasBuildParam "unstable" let isPreRelease = hasBuildParam "nugetprerelease" -let release = if isPreRelease then ReleaseNotesHelper.ReleaseNotes.New(parsedRelease.AssemblyVersion, preReleaseVersion, parsedRelease.Notes) else parsedRelease +let release = if isPreRelease then ReleaseNotesHelper.ReleaseNotes.New(version, version + " -beta", parsedRelease.Notes) else parsedRelease printfn "Assembly version: %s\nNuget version; %s\n" release.AssemblyVersion release.NugetVersion //-------------------------------------------------------------------------------- @@ -77,15 +77,21 @@ Target "Clean" <| fun _ -> // Generate AssemblyInfo files with the version for release notes open AssemblyInfoFile + Target "AssemblyInfo" <| fun _ -> + CreateCSharpAssemblyInfoWithConfig "src/SharedAssemblyInfo.cs" [ + Attribute.Company company + Attribute.Copyright copyright + Attribute.Trademark "" + Attribute.Version version + Attribute.FileVersion version ] <| AssemblyInfoFileConfig(false) + for file in !! "src/**/AssemblyInfo.fs" do let title = file |> Path.GetDirectoryName |> Path.GetDirectoryName |> Path.GetFileName - - let version = release.AssemblyVersion + ".0" CreateFSharpAssemblyInfo file [ Attribute.Title title @@ -98,12 +104,6 @@ Target "AssemblyInfo" <| fun _ -> Attribute.Version version Attribute.FileVersion version ] - CreateCSharpAssemblyInfoWithConfig "src/SharedAssemblyInfo.cs" [ - Attribute.Company company - Attribute.Copyright copyright - Attribute.Trademark "" - Attribute.Version version - Attribute.FileVersion version ] |> ignore //-------------------------------------------------------------------------------- // Build the solution @@ -281,8 +281,8 @@ module Nuget = match project with | "Akka" -> [] | "Akka.Cluster" -> ["Akka.Remote", release.NugetVersion] - | "Akka.Persistence.TestKit" -> ["Akka.Persistence", preReleaseVersion] - | "Akka.Persistence.FSharp" -> ["Akka.Persistence", preReleaseVersion] + | "Akka.Persistence.TestKit" -> ["Akka.Persistence", release.NugetVersion] + | "Akka.Persistence.FSharp" -> ["Akka.Persistence", release.NugetVersion] | di when (di.StartsWith("Akka.DI.") && not (di.EndsWith("Core"))) -> ["Akka.DI.Core", release.NugetVersion] | testkit when testkit.StartsWith("Akka.TestKit.") -> ["Akka.TestKit", release.NugetVersion] | _ -> ["Akka", release.NugetVersion] @@ -290,8 +290,8 @@ module Nuget = // used to add -pre suffix to pre-release packages let getProjectVersion project = match project with - | "Akka.Cluster" -> preReleaseVersion - | persistence when persistence.StartsWith("Akka.Persistence") -> preReleaseVersion + | "Akka.Cluster" -> release.NugetVersion + | persistence when persistence.StartsWith("Akka.Persistence") -> release.NugetVersion | _ -> release.NugetVersion open Nuget diff --git a/src/SharedAssemblyInfo.cs b/src/SharedAssemblyInfo.cs index 7f7e44b208a..64e91e4e908 100644 --- a/src/SharedAssemblyInfo.cs +++ b/src/SharedAssemblyInfo.cs @@ -1,16 +1,8 @@ -//----------------------------------------------------------------------- -// -// Copyright (C) 2009-2015 Typesafe Inc. 
-// Copyright (C) 2013-2015 Akka.NET project -// -//----------------------------------------------------------------------- - -// +// using System.Reflection; [assembly: AssemblyCompanyAttribute("Akka.NET Team")] [assembly: AssemblyCopyrightAttribute("Copyright © 2013-2015 Akka.NET Team")] [assembly: AssemblyTrademarkAttribute("")] -[assembly: AssemblyVersionAttribute("1.0.1.0")] -[assembly: AssemblyFileVersionAttribute("1.0.1.0")] - +[assembly: AssemblyVersionAttribute("1.0.2.0")] +[assembly: AssemblyFileVersionAttribute("1.0.2.0")] From c82857fedd868352b9276a1deefc6b9a80c4d91f Mon Sep 17 00:00:00 2001 From: Thomas Lazar Date: Tue, 12 May 2015 10:09:45 +0200 Subject: [PATCH 29/66] Changes to the BasicCastleWindsorUse project to use the new extension. --- .../Examples/BasicCastleWindsorUse/Program.cs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/contrib/dependencyInjection/Examples/BasicCastleWindsorUse/Program.cs b/src/contrib/dependencyInjection/Examples/BasicCastleWindsorUse/Program.cs index ba4bb6c2edb..95c84af07bf 100644 --- a/src/contrib/dependencyInjection/Examples/BasicCastleWindsorUse/Program.cs +++ b/src/contrib/dependencyInjection/Examples/BasicCastleWindsorUse/Program.cs @@ -12,6 +12,7 @@ using Castle.Windsor; using System; using System.Threading.Tasks; +using Akka.DI.Core; namespace BasicCastleWindsorUse { @@ -33,7 +34,7 @@ private static void WithHashPool() var propsResolver = new WindsorDependencyResolver(container, system); - var router = system.ActorOf(propsResolver.Create().WithRouter(FromConfig.Instance), "router1"); + var router = system.ActorOf(system.DI().Props().WithRouter(FromConfig.Instance), "router1"); Task.Delay(500).Wait(); Console.WriteLine("Sending Messages"); From 275d2f56bebcd9762d8da2be9663ada0bb724927 Mon Sep 17 00:00:00 2001 From: David Flerlage Date: Tue, 12 May 2015 16:33:33 -0700 Subject: [PATCH 30/66] PostgreSQL persistence plugin for both event journal and snapshot store. 
See issue #944 -Largely ported from Akka.Persistence.SqlServer plugin -Event journal and snapshot store specs passing -Added separate build task for PostgreSql tests -Added initial draft of readme and nuspec file --- build.fsx | 11 +- src/Akka.sln | 22 +++ .../Akka.Persistence.PostgreSql.Tests.csproj | 106 +++++++++++ .../DbUtils.cs | 80 +++++++++ .../PostgreSqlJournalSpec.cs | 49 ++++++ .../PostgreSqlSnapshotStoreSpec.cs | 49 ++++++ .../Properties/AssemblyInfo.cs | 36 ++++ .../app.config | 6 + .../packages.config | 4 + .../Akka.Persistence.PostgreSql.csproj | 101 +++++++++++ .../Akka.Persistence.PostgreSql.nuspec | 20 +++ .../Akka.Persistence.PostgreSql/Extension.cs | 116 ++++++++++++ .../InternalExtensions.cs | 15 ++ .../Journal/PostgreSqlJournal.cs | 96 ++++++++++ .../Journal/QueryBuilder.cs | 140 +++++++++++++++ .../Journal/QueryMapper.cs | 39 ++++ .../PostgreSqlInitializer.cs | 99 +++++++++++ .../Properties/AssemblyInfo.cs | 36 ++++ .../Akka.Persistence.PostgreSql/README.md | 88 ++++++++++ .../Snapshot/PostgreSqlSnapshotStore.cs | 44 +++++ .../Snapshot/QueryBuilder.cs | 166 ++++++++++++++++++ .../Snapshot/QueryMapper.cs | 44 +++++ .../packages.config | 4 + .../postgresql.conf | 54 ++++++ 24 files changed, 1424 insertions(+), 1 deletion(-) create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/Akka.Persistence.PostgreSql.Tests.csproj create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/DbUtils.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/PostgreSqlJournalSpec.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/PostgreSqlSnapshotStoreSpec.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/Properties/AssemblyInfo.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/app.config create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/packages.config create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Akka.Persistence.PostgreSql.csproj create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Akka.Persistence.PostgreSql.nuspec create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Extension.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/InternalExtensions.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/PostgreSqlJournal.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/QueryBuilder.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/QueryMapper.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/PostgreSqlInitializer.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Properties/AssemblyInfo.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/README.md create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/PostgreSqlSnapshotStore.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/QueryBuilder.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/QueryMapper.cs create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/packages.config create mode 100644 src/contrib/persistence/Akka.Persistence.PostgreSql/postgresql.conf diff --git a/build.fsx b/build.fsx index 1428c1cc88d..d8e3d65acae 100644 --- a/build.fsx +++ b/build.fsx @@ -217,7 +217,8 @@ Target "RunTests" <| fun _ -> let 
xunitTestAssemblies = !! "src/**/bin/Release/*.Tests.dll" -- "src/**/bin/Release/Akka.TestKit.VsTest.Tests.dll" -- "src/**/bin/Release/Akka.TestKit.NUnit.Tests.dll" -- - "src/**/bin/Release/Akka.Persistence.SqlServer.Tests.dll" + "src/**/bin/Release/Akka.Persistence.SqlServer.Tests.dll" -- + "src/**/bin/Release/Akka.Persistence.PostgreSql.Tests.dll" mkdir testOutput @@ -271,6 +272,14 @@ Target "RunSqlServerTests" <| fun _ -> (fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath }) sqlServerTests +Target "RunPostgreSqlTests" <| fun _ -> + let postgreSqlTests = !! "src/**/bin/Release/Akka.Persistence.PostgreSql.Tests.dll" + let xunitToolPath = findToolInSubPath "xunit.console.exe" "src/packages/xunit.runner.console*/tools" + printfn "Using XUnit runner: %s" xunitToolPath + xUnit2 + (fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath }) + postgreSqlTests + //-------------------------------------------------------------------------------- // Nuget targets //-------------------------------------------------------------------------------- diff --git a/src/Akka.sln b/src/Akka.sln index bc127249502..334107d1972 100644 --- a/src/Akka.sln +++ b/src/Akka.sln @@ -200,6 +200,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.MultiNodeTests", "core EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.Sql.Common", "contrib\persistence\Akka.Persistence.Sql.Common\Akka.Persistence.Sql.Common.csproj", "{3B9E6211-9488-4DB5-B714-24248693B38F}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.PostgreSql", "contrib\persistence\Akka.Persistence.PostgreSql\Akka.Persistence.PostgreSql.csproj", "{4B89227B-5AD1-4061-816F-570067C3727F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.PostgreSql.Tests", "contrib\persistence\Akka.Persistence.PostgreSql.Tests\Akka.Persistence.PostgreSql.Tests.csproj", "{2D1812FD-70C0-43EE-9C25-3980E41F30E1}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug Mono|Any CPU = Debug Mono|Any CPU @@ -733,6 +737,22 @@ Global {3B9E6211-9488-4DB5-B714-24248693B38F}.Release Mono|Any CPU.Build.0 = Release|Any CPU {3B9E6211-9488-4DB5-B714-24248693B38F}.Release|Any CPU.ActiveCfg = Release|Any CPU {3B9E6211-9488-4DB5-B714-24248693B38F}.Release|Any CPU.Build.0 = Release|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Release Mono|Any CPU.Build.0 = Release|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Release|Any CPU.Build.0 = Release|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release Mono|Any CPU.Build.0 = 
Release|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -822,5 +842,7 @@ Global {7DBD5C17-5E9D-40C4-9201-D092751532A7} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5} = {01167D3C-49C4-4CDE-9787-C176D139ACDD} {3B9E6211-9488-4DB5-B714-24248693B38F} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} + {4B89227B-5AD1-4061-816F-570067C3727F} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} + {2D1812FD-70C0-43EE-9C25-3980E41F30E1} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} EndGlobalSection EndGlobal diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/Akka.Persistence.PostgreSql.Tests.csproj b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/Akka.Persistence.PostgreSql.Tests.csproj new file mode 100644 index 00000000000..7acb38a43b3 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/Akka.Persistence.PostgreSql.Tests.csproj @@ -0,0 +1,106 @@ + + + + + Debug + AnyCPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1} + Library + Properties + Akka.Persistence.PostgreSql.Tests + Akka.Persistence.PostgreSql.Tests + v4.5 + 512 + ..\ + true + + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\..\..\packages\Npgsql.2.2.5\lib\net45\Mono.Security.dll + + + ..\..\..\packages\Npgsql.2.2.5\lib\net45\Npgsql.dll + + + + + + + + + + + + + + + + + + + + + + + {7dbd5c17-5e9d-40c4-9201-d092751532a7} + Akka.TestKit.Xunit2 + + + {4b89227b-5ad1-4061-816f-570067c3727f} + Akka.Persistence.PostgreSql + + + {ad9418b6-c452-4169-94fb-d43de0bfa966} + Akka.Persistence.TestKit + + + {fca84dea-c118-424b-9eb8-34375dfef18a} + Akka.Persistence + + + {0d3cbad0-bbdb-43e5-afc4-ed1d3ecdc224} + Akka.TestKit + + + {5deddf90-37f0-48d3-a0b0-a5cbd8a7e377} + Akka + + + {3b9e6211-9488-4db5-b714-24248693b38f} + Akka.Persistence.Sql.Common + + + + + + + This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. 
+ + + + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/DbUtils.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/DbUtils.cs new file mode 100644 index 00000000000..b9fbc9c43b0 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/DbUtils.cs @@ -0,0 +1,80 @@ +using System; +using System.Configuration; +using System.Data.SqlClient; +using Akka.Dispatch.SysMsg; +using Npgsql; + +namespace Akka.Persistence.PostgreSql.Tests +{ + public static class DbUtils + { + public static void Initialize() + { + var connectionString = ConfigurationManager.ConnectionStrings["TestDb"].ConnectionString; + var connectionBuilder = new NpgsqlConnectionStringBuilder(connectionString); + + //connect to postgres database to create a new database + var databaseName = connectionBuilder.Database; + connectionBuilder.Database = "postgres"; + connectionString = connectionBuilder.ToString(); + + using (var conn = new NpgsqlConnection(connectionString)) + { + conn.Open(); + + bool dbExists; + using (var cmd = new NpgsqlCommand()) + { + cmd.CommandText = string.Format(@"SELECT TRUE FROM pg_database WHERE datname='{0}'", databaseName); + cmd.Connection = conn; + + var result = cmd.ExecuteScalar(); + dbExists = result != null && Convert.ToBoolean(result); + } + + if (dbExists) + { + DoClean(conn); + } + else + { + DoCreate(conn, databaseName); + } + } + } + + public static void Clean() + { + var connectionString = ConfigurationManager.ConnectionStrings["TestDb"].ConnectionString; + + using (var conn = new NpgsqlConnection(connectionString)) + { + conn.Open(); + + DoClean(conn); + } + } + + private static void DoCreate(NpgsqlConnection conn, string databaseName) + { + using (var cmd = new NpgsqlCommand()) + { + cmd.CommandText = string.Format(@"CREATE DATABASE {0}", databaseName); + cmd.Connection = conn; + cmd.ExecuteNonQuery(); + } + } + + private static void DoClean(NpgsqlConnection conn) + { + using (var cmd = new NpgsqlCommand()) + { + cmd.CommandText = @" + DROP TABLE IF EXISTS public.event_journal; + DROP TABLE IF EXISTS public.snapshot_store"; + cmd.Connection = conn; + cmd.ExecuteNonQuery(); + } + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/PostgreSqlJournalSpec.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/PostgreSqlJournalSpec.cs new file mode 100644 index 00000000000..6c9ecd27e92 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/PostgreSqlJournalSpec.cs @@ -0,0 +1,49 @@ +using System.Configuration; +using Akka.Configuration; +using Akka.Persistence.TestKit.Journal; + +namespace Akka.Persistence.PostgreSql.Tests +{ + public class PostgreSqlJournalSpec : JournalSpec + { + private static readonly Config SpecConfig; + + static PostgreSqlJournalSpec() + { + var connectionString = ConfigurationManager.ConnectionStrings["TestDb"].ConnectionString; + + var config = @" + akka.persistence { + publish-plugin-commands = on + journal { + plugin = ""akka.persistence.journal.postgresql"" + postgresql { + class = ""Akka.Persistence.PostgreSql.Journal.PostgreSqlJournal, Akka.Persistence.PostgreSql"" + plugin-dispatcher = ""akka.actor.default-dispatcher"" + table-name = event_journal + schema-name = public + auto-initialize = on + connection-string = """ + connectionString + @""" + } + } + }"; + + SpecConfig = ConfigurationFactory.ParseString(config); + + //need to make sure db is created before the tests start + DbUtils.Initialize(); + 
} + + public PostgreSqlJournalSpec() + : base(SpecConfig, "PostgreSqlJournalSpec") + { + Initialize(); + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + DbUtils.Clean(); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/PostgreSqlSnapshotStoreSpec.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/PostgreSqlSnapshotStoreSpec.cs new file mode 100644 index 00000000000..1056459a04c --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/PostgreSqlSnapshotStoreSpec.cs @@ -0,0 +1,49 @@ +using System.Configuration; +using Akka.Configuration; +using Akka.Persistence.TestKit.Snapshot; + +namespace Akka.Persistence.PostgreSql.Tests +{ + public class PostgreSqlSnapshotStoreSpec : SnapshotStoreSpec + { + private static readonly Config SpecConfig; + + static PostgreSqlSnapshotStoreSpec() + { + var connectionString = ConfigurationManager.ConnectionStrings["TestDb"].ConnectionString; + + var config = @" + akka.persistence { + publish-plugin-commands = on + snapshot-store { + plugin = ""akka.persistence.snapshot-store.postgresql"" + postgresql { + class = ""Akka.Persistence.PostgreSql.Snapshot.PostgreSqlSnapshotStore, Akka.Persistence.PostgreSql"" + plugin-dispatcher = ""akka.actor.default-dispatcher"" + table-name = snapshot_store + schema-name = public + auto-initialize = on + connection-string = """ + connectionString + @""" + } + } + }"; + + SpecConfig = ConfigurationFactory.ParseString(config); + + //need to make sure db is created before the tests start + DbUtils.Initialize(); + } + + public PostgreSqlSnapshotStoreSpec() + : base(SpecConfig, "PostgreSqlSnapshotStoreSpec") + { + Initialize(); + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + DbUtils.Clean(); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/Properties/AssemblyInfo.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..1970f490550 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Akka.Persistence.PostgreSql.Tests")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Akka.Persistence.PostgreSql.Tests")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("8494fd8c-15ae-489e-83aa-1ac37b458964")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/app.config b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/app.config new file mode 100644 index 00000000000..010fe0e91f2 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/app.config @@ -0,0 +1,6 @@ + + + + + + diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/packages.config b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/packages.config new file mode 100644 index 00000000000..9c8e7e8768b --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql.Tests/packages.config @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Akka.Persistence.PostgreSql.csproj b/src/contrib/persistence/Akka.Persistence.PostgreSql/Akka.Persistence.PostgreSql.csproj new file mode 100644 index 00000000000..08e88a4798e --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Akka.Persistence.PostgreSql.csproj @@ -0,0 +1,101 @@ + + + + + Debug + AnyCPU + {4B89227B-5AD1-4061-816F-570067C3727F} + Library + Properties + Akka.Persistence.PostgreSql + Akka.Persistence.PostgreSql + v4.5 + 512 + ..\ + true + + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\..\..\packages\Npgsql.2.2.5\lib\net45\Mono.Security.dll + + + ..\..\..\packages\Npgsql.2.2.5\lib\net45\Npgsql.dll + + + + + + + + + + + + + + + + + + + + + + + + Always + + + + + {fca84dea-c118-424b-9eb8-34375dfef18a} + Akka.Persistence + + + {5deddf90-37f0-48d3-a0b0-a5cbd8a7e377} + Akka + + + {3b9e6211-9488-4db5-b714-24248693b38f} + Akka.Persistence.Sql.Common + + + + + + + + + + + + This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + + + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Akka.Persistence.PostgreSql.nuspec b/src/contrib/persistence/Akka.Persistence.PostgreSql/Akka.Persistence.PostgreSql.nuspec new file mode 100644 index 00000000000..d85c4b4a521 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Akka.Persistence.PostgreSql.nuspec @@ -0,0 +1,20 @@ + + + + @project@ + @project@@title@ + @build.number@ + @authors@ + @authors@ + Akka.NET Persistence journal and snapshot store backed by PostgreSql. 
+ https://github.com/akkadotnet/akka.net/blob/master/LICENSE + https://github.com/akkadotnet/akka.net + http://getakka.net/images/AkkaNetLogo.Normal.png + false + @releaseNotes@ + @copyright@ + @tags@ persistence eventsource postgresql + @dependencies@ + @references@ + + diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Extension.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/Extension.cs new file mode 100644 index 00000000000..3f7bcbec513 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Extension.cs @@ -0,0 +1,116 @@ +using System; +using Akka.Actor; +using Akka.Configuration; +using Akka.Persistence.Sql.Common; + +namespace Akka.Persistence.PostgreSql +{ + /// + /// Configuration settings representation targeting PostgreSql journal actor. + /// + public class PostgreSqlJournalSettings : JournalSettings + { + public const string JournalConfigPath = "akka.persistence.journal.postgresql"; + + /// + /// Flag determining in case of event journal table missing, it should be automatically initialized. + /// + public bool AutoInitialize { get; private set; } + + public PostgreSqlJournalSettings(Config config) + : base(config) + { + AutoInitialize = config.GetBoolean("auto-initialize"); + } + } + + /// + /// Configuration settings representation targeting PostgreSql snapshot store actor. + /// + public class PostgreSqlSnapshotStoreSettings : SnapshotStoreSettings + { + public const string SnapshotStoreConfigPath = "akka.persistence.snapshot-store.postgresql"; + + /// + /// Flag determining in case of snapshot store table missing, it should be automatically initialized. + /// + public bool AutoInitialize { get; private set; } + + public PostgreSqlSnapshotStoreSettings(Config config) + : base(config) + { + AutoInitialize = config.GetBoolean("auto-initialize"); + } + } + + /// + /// An actor system extension initializing support for PostgreSql persistence layer. + /// + public class PostgreSqlPersistenceExtension : IExtension + { + /// + /// Journal-related settings loaded from HOCON configuration. + /// + public readonly PostgreSqlJournalSettings JournalSettings; + + /// + /// Snapshot store related settings loaded from HOCON configuration. + /// + public readonly PostgreSqlSnapshotStoreSettings SnapshotStoreSettings; + + public PostgreSqlPersistenceExtension(ExtendedActorSystem system) + { + system.Settings.InjectTopLevelFallback(PostgreSqlPersistence.DefaultConfiguration()); + + JournalSettings = new PostgreSqlJournalSettings(system.Settings.Config.GetConfig(PostgreSqlJournalSettings.JournalConfigPath)); + SnapshotStoreSettings = new PostgreSqlSnapshotStoreSettings(system.Settings.Config.GetConfig(PostgreSqlSnapshotStoreSettings.SnapshotStoreConfigPath)); + + if (JournalSettings.AutoInitialize) + { + PostgreSqlInitializer.CreatePostgreSqlJournalTables(JournalSettings.ConnectionString, JournalSettings.SchemaName, JournalSettings.TableName); + } + + if (SnapshotStoreSettings.AutoInitialize) + { + PostgreSqlInitializer.CreatePostgreSqlSnapshotStoreTables(SnapshotStoreSettings.ConnectionString, SnapshotStoreSettings.SchemaName, SnapshotStoreSettings.TableName); + } + } + } + + /// + /// Singleton class used to setup PostgreSQL backend for akka persistence plugin. + /// + public class PostgreSqlPersistence : ExtensionIdProvider + { + public static readonly PostgreSqlPersistence Instance = new PostgreSqlPersistence(); + + /// + /// Initializes a PostgreSQL persistence plugin inside provided . 
+ /// + public static void Init(ActorSystem actorSystem) + { + Instance.Apply(actorSystem); + } + + private PostgreSqlPersistence() { } + + /// + /// Creates an actor system extension for akka persistence PostgreSQL support. + /// + /// + /// + public override PostgreSqlPersistenceExtension CreateExtension(ExtendedActorSystem system) + { + return new PostgreSqlPersistenceExtension(system); + } + + /// + /// Returns a default configuration for akka persistence PostgreSQL-based journals and snapshot stores. + /// + /// + public static Config DefaultConfiguration() + { + return ConfigurationFactory.FromResource("Akka.Persistence.PostgreSql.postgresql.conf"); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/InternalExtensions.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/InternalExtensions.cs new file mode 100644 index 00000000000..cec1132ebb5 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/InternalExtensions.cs @@ -0,0 +1,15 @@ +using System; +using System.Data.SqlClient; +using Npgsql; + +namespace Akka.Persistence.PostgreSql +{ + internal static class InternalExtensions + { + public static string QuoteSchemaAndTable(this string sqlQuery, string schemaName, string tableName) + { + var cb = new NpgsqlCommandBuilder(); + return string.Format(sqlQuery, cb.QuoteIdentifier(schemaName), cb.QuoteIdentifier(tableName)); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/PostgreSqlJournal.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/PostgreSqlJournal.cs new file mode 100644 index 00000000000..d942eafa2d6 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/PostgreSqlJournal.cs @@ -0,0 +1,96 @@ +using System; +using System.Collections.Generic; +using System.Data.Common; +using System.Data.SqlClient; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Akka.Persistence.Journal; +using Npgsql; +using Akka.Persistence.Sql.Common.Journal; +using Akka.Persistence.Sql.Common; + +namespace Akka.Persistence.PostgreSql.Journal +{ + public class PostgreSqlJournalEngine : JournalDbEngine + { + public PostgreSqlJournalEngine(JournalSettings journalSettings, Akka.Serialization.Serialization serialization) + : base(journalSettings, serialization) + { + QueryBuilder = new PostgreSqlJournalQueryBuilder(journalSettings.TableName, journalSettings.SchemaName); + QueryMapper = new PostgreSqlJournalQueryMapper(serialization); + } + + protected override DbConnection CreateDbConnection() + { + return new NpgsqlConnection(Settings.ConnectionString); + } + + protected override void CopyParamsToCommand(DbCommand sqlCommand, JournalEntry entry) + { + sqlCommand.Parameters[":persistence_id"].Value = entry.PersistenceId; + sqlCommand.Parameters[":sequence_nr"].Value = entry.SequenceNr; + sqlCommand.Parameters[":is_deleted"].Value = entry.IsDeleted; + sqlCommand.Parameters[":payload_type"].Value = entry.PayloadType; + sqlCommand.Parameters[":payload"].Value = entry.Payload; + } + } + + /// + /// Persistent journal actor using PostgreSQL as persistence layer. It processes write requests + /// one by one in synchronous manner, while reading results asynchronously. 
+ /// + public class PostgreSqlJournal : SyncWriteJournal + { + private readonly PostgreSqlPersistenceExtension _extension; + private PostgreSqlJournalEngine _engine; + + public PostgreSqlJournal() + { + _extension = PostgreSqlPersistence.Instance.Apply(Context.System); + } + + /// + /// Gets an engine instance responsible for handling all database-related journal requests. + /// + protected virtual JournalDbEngine Engine + { + get + { + return _engine ?? (_engine = new PostgreSqlJournalEngine(_extension.JournalSettings, Context.System.Serialization)); + } + } + + protected override void PreStart() + { + base.PreStart(); + Engine.Open(); + } + + protected override void PostStop() + { + base.PostStop(); + Engine.Close(); + } + + public override Task ReplayMessagesAsync(string persistenceId, long fromSequenceNr, long toSequenceNr, long max, Action replayCallback) + { + return Engine.ReplayMessagesAsync(persistenceId, fromSequenceNr, toSequenceNr, max, Context.Sender, replayCallback); + } + + public override Task ReadHighestSequenceNrAsync(string persistenceId, long fromSequenceNr) + { + return Engine.ReadHighestSequenceNrAsync(persistenceId, fromSequenceNr); + } + + public override void WriteMessages(IEnumerable messages) + { + Engine.WriteMessages(messages); + } + + public override void DeleteMessagesTo(string persistenceId, long toSequenceNr, bool isPermanent) + { + Engine.DeleteMessagesTo(persistenceId, toSequenceNr, isPermanent); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/QueryBuilder.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/QueryBuilder.cs new file mode 100644 index 00000000000..0356f757f8e --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/QueryBuilder.cs @@ -0,0 +1,140 @@ +using System.Data; +using System.Data.SqlClient; +using System.Text; +using Npgsql; +using NpgsqlTypes; +using Akka.Persistence.Sql.Common.Journal; +using System.Data.Common; + +namespace Akka.Persistence.PostgreSql.Journal +{ + internal class PostgreSqlJournalQueryBuilder : IJournalQueryBuilder + { + private readonly string _schemaName; + private readonly string _tableName; + + private readonly string _selectHighestSequenceNrSql; + private readonly string _insertMessagesSql; + + public PostgreSqlJournalQueryBuilder(string tableName, string schemaName) + { + _tableName = tableName; + _schemaName = schemaName; + + _insertMessagesSql = "INSERT INTO {0}.{1} (persistence_id, sequence_nr, is_deleted, payload_type, payload) VALUES (:persistence_id, :sequence_nr, :is_deleted, :payload_type, :payload)" + .QuoteSchemaAndTable(_schemaName, _tableName); + _selectHighestSequenceNrSql = @"SELECT MAX(sequence_nr) FROM {0}.{1} WHERE persistence_id = :persistence_id".QuoteSchemaAndTable(_schemaName, _tableName); + } + + public DbCommand SelectMessages(string persistenceId, long fromSequenceNr, long toSequenceNr, long max) + { + var sql = BuildSelectMessagesSql(fromSequenceNr, toSequenceNr, max); + var command = new NpgsqlCommand(sql) + { + Parameters = { PersistenceIdToSqlParam(persistenceId) } + }; + + return command; + } + + public DbCommand SelectHighestSequenceNr(string persistenceId) + { + var command = new NpgsqlCommand(_selectHighestSequenceNrSql) + { + Parameters = { PersistenceIdToSqlParam(persistenceId) } + }; + + return command; + } + + public DbCommand InsertBatchMessages(IPersistentRepresentation[] messages) + { + var command = new NpgsqlCommand(_insertMessagesSql); + 
command.Parameters.Add(":persistence_id", NpgsqlDbType.Varchar); + command.Parameters.Add(":sequence_nr", NpgsqlDbType.Bigint); + command.Parameters.Add(":is_deleted", NpgsqlDbType.Boolean); + command.Parameters.Add(":payload_type", NpgsqlDbType.Varchar); + command.Parameters.Add(":payload", NpgsqlDbType.Bytea); + + return command; + } + + public DbCommand DeleteBatchMessages(string persistenceId, long toSequenceNr, bool permanent) + { + var sql = BuildDeleteSql(toSequenceNr, permanent); + var command = new NpgsqlCommand(sql) + { + Parameters = { PersistenceIdToSqlParam(persistenceId) } + }; + + return command; + } + + private string BuildDeleteSql(long toSequenceNr, bool permanent) + { + var sqlBuilder = new StringBuilder(); + + if (permanent) + { + sqlBuilder.Append("DELETE FROM {0}.{1} ".QuoteSchemaAndTable(_schemaName, _tableName)); + } + else + { + sqlBuilder.Append("UPDATE {0}.{1} SET is_deleted = true ".QuoteSchemaAndTable(_schemaName, _tableName)); + } + + sqlBuilder.Append("WHERE persistence_id = :persistence_id"); + + if (toSequenceNr != long.MaxValue) + { + sqlBuilder.Append(" AND sequence_nr <= ").Append(toSequenceNr); + } + + var sql = sqlBuilder.ToString(); + return sql; + } + + private string BuildSelectMessagesSql(long fromSequenceNr, long toSequenceNr, long max) + { + var sqlBuilder = new StringBuilder(); + sqlBuilder.AppendFormat( + @"SELECT + persistence_id, + sequence_nr, + is_deleted, + payload_type, + payload ") + .Append(" FROM {0}.{1} WHERE persistence_id = :persistence_id".QuoteSchemaAndTable(_schemaName, _tableName)); + + // since we guarantee type of fromSequenceNr, toSequenceNr and max + // we can inline them without risk of SQL injection + + if (fromSequenceNr > 0) + { + if (toSequenceNr != long.MaxValue) + sqlBuilder.Append(" AND sequence_nr BETWEEN ") + .Append(fromSequenceNr) + .Append(" AND ") + .Append(toSequenceNr); + else + sqlBuilder.Append(" AND sequence_nr >= ").Append(fromSequenceNr); + } + + if (toSequenceNr != long.MaxValue) + sqlBuilder.Append(" AND sequence_nr <= ").Append(toSequenceNr); + + if (max != long.MaxValue) + { + sqlBuilder.AppendFormat(" LIMIT {0}", max); + } + + var sql = sqlBuilder.ToString(); + return sql; + } + + private static NpgsqlParameter PersistenceIdToSqlParam(string persistenceId, string paramName = null) + { + return new NpgsqlParameter(paramName ?? 
":persistence_id", NpgsqlDbType.Varchar, persistenceId.Length) { Value = persistenceId }; + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/QueryMapper.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/QueryMapper.cs new file mode 100644 index 00000000000..44b65b9790e --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Journal/QueryMapper.cs @@ -0,0 +1,39 @@ +using System; +using System.Data.Common; +using System.Data.SqlClient; +using Npgsql; +using Akka.Persistence.Sql.Common.Journal; +using Akka.Actor; + +namespace Akka.Persistence.PostgreSql.Journal +{ + internal class PostgreSqlJournalQueryMapper : IJournalQueryMapper + { + private readonly Akka.Serialization.Serialization _serialization; + + public PostgreSqlJournalQueryMapper(Akka.Serialization.Serialization serialization) + { + _serialization = serialization; + } + + public IPersistentRepresentation Map(DbDataReader reader, IActorRef sender = null) + { + var persistenceId = reader.GetString(0); + var sequenceNr = reader.GetInt64(1); + var isDeleted = reader.GetBoolean(2); + var payload = GetPayload(reader); + + return new Persistent(payload, sequenceNr, persistenceId, isDeleted, sender); + } + + private object GetPayload(DbDataReader reader) + { + var payloadType = reader.GetString(3); + var type = Type.GetType(payloadType, true); + var binary = (byte[]) reader[4]; + + var serializer = _serialization.FindSerializerForType(type); + return serializer.FromBinary(binary, type); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/PostgreSqlInitializer.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/PostgreSqlInitializer.cs new file mode 100644 index 00000000000..6fd32f5a428 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/PostgreSqlInitializer.cs @@ -0,0 +1,99 @@ +using System; +using System.Data.SqlClient; +using Npgsql; + +namespace Akka.Persistence.PostgreSql +{ + internal static class PostgreSqlInitializer + { + private const string SqlJournalFormat = @" + DO + $do$ + BEGIN + IF NOT EXISTS (SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '{2}' AND TABLE_NAME = '{3}') THEN + CREATE TABLE {0}.{1} ( + persistence_id VARCHAR(200) NOT NULL, + sequence_nr BIGINT NOT NULL, + is_deleted BOOLEAN NOT NULL, + payload_type VARCHAR(500) NOT NULL, + payload BYTEA NOT NULL, + CONSTRAINT {3}_pk PRIMARY KEY (persistence_id, sequence_nr) + ); + CREATE INDEX {3}_sequence_nr_idx ON {0}.{1}(sequence_nr); + END IF; + END + $do$ + "; + + private const string SqlSnapshotStoreFormat = @" + DO + $do$ + BEGIN + IF NOT EXISTS (SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '{2}' AND TABLE_NAME = '{3}') THEN + CREATE TABLE {0}.{1} ( + persistence_id VARCHAR(200) NOT NULL, + sequence_nr BIGINT NOT NULL, + created_at TIMESTAMP WITH TIME ZONE NOT NULL, + created_at_ticks SMALLINT NOT NULL CHECK(created_at_ticks >= 0 AND created_at_ticks < 10), + snapshot_type VARCHAR(500) NOT NULL, + snapshot BYTEA NOT NULL, + CONSTRAINT {3}_pk PRIMARY KEY (persistence_id, sequence_nr) + ); + CREATE INDEX {3}_sequence_nr_idx ON {0}.{1}(sequence_nr); + CREATE INDEX {3}_created_at_idx ON {0}.{1}(created_at); + END IF; + END + $do$ + "; + + /// + /// Initializes a PostgreSQL journal-related tables according to 'schema-name', 'table-name' + /// and 'connection-string' values provided in 'akka.persistence.journal.postgresql' config. 
+ /// + internal static void CreatePostgreSqlJournalTables(string connectionString, string schemaName, string tableName) + { + var sql = InitJournalSql(tableName, schemaName); + ExecuteSql(connectionString, sql); + } + + /// + /// Initializes a PostgreSQL snapshot store related tables according to 'schema-name', 'table-name' + /// and 'connection-string' values provided in 'akka.persistence.snapshot-store.postgresql' config. + /// + internal static void CreatePostgreSqlSnapshotStoreTables(string connectionString, string schemaName, string tableName) + { + var sql = InitSnapshotStoreSql(tableName, schemaName); + ExecuteSql(connectionString, sql); + } + + private static string InitJournalSql(string tableName, string schemaName = null) + { + if (string.IsNullOrEmpty(tableName)) throw new ArgumentNullException("tableName", "Akka.Persistence.PostgreSql journal table name is required"); + schemaName = schemaName ?? "public"; + + var cb = new NpgsqlCommandBuilder(); + return string.Format(SqlJournalFormat, cb.QuoteIdentifier(schemaName), cb.QuoteIdentifier(tableName), cb.UnquoteIdentifier(schemaName), cb.UnquoteIdentifier(tableName)); + } + + private static string InitSnapshotStoreSql(string tableName, string schemaName = null) + { + if (string.IsNullOrEmpty(tableName)) throw new ArgumentNullException("tableName", "Akka.Persistence.PostgreSql snapshot store table name is required"); + schemaName = schemaName ?? "public"; + + var cb = new NpgsqlCommandBuilder(); + return string.Format(SqlSnapshotStoreFormat, cb.QuoteIdentifier(schemaName), cb.QuoteIdentifier(tableName), cb.UnquoteIdentifier(schemaName), cb.UnquoteIdentifier(tableName)); + } + + private static void ExecuteSql(string connectionString, string sql) + { + using (var conn = new NpgsqlConnection(connectionString)) + using (var command = conn.CreateCommand()) + { + conn.Open(); + + command.CommandText = sql; + command.ExecuteNonQuery(); + } + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Properties/AssemblyInfo.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..72164213d82 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Akka.Persistence.PostgreSql")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("Akka.Persistence.PostgreSql")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. 
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("3b21dbd6-ebb9-44cb-8dee-edbfb5bf0a00")]
+
+// Version information for an assembly consists of the following four values:
+//
+//      Major Version
+//      Minor Version
+//      Build Number
+//      Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/README.md b/src/contrib/persistence/Akka.Persistence.PostgreSql/README.md
new file mode 100644
index 00000000000..1cd6b2af191
--- /dev/null
+++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/README.md
@@ -0,0 +1,88 @@
+## Akka.Persistence.PostgreSql
+
+Akka Persistence journal and snapshot store backed by a PostgreSql database.
+
+**WARNING: The Akka.Persistence.PostgreSql plugin is still in beta and its mechanics described below may still be subject to change**.
+
+### Setup
+
+To activate the journal plugin, add the following lines to your actor system configuration file:
+
+```
+akka.persistence.journal.plugin = "akka.persistence.journal.postgresql"
+akka.persistence.journal.postgresql.connection-string = ""
+```
+
+Similar configuration may be used to set up a PostgreSql snapshot store:
+
+```
+akka.persistence.snapshot-store.plugin = "akka.persistence.snapshot-store.postgresql"
+akka.persistence.snapshot-store.postgresql.connection-string = ""
+```
+
+Remember that the connection string must be provided separately for the journal and the snapshot store. To finish the setup, simply initialize the plugin using: `PostgreSqlPersistence.Init(actorSystem);`
+
+### Configuration
+
+Both journal and snapshot store share the same configuration keys (however, they reside in separate scopes, so they are defined distinctly for either the journal or the snapshot store):
+
+- `class` (string with fully qualified type name) - determines the class to be used as a persistent journal. Default: *Akka.Persistence.PostgreSql.Journal.PostgreSqlJournal, Akka.Persistence.PostgreSql* (for journal) and *Akka.Persistence.PostgreSql.Snapshot.PostgreSqlSnapshotStore, Akka.Persistence.PostgreSql* (for snapshot store).
+- `plugin-dispatcher` (string with configuration path) - describes a message dispatcher for the persistent journal. Default: *akka.actor.default-dispatcher*
+- `connection-string` - connection string used to access the PostgreSql database. Default: *none*.
+- `connection-timeout` - timespan determining default connection timeouts on database-related operations. Default: *30s*
+- `schema-name` - name of the database schema where journal or snapshot store tables should be placed. Default: *public*
+- `table-name` - name of the table used by either journal or snapshot store. Default: *event_journal* (for journal) or *snapshot_store* (for snapshot store)
+- `auto-initialize` - flag determining if journal or snapshot store related tables should be automatically created when they are not found in the connected database. Default: *false*
+
+### Custom SQL data queries
+
+The PostgreSql persistence plugin defines a default table schema used for both journal and snapshot store.
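+
+As a point of reference, here is a minimal, hypothetical sketch of a persistent actor wired to this plugin. Every successful `Persist` call should end up as one row in the *EventJournal* table shown below, with `payload_type` holding the CLR type name of the event and `payload` its serialized bytes. The actor, event and configuration values are illustrative only (they are not shipped with the plugin), and the sketch assumes the `ReceivePersistentActor` base class from Akka.Persistence:
+
+```csharp
+using Akka.Actor;
+using Akka.Configuration;
+using Akka.Persistence;
+using Akka.Persistence.PostgreSql;
+
+public class CounterChanged
+{
+    public CounterChanged(int delta) { Delta = delta; }
+    public int Delta { get; private set; }
+}
+
+public class CounterActor : ReceivePersistentActor
+{
+    private int _count;
+
+    public override string PersistenceId { get { return "counter-1"; } }
+
+    public CounterActor()
+    {
+        // replayed events rebuild the in-memory state when the actor (re)starts
+        Recover<CounterChanged>(e => _count += e.Delta);
+
+        // commands are written to the journal first; the handler runs after the write succeeds
+        Command<int>(delta => Persist(new CounterChanged(delta), e => _count += e.Delta));
+        Command<string>(_ => Sender.Tell(_count));
+    }
+}
+
+class Program
+{
+    static void Main()
+    {
+        var config = ConfigurationFactory.ParseString(@"
+            akka.persistence.journal.plugin = ""akka.persistence.journal.postgresql""
+            akka.persistence.journal.postgresql.connection-string = ""<your connection string>""
+            akka.persistence.journal.postgresql.auto-initialize = on");
+
+        var system = ActorSystem.Create("example", config);
+        PostgreSqlPersistence.Init(system); // plugin initialization, as described in the Setup section
+
+        var counter = system.ActorOf(Props.Create(() => new CounterActor()), "counter");
+        counter.Tell(5); // persisted as a single event_journal row
+    }
+}
+```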
+
+**EventJournal table**:
+
+    +----------------+-------------+------------+---------------+---------+
+    | persistence_id | sequence_nr | is_deleted | payload_type  | payload |
+    +----------------+-------------+------------+---------------+---------+
+    | varchar(200)   | bigint      | boolean    | varchar(500)  | bytea   |
+    +----------------+-------------+------------+---------------+---------+
+
+**SnapshotStore table**:
+
+    +----------------+--------------+--------------------------+------------------+---------------+----------+
+    | persistence_id | sequence_nr  | created_at               | created_at_ticks | snapshot_type | snapshot |
+    +----------------+--------------+--------------------------+------------------+---------------+----------+
+    | varchar(200)   | bigint       | timestamp with time zone | smallint         | varchar(500)  | bytea    |
+    +----------------+--------------+--------------------------+------------------+---------------+----------+
+
+**created_at and created_at_ticks - The max precision of a PostgreSQL timestamp is 6. The max precision of a .NET DateTime object is 7. Because of this difference, the additional ticks are saved in a separate column and combined during deserialization. There is also a check constraint restricting created_at_ticks to the range [0,10) to ensure that there are no precision differences in the opposite direction.**
+
+Under the hood, Akka.Persistence.PostgreSql uses the Npgsql library to communicate with the database. You may choose not to use the dedicated built-in query builders and mappers, but to create your own, better fitted to your use case. To do so, create your own implementations of `IJournalQueryBuilder` and `IJournalQueryMapper` (for a custom journal) or `ISnapshotQueryBuilder` and `ISnapshotQueryMapper` (for a custom snapshot store) and then attach them inside the journal, just like in the example below:
+
+```csharp
+class MyCustomPostgreSqlJournal: Akka.Persistence.PostgreSql.Journal.PostgreSqlJournal
+{
+    public MyCustomPostgreSqlJournal() : base()
+    {
+        QueryBuilder = new MyCustomJournalQueryBuilder();
+        QueryMapper = new MyCustomJournalQueryMapper();
+    }
+}
+```
+
+The final step is to set up your custom journal using the Akka config:
+
+```
+akka.persistence.journal.postgresql.class = "MyModule.MyCustomPostgreSqlJournal, MyModule"
+```
+
+### Tests
+
+The PostgreSql tests are packaged as a separate build task with a target of "RunPostgreSqlTests".
+
+In order to run the tests, you must do the following things:
+
+1. Download and install PostgreSql from: http://www.postgresql.org/download/
+2. Install PostgreSql with the default settings. The default connection string uses the following credentials:
+  1. Username: postgres
+  2. Password: postgres
+3. 
A custom app.config file can be used and needs to be placed in the same folder as the dll \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/PostgreSqlSnapshotStore.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/PostgreSqlSnapshotStore.cs new file mode 100644 index 00000000000..acd38690a94 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/PostgreSqlSnapshotStore.cs @@ -0,0 +1,44 @@ +using System.Collections.Generic; +using System.Data.SqlClient; +using System.Threading; +using System.Threading.Tasks; +using Akka.Persistence.Snapshot; +using Npgsql; +using Akka.Persistence.Sql.Common.Snapshot; +using Akka.Persistence.Sql.Common; +using System; +using System.Data.Common; + +namespace Akka.Persistence.PostgreSql.Snapshot +{ + /// + /// Actor used for storing incoming snapshots into persistent snapshot store backed by PostgreSQL database. + /// + public class PostgreSqlSnapshotStore : DbSnapshotStore + { + private readonly PostgreSqlPersistenceExtension _extension; + private readonly PostgreSqlSnapshotStoreSettings _settings; + + public PostgreSqlSnapshotStore() + { + _extension = PostgreSqlPersistence.Instance.Apply(Context.System); + + _settings = _extension.SnapshotStoreSettings; + QueryBuilder = new PostgreSqlSnapshotQueryBuilder(_settings.SchemaName, _settings.TableName); + QueryMapper = new PostgreSqlSnapshotQueryMapper(Context.System.Serialization); + } + + protected override SnapshotStoreSettings Settings + { + get + { + return _settings; + } + } + + protected override DbConnection CreateDbConnection() + { + return new NpgsqlConnection(Settings.ConnectionString); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/QueryBuilder.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/QueryBuilder.cs new file mode 100644 index 00000000000..6bcb126f00b --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/QueryBuilder.cs @@ -0,0 +1,166 @@ +using System; +using System.Data; +using System.Data.SqlClient; +using System.Text; +using Npgsql; +using NpgsqlTypes; +using Akka.Persistence.Sql.Common.Snapshot; +using System.Data.Common; + +namespace Akka.Persistence.PostgreSql.Snapshot +{ + internal class PostgreSqlSnapshotQueryBuilder : ISnapshotQueryBuilder + { + private readonly string _deleteSql; + private readonly string _insertSql; + private readonly string _selectSql; + + public PostgreSqlSnapshotQueryBuilder(string schemaName, string tableName) + { + _deleteSql = @"DELETE FROM {0}.{1} WHERE persistence_id = :persistence_id ".QuoteSchemaAndTable(schemaName, tableName); + _insertSql = @"INSERT INTO {0}.{1} (persistence_id, sequence_nr, created_at, created_at_ticks, snapshot_type, snapshot) VALUES (:persistence_id, :sequence_nr, :created_at, :created_at_ticks, :snapshot_type, :snapshot)".QuoteSchemaAndTable(schemaName, tableName); + _selectSql = @"SELECT persistence_id, sequence_nr, created_at, created_at_ticks, snapshot_type, snapshot FROM {0}.{1} WHERE persistence_id = :persistence_id".QuoteSchemaAndTable(schemaName, tableName); + } + + public DbCommand DeleteOne(string persistenceId, long sequenceNr, DateTime timestamp) + { + var sqlCommand = new NpgsqlCommand(); + sqlCommand.Parameters.Add(new NpgsqlParameter(":persistence_id", NpgsqlDbType.Varchar, persistenceId.Length) + { + Value = persistenceId + }); + var sb = new StringBuilder(_deleteSql); + + if (sequenceNr < long.MaxValue && sequenceNr > 
0) + { + sb.Append(@"AND sequence_nr = :sequence_nr "); + sqlCommand.Parameters.Add(new NpgsqlParameter(":sequence_nr", NpgsqlDbType.Bigint) {Value = sequenceNr}); + } + + if (timestamp > DateTime.MinValue && timestamp < DateTime.MaxValue) + { + sb.Append(@"AND created_at = :created_at AND created_at_ticks = :created_at_ticks"); + sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at", NpgsqlDbType.Timestamp) + { + Value = GetMaxPrecisionTicks(timestamp) + }); + sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at_ticks", NpgsqlDbType.Smallint) + { + Value = GetExtraTicks(timestamp) + }); + } + + sqlCommand.CommandText = sb.ToString(); + + return sqlCommand; + } + + public DbCommand DeleteMany(string persistenceId, long maxSequenceNr, DateTime maxTimestamp) + { + var sqlCommand = new NpgsqlCommand(); + sqlCommand.Parameters.Add(new NpgsqlParameter(":persistence_id", NpgsqlDbType.Varchar, persistenceId.Length) + { + Value = persistenceId + }); + var sb = new StringBuilder(_deleteSql); + + if (maxSequenceNr < long.MaxValue && maxSequenceNr > 0) + { + sb.Append(@" AND sequence_nr <= :sequence_nr "); + sqlCommand.Parameters.Add(new NpgsqlParameter(":sequence_nr", NpgsqlDbType.Bigint) + { + Value = maxSequenceNr + }); + } + + if (maxTimestamp > DateTime.MinValue && maxTimestamp < DateTime.MaxValue) + { + sb.Append( + @" AND (created_at < :created_at OR (created_at = :created_at AND created_at_ticks <= :created_at_ticks)) "); + sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at", NpgsqlDbType.Timestamp) + { + Value = GetMaxPrecisionTicks(maxTimestamp) + }); + sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at_ticks", NpgsqlDbType.Smallint) + { + Value = GetExtraTicks(maxTimestamp) + }); + } + + sqlCommand.CommandText = sb.ToString(); + + return sqlCommand; + } + + public DbCommand InsertSnapshot(SnapshotEntry entry) + { + var sqlCommand = new NpgsqlCommand(_insertSql) + { + Parameters = + { + new NpgsqlParameter(":persistence_id", NpgsqlDbType.Varchar, entry.PersistenceId.Length) { Value = entry.PersistenceId }, + new NpgsqlParameter(":sequence_nr", NpgsqlDbType.Bigint) { Value = entry.SequenceNr }, + new NpgsqlParameter(":created_at", NpgsqlDbType.Timestamp) { Value = GetMaxPrecisionTicks(entry.Timestamp) }, + new NpgsqlParameter(":created_at_ticks", NpgsqlDbType.Smallint) { Value = GetExtraTicks(entry.Timestamp) }, + new NpgsqlParameter(":snapshot_type", NpgsqlDbType.Varchar, entry.SnapshotType.Length) { Value = entry.SnapshotType }, + new NpgsqlParameter(":snapshot", NpgsqlDbType.Bytea, entry.Snapshot.Length) { Value = entry.Snapshot } + } + }; + + return sqlCommand; + } + + public DbCommand SelectSnapshot(string persistenceId, long maxSequenceNr, DateTime maxTimestamp) + { + var sqlCommand = new NpgsqlCommand(); + sqlCommand.Parameters.Add(new NpgsqlParameter(":persistence_id", NpgsqlDbType.Varchar, persistenceId.Length) + { + Value = persistenceId + }); + + var sb = new StringBuilder(_selectSql); + if (maxSequenceNr > 0 && maxSequenceNr < long.MaxValue) + { + sb.Append(" AND sequence_nr <= :sequence_nr "); + sqlCommand.Parameters.Add(new NpgsqlParameter(":sequence_nr", NpgsqlDbType.Bigint) + { + Value = maxSequenceNr + }); + } + + if (maxTimestamp > DateTime.MinValue && maxTimestamp < DateTime.MaxValue) + { + sb.Append( + @" AND (created_at < :created_at OR (created_at = :created_at AND created_at_ticks <= :created_at_ticks)) "); + sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at", NpgsqlDbType.Timestamp) + { + Value = GetMaxPrecisionTicks(maxTimestamp) 
+ }); + sqlCommand.Parameters.Add(new NpgsqlParameter(":created_at_ticks", NpgsqlDbType.Smallint) + { + Value = GetExtraTicks(maxTimestamp) + }); + } + + sb.Append(" ORDER BY sequence_nr DESC"); + sqlCommand.CommandText = sb.ToString(); + return sqlCommand; + } + + private static DateTime GetMaxPrecisionTicks(DateTime date) + { + var ticks = (date.Ticks / 10) * 10; + + ticks = date.Ticks - ticks; + + return date.AddTicks(-1 * ticks); + } + + private static short GetExtraTicks(DateTime date) + { + var ticks = date.Ticks; + + return (short)(ticks % 10); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/QueryMapper.cs b/src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/QueryMapper.cs new file mode 100644 index 00000000000..e50c8fdc264 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/Snapshot/QueryMapper.cs @@ -0,0 +1,44 @@ +using System; +using System.Data.Common; +using System.Data.SqlClient; +using Npgsql; +using Akka.Persistence.Sql.Common.Snapshot; + +namespace Akka.Persistence.PostgreSql.Snapshot +{ + internal class PostgreSqlSnapshotQueryMapper : ISnapshotQueryMapper + { + private readonly Akka.Serialization.Serialization _serialization; + + public PostgreSqlSnapshotQueryMapper(Akka.Serialization.Serialization serialization) + { + _serialization = serialization; + } + + public SelectedSnapshot Map(DbDataReader reader) + { + var persistenceId = reader.GetString(0); + var sequenceNr = reader.GetInt64(1); + + var timestamp = reader.GetDateTime(2); + var timestampTicks = reader.GetInt16(3); + timestamp = timestamp.AddTicks(timestampTicks); + + var metadata = new SnapshotMetadata(persistenceId, sequenceNr, timestamp); + var snapshot = GetSnapshot(reader); + + return new SelectedSnapshot(metadata, snapshot); + } + + private object GetSnapshot(DbDataReader reader) + { + var type = Type.GetType(reader.GetString(4), true); + var serializer = _serialization.FindSerializerForType(type); + var binary = (byte[])reader[5]; + + var obj = serializer.FromBinary(binary, type); + + return obj; + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/packages.config b/src/contrib/persistence/Akka.Persistence.PostgreSql/packages.config new file mode 100644 index 00000000000..9c8e7e8768b --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/packages.config @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.PostgreSql/postgresql.conf b/src/contrib/persistence/Akka.Persistence.PostgreSql/postgresql.conf new file mode 100644 index 00000000000..aba8293065f --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.PostgreSql/postgresql.conf @@ -0,0 +1,54 @@ +akka.persistence{ + + journal { + postgresql { + + # qualified type name of the PostgreSql persistence journal actor + class = "Akka.Persistence.PostgreSql.Journal.PostgreSqlJournal, Akka.Persistence.PostgreSql" + + # dispatcher used to drive journal actor + plugin-dispatcher = "akka.actor.default-dispatcher" + + # connection string used for database access + connection-string = "" + + # default SQL commands timeout + connection-timeout = 30s + + # PostgreSql schema name to table corresponding with persistent journal + schema-name = public + + # PostgreSql table corresponding with persistent journal + table-name = event_journal + + # should corresponding journal table be initialized automatically + auto-initialize = off + } + } + + 
snapshot-store {
+    postgresql {
+
+      # qualified type name of the PostgreSql persistence snapshot store actor
+      class = "Akka.Persistence.PostgreSql.Snapshot.PostgreSqlSnapshotStore, Akka.Persistence.PostgreSql"
+
+      # dispatcher used to drive snapshot store actor
+      plugin-dispatcher = "akka.actor.default-dispatcher"
+
+      # connection string used for database access
+      connection-string = ""
+
+      # default SQL commands timeout
+      connection-timeout = 30s
+
+      # PostgreSql schema name to table corresponding with persistent snapshot store
+      schema-name = public
+
+      # PostgreSql table corresponding with persistent snapshot store
+      table-name = snapshot_store
+
+      # should corresponding snapshot store table be initialized automatically
+      auto-initialize = off
+    }
+  }
+}
\ No newline at end of file

From 68a4833364decfd676ae0d5d764dc9f272d9b720 Mon Sep 17 00:00:00 2001
From: Joshua Benjamin
Date: Wed, 13 May 2015 22:58:18 -0700
Subject: [PATCH 31/66] fix to beta tagging

---
 build.fsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/build.fsx b/build.fsx
index d8e3d65acae..6d7a3a0c3d2 100644
--- a/build.fsx
+++ b/build.fsx
@@ -43,7 +43,7 @@ let version = parsedRelease.AssemblyVersion + "." + buildNumber
 let isUnstableDocs = hasBuildParam "unstable"
 let isPreRelease = hasBuildParam "nugetprerelease"
 
-let release = if isPreRelease then ReleaseNotesHelper.ReleaseNotes.New(version, version + " -beta", parsedRelease.Notes) else parsedRelease
+let release = if isPreRelease then ReleaseNotesHelper.ReleaseNotes.New(version, version + "-beta", parsedRelease.Notes) else parsedRelease
 
 printfn "Assembly version: %s\nNuget version; %s\n" release.AssemblyVersion release.NugetVersion
 //--------------------------------------------------------------------------------

From 7c5a7a344e41da4afd7a53e301d09a24650557f1 Mon Sep 17 00:00:00 2001
From: Adam Hathcock
Date: Thu, 14 May 2015 14:12:49 +0100
Subject: [PATCH 32/66] Add PipeTo for non-generic Tasks for exception handling

---
 src/core/Akka/Actor/PipeToSupport.cs | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/src/core/Akka/Actor/PipeToSupport.cs b/src/core/Akka/Actor/PipeToSupport.cs
index 3fd3a1ddbc3..aee24df3625 100644
--- a/src/core/Akka/Actor/PipeToSupport.cs
+++ b/src/core/Akka/Actor/PipeToSupport.cs
@@ -24,12 +24,26 @@ public static Task PipeTo(this Task taskToPipe, ICanTell recipient, IActor
             sender = sender ?? ActorRefs.NoSender;
             return taskToPipe.ContinueWith(tresult =>
             {
-                if(tresult.IsCanceled || tresult.IsFaulted)
+                if (tresult.IsCanceled || tresult.IsFaulted)
                     recipient.Tell(new Status.Failure(tresult.Exception), sender);
                 else if (tresult.IsCompleted)
                     recipient.Tell(tresult.Result, sender);
             }, TaskContinuationOptions.ExecuteSynchronously & TaskContinuationOptions.AttachedToParent);
         }
+
+        /// 
+        /// Pipes the output of a Task directly to the 's mailbox once
+        /// the task completes. As this task has no result, only exceptions will be piped to the 
+        /// 
+        public static Task PipeTo(this Task taskToPipe, ICanTell recipient, IActorRef sender = null)
+        {
+            sender = sender ?? 
ActorRefs.NoSender; + return taskToPipe.ContinueWith(tresult => + { + if (tresult.IsCanceled || tresult.IsFaulted) + recipient.Tell(new Status.Failure(tresult.Exception), sender); + }, TaskContinuationOptions.ExecuteSynchronously & TaskContinuationOptions.AttachedToParent); + } } } From e4c38adfb4d776caadf0e75e400b28e5f625cfd4 Mon Sep 17 00:00:00 2001 From: Sean Gilliam Date: Thu, 14 May 2015 12:51:11 -0500 Subject: [PATCH 33/66] Fixed missing/boilerplate xmldoc in Configuration --- src/core/Akka/Configuration/Config.cs | 185 +++++++++++++++++- .../Configuration/ConfigurationException.cs | 19 +- .../Configuration/ConfigurationFactory.cs | 55 ++++-- 3 files changed, 236 insertions(+), 23 deletions(-) diff --git a/src/core/Akka/Configuration/Config.cs b/src/core/Akka/Configuration/Config.cs index e20ee0e515a..f4e4c925a5c 100644 --- a/src/core/Akka/Configuration/Config.cs +++ b/src/core/Akka/Configuration/Config.cs @@ -11,12 +11,26 @@ namespace Akka.Configuration { + /// + /// This class represents the main configuration object used by Akka.NET + /// when configuring objects within the system. To put it simply, it's + /// the internal representation of a HOCON (Human-Optimized Config Object Notation) + /// configuration string. + /// public class Config { + /// + /// Initializes a new instance of the class. + /// public Config() { } + /// + /// Initializes a new instance of the class. + /// + /// The root node to base this configuration. + /// "The root value cannot be null." public Config(HoconRoot root) { if (root.Value == null) @@ -26,6 +40,12 @@ public Config(HoconRoot root) Substitutions = root.Substitutions; } + /// + /// Initializes a new instance of the class. + /// + /// The configuration to use as the primary source. + /// The configuration to use as a secondary source. + /// The source configuration cannot be null. public Config(Config source, Config fallback) { if (source == null) @@ -35,10 +55,13 @@ public Config(Config source, Config fallback) Fallback = fallback; } + /// + /// The configuration used as a secondary source. + /// public Config Fallback { get; private set; } /// - /// Lets the caller know if this root node contains any values + /// Determines if this root node contains any values /// public virtual bool IsEmpty { @@ -46,12 +69,19 @@ public virtual bool IsEmpty } /// - /// Returns the root node of this configuration section + /// The root node of this configuration section /// public virtual HoconValue Root { get; private set; } + /// + /// An enumeration of substitutions values + /// public IEnumerable Substitutions { get; set; } + /// + /// Generates a deep clone of the current configuration. + /// + /// A deep clone of the current configuration protected Config Copy() { //deep clone @@ -85,6 +115,12 @@ private HoconValue GetNode(string path) return currentNode; } + /// + /// Retrieves a boolean value from the specified path in the configuration. + /// + /// The path that contains the value to retrieve. + /// The default value to return if the value doesn't exist. + /// The boolean value defined in the specified path. public virtual bool GetBoolean(string path, bool @default = false) { HoconValue value = GetNode(path); @@ -94,6 +130,11 @@ public virtual bool GetBoolean(string path, bool @default = false) return value.GetBoolean(); } + /// + /// Retrieves a long value, optionally suffixed with a 'b', from the specified path in the configuration. + /// + /// The path that contains the value to retrieve. + /// The long value defined in the specified path. 
public virtual long? GetByteSize(string path) { HoconValue value = GetNode(path); @@ -101,6 +142,12 @@ public virtual bool GetBoolean(string path, bool @default = false) return value.GetByteSize(); } + /// + /// Retrieves an integer value from the specified path in the configuration. + /// + /// The path that contains the value to retrieve. + /// The default value to return if the value doesn't exist. + /// The integer value defined in the specified path. public virtual int GetInt(string path, int @default = 0) { HoconValue value = GetNode(path); @@ -110,6 +157,12 @@ public virtual int GetInt(string path, int @default = 0) return value.GetInt(); } + /// + /// Retrieves a long value from the specified path in the configuration. + /// + /// The path that contains the value to retrieve. + /// The default value to return if the value doesn't exist. + /// The long value defined in the specified path. public virtual long GetLong(string path, long @default = 0) { HoconValue value = GetNode(path); @@ -119,6 +172,12 @@ public virtual long GetLong(string path, long @default = 0) return value.GetLong(); } + /// + /// Retrieves a string value from the specified path in the configuration. + /// + /// The path that contains the value to retrieve. + /// The default value to return if the value doesn't exist. + /// The string value defined in the specified path. public virtual string GetString(string path, string @default = null) { HoconValue value = GetNode(path); @@ -128,6 +187,12 @@ public virtual string GetString(string path, string @default = null) return value.GetString(); } + /// + /// Retrieves a float value from the specified path in the configuration. + /// + /// The path that contains the value to retrieve. + /// The default value to return if the value doesn't exist. + /// The float value defined in the specified path. public virtual float GetFloat(string path, float @default = 0) { HoconValue value = GetNode(path); @@ -137,6 +202,12 @@ public virtual float GetFloat(string path, float @default = 0) return value.GetFloat(); } + /// + /// Retrieves a decimal value from the specified path in the configuration. + /// + /// The path that contains the value to retrieve. + /// The default value to return if the value doesn't exist. + /// The decimal value defined in the specified path. public virtual decimal GetDecimal(string path, decimal @default = 0) { HoconValue value = GetNode(path); @@ -146,6 +217,12 @@ public virtual decimal GetDecimal(string path, decimal @default = 0) return value.GetDecimal(); } + /// + /// Retrieves a double value from the specified path in the configuration. + /// + /// The path that contains the value to retrieve. + /// The default value to return if the value doesn't exist. + /// The double value defined in the specified path. public virtual double GetDouble(string path, double @default = 0) { HoconValue value = GetNode(path); @@ -155,48 +232,88 @@ public virtual double GetDouble(string path, double @default = 0) return value.GetDouble(); } + /// + /// Retrieves a list of boolean values from the specified path in the configuration. + /// + /// The path that contains the values to retrieve. + /// The list of boolean values defined in the specified path. public virtual IList GetBooleanList(string path) { HoconValue value = GetNode(path); return value.GetBooleanList(); } + /// + /// Retrieves a list of decimal values from the specified path in the configuration. + /// + /// The path that contains the values to retrieve. 
+ /// The list of decimal values defined in the specified path. public virtual IList GetDecimalList(string path) { HoconValue value = GetNode(path); return value.GetDecimalList(); } + /// + /// Retrieves a list of float values from the specified path in the configuration. + /// + /// The path that contains the values to retrieve. + /// The list of float values defined in the specified path. public virtual IList GetFloatList(string path) { HoconValue value = GetNode(path); return value.GetFloatList(); } + /// + /// Retrieves a list of double values from the specified path in the configuration. + /// + /// The path that contains the values to retrieve. + /// The list of double values defined in the specified path. public virtual IList GetDoubleList(string path) { HoconValue value = GetNode(path); return value.GetDoubleList(); } + /// + /// Retrieves a list of int values from the specified path in the configuration. + /// + /// The path that contains the values to retrieve. + /// The list of int values defined in the specified path. public virtual IList GetIntList(string path) { HoconValue value = GetNode(path); return value.GetIntList(); } + /// + /// Retrieves a list of long values from the specified path in the configuration. + /// + /// The path that contains the values to retrieve. + /// The list of long values defined in the specified path. public virtual IList GetLongList(string path) { HoconValue value = GetNode(path); return value.GetLongList(); } + /// + /// Retrieves a list of byte values from the specified path in the configuration. + /// + /// The path that contains the values to retrieve. + /// The list of byte values defined in the specified path. public virtual IList GetByteList(string path) { HoconValue value = GetNode(path); return value.GetByteList(); } + /// + /// Retrieves a list of string values from the specified path in the configuration. + /// + /// The path that contains the values to retrieve. + /// The list of string values defined in the specified path. public virtual IList GetStringList(string path) { HoconValue value = GetNode(path); @@ -204,6 +321,12 @@ public virtual IList GetStringList(string path) return value.GetStringList(); } + /// + /// Retrieves a new configuration from the current configuration + /// with the root node being the supplied path. + /// + /// The path that contains the configuration to retrieve. + /// A new configuration with the root node being the supplied path. public virtual Config GetConfig(string path) { HoconValue value = GetNode(path); @@ -225,9 +348,9 @@ public virtual Config GetConfig(string path) } /// - /// Return a from a specific path. + /// Retrieves a from a specific path. /// - /// The path for which we're loading a value. + /// The path that contains the value to retrieve. /// The found at the location if one exists, otherwise null. public HoconValue GetValue(string path) { @@ -241,6 +364,13 @@ public TimeSpan GetMillisDuration(string path, TimeSpan? @default = null, bool a return GetTimeSpan(path, @default, allowInfinite); } + /// + /// Retrieves a value from the specified path in the configuration. + /// + /// The path that contains the value to retrieve. + /// The default value to return if the value doesn't exist. + /// true if infinite timespans are allowed; otherwise false. + /// The value defined in the specified path. public virtual TimeSpan GetTimeSpan(string path, TimeSpan? 
@default = null, bool allowInfinite = true) { HoconValue value = GetNode(path); @@ -250,6 +380,10 @@ public virtual TimeSpan GetTimeSpan(string path, TimeSpan? @default = null, bool return value.GetTimeSpan(allowInfinite); } + /// + /// Converts the current configuration to a string. + /// + /// A string containing the current configuration. public override string ToString() { if (Root == null) @@ -258,6 +392,12 @@ public override string ToString() return Root.ToString(); } + /// + /// Configure the current configuration with a secondary source. + /// + /// The configuration to use as a secondary source. + /// The current configuration configured with the specified fallback. + /// Config can not have itself as fallback. public virtual Config WithFallback(Config fallback) { if (fallback == this) @@ -287,24 +427,45 @@ public virtual bool HasPath(string path) return value != null; } + /// + /// Adds the supplied configuration string as a fallback to the supplied configuration. + /// + /// The configuration used as the source. + /// The string used as the fallback configuration. + /// The supplied configuration configured with the supplied fallback. public static Config operator +(Config config, string fallback) { Config fallbackConfig = ConfigurationFactory.ParseString(fallback); return config.WithFallback(fallbackConfig); } + /// + /// Adds the supplied configuration as a fallback to the supplied configuration string. + /// + /// The configuration string used as the source. + /// The configuration used as the fallback. + /// A configuration configured with the supplied fallback. public static Config operator +(string configHocon, Config fallbackConfig) { Config config = ConfigurationFactory.ParseString(configHocon); return config.WithFallback(fallbackConfig); } + /// + /// Performs an implicit conversion from to . + /// + /// The string that contains a configuration. + /// A configuration based on the supplied string. public static implicit operator Config(string str) { Config config = ConfigurationFactory.ParseString(str); return config; } + /// + /// Retrieves an enumerable key value pair representation of the current configuration. + /// + /// The current configuration represented as an enumerable key value pair. public virtual IEnumerable> AsEnumerable() { var used = new HashSet(); @@ -324,8 +485,18 @@ public virtual IEnumerable> AsEnumerable() } } + /// + /// This class contains convenience methods for working with . + /// public static class ConfigExtensions { + /// + /// Retrieves the current configuration or the fallback + /// configuration if the current one is null. + /// + /// The configuration used as the source. + /// The configuration to use as a secondary source. + /// The current configuration or the fallback configuration if the current one is null. public static Config SafeWithFallback(this Config config, Config fallback) { return config == null @@ -336,13 +507,13 @@ public static Config SafeWithFallback(this Config config, Config fallback) } /// - /// Convenience method for determining if has any usable content period. + /// Determines if the supplied configuration has any usable content period. /// - /// true if the is null or return true; false otherwise. + /// The configuration used as the source. + /// true> if the is null or ; otherwise false. 
public static bool IsNullOrEmpty(this Config config) { return config == null || config.IsEmpty; } } } - diff --git a/src/core/Akka/Configuration/ConfigurationException.cs b/src/core/Akka/Configuration/ConfigurationException.cs index 5609ba7535d..9b5ba2e88f3 100644 --- a/src/core/Akka/Configuration/ConfigurationException.cs +++ b/src/core/Akka/Configuration/ConfigurationException.cs @@ -6,21 +6,38 @@ //----------------------------------------------------------------------- using System; -using Akka.Actor; using System.Runtime.Serialization; +using Akka.Actor; namespace Akka.Configuration { + /// + /// The exception that is thrown when a configuration is invalid. + /// public class ConfigurationException : AkkaException { + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. public ConfigurationException(string message) : base(message) { } + /// + /// Initializes a new instance of the class. + /// + /// The message that describes the error. + /// The exception that is the cause of the current exception. public ConfigurationException(string message, Exception exception): base(message, exception) { } + /// + /// Initializes a new instance of the class. + /// + /// The object that holds the serialized object data. + /// The contextual information about the source or destination. protected ConfigurationException(SerializationInfo info, StreamingContext context) : base(info, context) { diff --git a/src/core/Akka/Configuration/ConfigurationFactory.cs b/src/core/Akka/Configuration/ConfigurationFactory.cs index 14792d716e2..ee6d1bac8fa 100644 --- a/src/core/Akka/Configuration/ConfigurationFactory.cs +++ b/src/core/Akka/Configuration/ConfigurationFactory.cs @@ -15,24 +15,26 @@ namespace Akka.Configuration { /// - /// Class ConfigurationFactory. + /// This class contains methods used to retrieve configuration information + /// from a variety of sources including user-supplied strings, configuration + /// files and assembly resources. /// public class ConfigurationFactory { /// - /// Gets the empty. + /// Generates an empty configuration. /// - /// The empty. public static Config Empty { get { return ParseString(""); } } /// - /// Parses the string. + /// Generates a configuration defined in the supplied + /// HOCON (Human-Optimized Config Object Notation) string. /// - /// The json. - /// Config. + /// A string that contains configuration options to use. + /// The configuration defined in the supplied HOCON string. public static Config ParseString(string hocon) { HoconRoot res = Parser.Parse(hocon); @@ -40,9 +42,10 @@ public static Config ParseString(string hocon) } /// - /// Loads this instance. + /// Loads a configuration defined in the current application's + /// configuration file, e.g. app.config or web.config /// - /// Config. + /// The configuration defined in the configuration file. public static Config Load() { var section = (AkkaConfigurationSection)ConfigurationManager.GetSection("akka") ?? new AkkaConfigurationSection(); @@ -52,19 +55,21 @@ public static Config Load() } /// - /// Defaults this instance. + /// Retrieves the default configuration that Akka.NET uses + /// when no configuration has been defined. /// - /// Config. + /// The configuration that contains default values for all options. public static Config Default() { return FromResource("Akka.Configuration.Pigeon.conf"); } /// - /// Froms the resource. + /// Retrieves a configuration defined in a resource of the + /// current executing assembly. /// - /// Name of the resource. 
- /// Config. + /// The name of the resource that contains the configuration. + /// The configuration defined in the current executing assembly. internal static Config FromResource(string resourceName) { Assembly assembly = Assembly.GetExecutingAssembly(); @@ -72,6 +77,13 @@ internal static Config FromResource(string resourceName) return FromResource(resourceName, assembly); } + /// + /// Retrieves a configuration defined in a resource of the + /// assembly containing the supplied instance object. + /// + /// The name of the resource that contains the configuration. + /// An instance object located in the assembly to search. + /// The configuration defined in the assembly that contains the instanced object. public static Config FromResource(string resourceName, object instanceInAssembly) { var type = instanceInAssembly as Type; @@ -83,11 +95,24 @@ public static Config FromResource(string resourceName, object instanceInAssembly return FromResource(resourceName, instanceInAssembly.GetType().Assembly); } - public static Config FromResource(string resourceName) + /// + /// Retrieves a configuration defined in a resource of the assembly + /// containing the supplied type . + /// + /// A type located in the assembly to search. + /// The name of the resource that contains the configuration. + /// The configuration defined in the assembly that contains the type . + public static Config FromResource(string resourceName) { - return FromResource(resourceName, typeof(TypeInAssembly).Assembly); + return FromResource(resourceName, typeof(TAssembly).Assembly); } + /// + /// Retrieves a configuration defined in a resource of the supplied assembly. + /// + /// The name of the resource that contains the configuration. + /// The assembly that contains the given resource. + /// The configuration defined in the assembly that contains the given resource. public static Config FromResource(string resourceName, Assembly assembly) { using(Stream stream = assembly.GetManifestResourceStream(resourceName)) From 51907905301f9f869c4929f0efeaf3e0b4a9c07d Mon Sep 17 00:00:00 2001 From: rogeralsing Date: Fri, 15 May 2015 08:10:18 +0200 Subject: [PATCH 34/66] Updated default throughput values to conform with JVM and promote fairness amongst actors and tasks --- src/core/Akka/Configuration/Pigeon.conf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/core/Akka/Configuration/Pigeon.conf b/src/core/Akka/Configuration/Pigeon.conf index 47ab68804d4..11b0b239dee 100644 --- a/src/core/Akka/Configuration/Pigeon.conf +++ b/src/core/Akka/Configuration/Pigeon.conf @@ -246,12 +246,12 @@ akka { task-dispatcher { type = "Akka.Dispatch.TaskDispatcherConfigurator" - throughput = 100 + throughput = 30 } default-fork-join-dispatcher{ type = ForkJoinDispatcher - throughput = 100 + throughput = 30 dedicated-thread-pool{ #settings for Helios.DedicatedThreadPool thread-count = 3 #number of threads #deadlock-timeout = 3s #optional timeout for deadlock detection @@ -273,7 +273,7 @@ akka { # Throughput defines the number of messages that are processed in a batch # before the thread is returned to the pool. Set to 1 for as fair as possible. 
- throughput = 100 + throughput = 30 # Throughput deadline for Dispatcher, set to 0 or negative for no deadline throughput-deadline-time = 0ms From 330faab71c1264a8fdb81922125cbbf1f6e645aa Mon Sep 17 00:00:00 2001 From: rogeralsing Date: Fri, 15 May 2015 15:32:19 +0200 Subject: [PATCH 35/66] Dedicated thread Scheduler Updated DedicatedThreadScheduler to use MonotonicClock Updated DedicatedThreadScheduler to use pigeonconf scheduler ticks Udated the shutdown handler --- .../Akka/Actor/Internals/ActorSystemImpl.cs | 6 +- .../Scheduler/DedicatedThreadScheduler.cs | 161 ++++++++++++++++++ src/core/Akka/Akka.csproj | 1 + 3 files changed, 165 insertions(+), 3 deletions(-) create mode 100644 src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs diff --git a/src/core/Akka/Actor/Internals/ActorSystemImpl.cs b/src/core/Akka/Actor/Internals/ActorSystemImpl.cs index 3449b0bba88..0d85a5d4515 100644 --- a/src/core/Akka/Actor/Internals/ActorSystemImpl.cs +++ b/src/core/Akka/Actor/Internals/ActorSystemImpl.cs @@ -51,11 +51,11 @@ public ActorSystemImpl(string name, Config config) if(config == null) throw new ArgumentNullException("config"); - _name = name; - ConfigureScheduler(); + _name = name; ConfigureSettings(config); ConfigureEventStream(); ConfigureProvider(); + ConfigureScheduler(); ConfigureSerialization(); ConfigureMailboxes(); ConfigureDispatchers(); @@ -127,7 +127,7 @@ public override ActorSelection ActorSelection(string actorPath) private void ConfigureScheduler() { - _scheduler = new TaskBasedScheduler(); + _scheduler = new DedicatedThreadScheduler(this);;// new TaskBasedScheduler(); } /// diff --git a/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs b/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs new file mode 100644 index 00000000000..ef4fb9df9a4 --- /dev/null +++ b/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs @@ -0,0 +1,161 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Threading; + +namespace Akka.Actor +{ + public class DedicatedThreadScheduler : SchedulerBase, IDateTimeOffsetNowTimeProvider + { + private readonly ConcurrentQueue _workQueue = new ConcurrentQueue(); + protected override DateTimeOffset TimeNow { get { return DateTimeOffset.Now; } } + public override TimeSpan MonotonicClock { get { return Util.MonotonicClock.Elapsed; } } + public override TimeSpan HighResMonotonicClock { get { return Util.MonotonicClock.ElapsedHighRes; } } + + //TODO: use some more efficient approach to handle future work + public DedicatedThreadScheduler(ActorSystem system) + { + var precision = system.Settings.Config.GetTimeSpan("akka.scheduler.tick-duration"); + var thread = new Thread(_ => + { + var allWork = new List(); + while (true) + { + if (system.TerminationTask.IsCompleted) + { + return; + } + + Thread.Sleep(precision); + var now = HighResMonotonicClock.Ticks; + ScheduledWork work; + while(_workQueue.TryDequeue(out work)) + { + //has work already expired? + if (work.UtcTickExpires < now) + { + work.Action(); + } + else + { + //buffer it for later + allWork.Add(work); + } + } + //this is completely stupid, but does work.. 
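+                    // Drain the work buffered from earlier ticks: run anything whose deadline
+                    // has now passed and keep the rest buffered for the next tick.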
+ if (allWork.Count > 0) + { + var tmp = allWork; + allWork = new List(); + foreach (var bufferedWork in tmp) + { + if (bufferedWork.UtcTickExpires < now) + { + bufferedWork.Action(); + } + else + { + allWork.Add(bufferedWork); + } + } + } + } + }) {IsBackground = true}; + + thread.Start(); + } + + protected override void InternalScheduleTellOnce(TimeSpan delay, ICanTell receiver, object message, IActorRef sender, ICancelable cancelable) + { + var cancellationToken = cancelable == null ? CancellationToken.None : cancelable.Token; + InternalScheduleOnce(delay, () => + { + receiver.Tell(message, sender); + }, cancellationToken); + } + + protected override void InternalScheduleTellRepeatedly(TimeSpan initialDelay, TimeSpan interval, ICanTell receiver, object message, IActorRef sender, ICancelable cancelable) + { + var cancellationToken = cancelable == null ? CancellationToken.None : cancelable.Token; + InternalScheduleRepeatedly(initialDelay, interval, () => receiver.Tell(message, sender), cancellationToken); + } + + protected override void InternalScheduleOnce(TimeSpan delay, Action action, ICancelable cancelable) + { + var cancellationToken = cancelable == null ? CancellationToken.None : cancelable.Token; + InternalScheduleOnce(delay, action, cancellationToken); + } + + protected override void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval, Action action, ICancelable cancelable) + { + var cancellationToken = cancelable == null ? CancellationToken.None : cancelable.Token; + InternalScheduleRepeatedly(initialDelay, interval, action, cancellationToken); + } + + + private void InternalScheduleOnce(TimeSpan initialDelay, Action action, CancellationToken token) + { + Action executeAction = () => + { + if (token.IsCancellationRequested) + return; + + try + { + action(); + } + catch (OperationCanceledException) { } + //TODO: Should we log other exceptions? /@hcanber + }; + AddWork(initialDelay, executeAction, token); + + } + + + private void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval, Action action, CancellationToken token) + { + Action executeAction = null; + executeAction = () => + { + if (token.IsCancellationRequested) + return; + + try + { + action(); + } + catch (OperationCanceledException) { } + //TODO: Should we log other exceptions? 
/@hcanber + + if (token.IsCancellationRequested) + return; + + AddWork(interval, executeAction,token); + + }; + AddWork(initialDelay, executeAction, token); + + } + + private void AddWork(TimeSpan delay, Action work,CancellationToken token) + { + var expected = HighResMonotonicClock + delay; + var scheduledWord = new ScheduledWork(expected.Ticks, work,token); + _workQueue.Enqueue(scheduledWord); + } + } + + public class ScheduledWork + { + public ScheduledWork(long utcTickExpires, Action action,CancellationToken token) + { + UtcTickExpires = utcTickExpires; + Action = action; + Token = token; + } + + public CancellationToken Token { get; set; } + public long UtcTickExpires { get; set; } + public Action Action { get; set; } + } +} diff --git a/src/core/Akka/Akka.csproj b/src/core/Akka/Akka.csproj index b1f9d2e73dc..0c14585b69b 100644 --- a/src/core/Akka/Akka.csproj +++ b/src/core/Akka/Akka.csproj @@ -89,6 +89,7 @@ + From aec0660f7416dc8f49dc555f7f49f465345e4b53 Mon Sep 17 00:00:00 2001 From: rogeralsing Date: Fri, 15 May 2015 19:05:58 +0200 Subject: [PATCH 36/66] updated Property name to not include Utc --- .../Akka/Actor/Scheduler/DedicatedThreadScheduler.cs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs b/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs index ef4fb9df9a4..2c4733c781d 100644 --- a/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs +++ b/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs @@ -32,7 +32,7 @@ public DedicatedThreadScheduler(ActorSystem system) while(_workQueue.TryDequeue(out work)) { //has work already expired? - if (work.UtcTickExpires < now) + if (work.TickExpires < now) { work.Action(); } @@ -49,7 +49,7 @@ public DedicatedThreadScheduler(ActorSystem system) allWork = new List(); foreach (var bufferedWork in tmp) { - if (bufferedWork.UtcTickExpires < now) + if (bufferedWork.TickExpires < now) { bufferedWork.Action(); } @@ -147,15 +147,15 @@ private void AddWork(TimeSpan delay, Action work,CancellationToken token) public class ScheduledWork { - public ScheduledWork(long utcTickExpires, Action action,CancellationToken token) + public ScheduledWork(long tickExpires, Action action,CancellationToken token) { - UtcTickExpires = utcTickExpires; + TickExpires = tickExpires; Action = action; Token = token; } public CancellationToken Token { get; set; } - public long UtcTickExpires { get; set; } + public long TickExpires { get; set; } public Action Action { get; set; } } } From b4818e66de4316e0bee8de9d7ac460a67daafdad Mon Sep 17 00:00:00 2001 From: Joshua Benjamin Date: Wed, 13 May 2015 20:13:19 -0700 Subject: [PATCH 37/66] Added dispatcher fixes for remote and cluster --- src/core/Akka.Cluster/ClusterDaemon.cs | 10 +- .../TestRunCoordinatorSpec.cs | 1 + .../Reporting/TestRunCoordinator.cs | 14 +- src/core/Akka.Remote/Endpoint.cs | 157 +++++++++++------- src/core/Akka.Remote/EndpointManager.cs | 22 ++- src/core/Akka.Remote/RemoteSettings.cs | 4 +- src/core/Akka.Tests/Dispatch/MailboxesSpec.cs | 1 + .../Actor/Scheduler/TaskBasedScheduler.cs | 10 +- src/core/Akka/Configuration/Pigeon.conf | 88 +++++----- src/core/Akka/Dispatch/ForkJoinDispatcher.cs | 15 +- .../Akka/Dispatch/SingleThreadDispatcher.cs | 1 + src/core/Akka/Dispatch/ThreadPoolBuilder.cs | 2 +- .../Helios.Concurrency.DedicatedThreadPool.cs | 39 +++-- .../Cluster/Samples.Cluster.Simple/App.config | 2 +- .../Cluster/Samples.Cluster.Simple/Program.cs | 4 +- 15 files changed, 214 insertions(+), 156 
deletions(-) diff --git a/src/core/Akka.Cluster/ClusterDaemon.cs b/src/core/Akka.Cluster/ClusterDaemon.cs index 2a693a69fc6..2bc25933d8e 100644 --- a/src/core/Akka.Cluster/ClusterDaemon.cs +++ b/src/core/Akka.Cluster/ClusterDaemon.cs @@ -565,9 +565,9 @@ public ClusterDaemon(ClusterSettings settings) // cause deadlock. The Cluster extension is currently being created and is waiting // for response from GetClusterCoreRef in its constructor. _coreSupervisor = - Context.ActorOf(Props.Create().WithDispatcher(Context.Props.Dispatcher), "core"); + Context.ActorOf(Props.Create().WithDispatcher(settings.UseDispatcher), "core"); - Context.ActorOf(Props.Create().WithDispatcher(Context.Props.Dispatcher), "heartbeatReceiver"); + Context.ActorOf(Props.Create().WithDispatcher(settings.UseDispatcher), "heartbeatReceiver"); _settings = settings; } @@ -578,13 +578,13 @@ protected override void OnReceive(object message) .With(msg => _coreSupervisor.Forward(msg)) .With( msg => - Context.ActorOf(Props.Create(() => new OnMemberUpListener(msg.Callback)).WithDeploy(Deploy.Local))) + Context.ActorOf(Props.Create(() => new OnMemberUpListener(msg.Callback)).WithDispatcher(_settings.UseDispatcher).WithDeploy(Deploy.Local))) .With( msg => { if (_settings.MetricsEnabled) Context.ActorOf( - Props.Create().WithDispatcher(Context.Props.Dispatcher), + Props.Create().WithDispatcher(_settings.UseDispatcher), "metrics"); }); } @@ -724,7 +724,7 @@ protected override void PreStart() if (_cluster.Settings.AutoDownUnreachableAfter != null) Context.ActorOf( - AutoDown.Props(_cluster.Settings.AutoDownUnreachableAfter.Value).WithDispatcher(Context.Props.Dispatcher), + AutoDown.Props(_cluster.Settings.AutoDownUnreachableAfter.Value).WithDispatcher(_cluster.Settings.UseDispatcher), "autoDown"); if (_cluster.Settings.SeedNodes.IsEmpty) diff --git a/src/core/Akka.MultiNodeTestRunner.Shared.Tests/TestRunCoordinatorSpec.cs b/src/core/Akka.MultiNodeTestRunner.Shared.Tests/TestRunCoordinatorSpec.cs index b3b7b2d4c26..07bf62b86e9 100644 --- a/src/core/Akka.MultiNodeTestRunner.Shared.Tests/TestRunCoordinatorSpec.cs +++ b/src/core/Akka.MultiNodeTestRunner.Shared.Tests/TestRunCoordinatorSpec.cs @@ -5,6 +5,7 @@ // //----------------------------------------------------------------------- +using System; using System.Collections.Generic; using System.Linq; using Akka.Actor; diff --git a/src/core/Akka.MultiNodeTestRunner.Shared/Reporting/TestRunCoordinator.cs b/src/core/Akka.MultiNodeTestRunner.Shared/Reporting/TestRunCoordinator.cs index c42d7fe2746..ac30789073e 100644 --- a/src/core/Akka.MultiNodeTestRunner.Shared/Reporting/TestRunCoordinator.cs +++ b/src/core/Akka.MultiNodeTestRunner.Shared/Reporting/TestRunCoordinator.cs @@ -8,6 +8,7 @@ using System; using System.Collections.Generic; using System.Linq; +using System.Threading.Tasks; using Akka.Actor; using Akka.MultiNodeTestRunner.Shared.Sinks; @@ -115,12 +116,12 @@ private void SetReceive() Receive(state => Sender.Tell(TestRunData.Copy(TestRunPassed(TestRunData)))); Receive(messages => AddSubscriber(messages)); Receive(messages => RemoveSubscriber(messages)); - Receive(run => + Receive(async run => { //clean up the current spec, if it hasn't been done already if (_currentSpecRunActor != null) { - ReceiveEndSpecRun(new EndSpec()); + await ReceiveEndSpecRun(new EndSpec()); } //Mark the test run as finished @@ -154,16 +155,11 @@ private void ReceiveBeginSpecRun(BeginNewSpec spec) Props.Create(() => new SpecRunCoordinator(spec.ClassName, spec.MethodName, spec.Nodes))); } - private void 
ReceiveEndSpecRun(EndSpec spec) + private async Task ReceiveEndSpecRun(EndSpec spec) { //Should receive a FactData in return - var specCompleteTask = _currentSpecRunActor.Ask(spec, TimeSpan.FromSeconds(2)); + var factData = await _currentSpecRunActor.Ask(spec, TimeSpan.FromSeconds(2)); - //Going to block so we can't accidentally start processing messages for a new spec yet.. - specCompleteTask.Wait(); - - //Got the result we needed - var factData = specCompleteTask.Result; TestRunData.AddSpec(factData); //Publish the FactData back to any subscribers who wanted it diff --git a/src/core/Akka.Remote/Endpoint.cs b/src/core/Akka.Remote/Endpoint.cs index dc2c6b79ee9..72389e4367e 100644 --- a/src/core/Akka.Remote/Endpoint.cs +++ b/src/core/Akka.Remote/Endpoint.cs @@ -262,28 +262,35 @@ internal class ReliableDeliverySupervisor : UntypedActor private readonly ILoggingAdapter _log = Context.GetLogger(); private AkkaProtocolHandle handleOrActive; - private Address localAddress; - private Address remoteAddress; - private int? refuseUid; - private AkkaProtocolTransport transport; - private RemoteSettings settings; + private readonly Address _localAddress; + private readonly Address _remoteAddress; + private readonly int? _refuseUid; + private readonly AkkaProtocolTransport _transport; + private readonly RemoteSettings _settings; private AkkaPduCodec codec; - private AkkaProtocolHandle currentHandle; - private ConcurrentDictionary receiveBuffers; + private AkkaProtocolHandle _currentHandle; + private readonly ConcurrentDictionary _receiveBuffers; private EndpointRegistry _endpoints = new EndpointRegistry(); - public ReliableDeliverySupervisor(AkkaProtocolHandle handleOrActive, Address localAddress, Address remoteAddress, - int? refuseUid, AkkaProtocolTransport transport, RemoteSettings settings, AkkaPduCodec codec, ConcurrentDictionary receiveBuffers) + public ReliableDeliverySupervisor( + AkkaProtocolHandle handleOrActive, + Address localAddress, + Address remoteAddress, + int? refuseUid, + AkkaProtocolTransport transport, + RemoteSettings settings, + AkkaPduCodec codec, + ConcurrentDictionary receiveBuffers) { this.handleOrActive = handleOrActive; - this.localAddress = localAddress; - this.remoteAddress = remoteAddress; - this.refuseUid = refuseUid; - this.transport = transport; - this.settings = settings; + _localAddress = localAddress; + _remoteAddress = remoteAddress; + _refuseUid = refuseUid; + _transport = transport; + _settings = settings; this.codec = codec; - currentHandle = handleOrActive; - this.receiveBuffers = receiveBuffers; + _currentHandle = handleOrActive; + _receiveBuffers = receiveBuffers; Reset(); _writer = CreateWriter(); Uid = handleOrActive != null ? 
(int?)handleOrActive.HandshakeInfo.Uid : null; @@ -305,20 +312,20 @@ public ReliableDeliverySupervisor(AkkaProtocolHandle handleOrActive, Address loc public Deadline BailoutAt { - get { return Deadline.Now + settings.InitialSysMsgDeliveryTimeout; } + get { return Deadline.Now + _settings.InitialSysMsgDeliveryTimeout; } } - private ICancelable _autoResendTimer = null; - private AckedSendBuffer _resendBuffer = null; - private SeqNo _lastCumulativeAck = null; + private ICancelable _autoResendTimer; + private AckedSendBuffer _resendBuffer; + private SeqNo _lastCumulativeAck; private long _seqCounter; - private List _pendingAcks = null; + private List _pendingAcks; private IActorRef _writer; private void Reset() { - _resendBuffer = new AckedSendBuffer(settings.SysMsgBufferSize); + _resendBuffer = new AckedSendBuffer(_settings.SysMsgBufferSize); ScheduleAutoResend(); _lastCumulativeAck = new SeqNo(-1); _seqCounter = 0L; @@ -329,7 +336,7 @@ private void ScheduleAutoResend() { if (_autoResendTimer == null) { - _autoResendTimer = Context.System.Scheduler.ScheduleTellOnceCancelable(settings.SysResendTimeout, Self, new AttemptSysMsgRedelivery(), + _autoResendTimer = Context.System.Scheduler.ScheduleTellOnceCancelable(_settings.SysResendTimeout, Self, new AttemptSysMsgRedelivery(), Self); } } @@ -365,10 +372,10 @@ protected override SupervisorStrategy SupervisorStrategy() .With(problem => directive = Directive.Escalate) .Default(e => { - _log.Warning("Association with remote system {0} has failed; address is now gated for {1} ms. Reason is: [{2}]", remoteAddress, settings.RetryGateClosedFor.TotalMilliseconds, ex.Message); + _log.Warning("Association with remote system {0} has failed; address is now gated for {1} ms. Reason is: [{2}]", _remoteAddress, _settings.RetryGateClosedFor.TotalMilliseconds, ex.Message); UidConfirmed = false; Context.Become(Gated); - currentHandle = null; + _currentHandle = null; Context.Parent.Tell(new EndpointWriter.StoppedReading(Self)); directive = Directive.Stop; }); @@ -384,7 +391,7 @@ protected override void PostStop() Context.System.DeadLetters.Tell(msg.Copy(opt: null)); } EndpointManager.ResendState value; - receiveBuffers.TryRemove(new EndpointManager.Link(localAddress, remoteAddress), out value); + _receiveBuffers.TryRemove(new EndpointManager.Link(_localAddress, _remoteAddress), out value); } protected override void PostRestart(Exception reason) @@ -442,10 +449,10 @@ protected override void OnReceive(object message) }) .With(terminated => { - currentHandle = null; + _currentHandle = null; Context.Parent.Tell(new EndpointWriter.StoppedReading(Self)); if (_resendBuffer.NonAcked.Count > 0 || _resendBuffer.Nacked.Count > 0) - Context.System.Scheduler.ScheduleTellOnce(settings.SysResendTimeout, Self, + Context.System.Scheduler.ScheduleTellOnce(_settings.SysResendTimeout, Self, new AttemptSysMsgRedelivery(), Self); Context.Become(Idle); }) @@ -456,7 +463,7 @@ protected override void OnReceive(object message) UidConfirmed = true; if (Uid.HasValue && Uid.Value != g.Uid) Reset(); else UnstashAcks(); - Uid = refuseUid; + Uid = _refuseUid; }) .With(stopped => { @@ -468,7 +475,7 @@ protected void Gated(object message) { message.Match() .With( - terminated => Context.System.Scheduler.ScheduleTellOnce(settings.RetryGateClosedFor, Self, new Ungate(), Self)) + terminated => Context.System.Scheduler.ScheduleTellOnce(_settings.RetryGateClosedFor, Self, new Ungate(), Self)) .With(ungate => { if (_resendBuffer.NonAcked.Count > 0 || _resendBuffer.Nacked.Count > 0) @@ -480,7 +487,7 @@ 
protected void Gated(object message) // In other words, this action is safe. if (!UidConfirmed && BailoutAt.IsOverdue) { - throw new InvalidAssociation(localAddress, remoteAddress, + throw new InvalidAssociation(_localAddress, _remoteAddress, new TimeoutException("Delivery of system messages timed out and they were dropped")); } @@ -552,14 +559,23 @@ public GotUid(int uid) public int Uid { get; private set; } } - public static Props ReliableDeliverySupervisorProps(AkkaProtocolHandle handleOrActive, Address localAddress, Address remoteAddress, - int? refuseUid, AkkaProtocolTransport transport, RemoteSettings settings, AkkaPduCodec codec, ConcurrentDictionary receiveBuffers) + public static Props ReliableDeliverySupervisorProps( + AkkaProtocolHandle handleOrActive, + Address localAddress, + Address remoteAddress, + int? refuseUid, + AkkaProtocolTransport transport, + RemoteSettings settings, + AkkaPduCodec codec, + ConcurrentDictionary receiveBuffers, + string dispatcher) { return Props.Create( () => new ReliableDeliverySupervisor(handleOrActive, localAddress, remoteAddress, refuseUid, transport, - settings, codec, receiveBuffers)); + settings, codec, receiveBuffers)) + .WithDispatcher(dispatcher); } #endregion @@ -611,7 +627,7 @@ private void TryBuffer(EndpointManager.Send s) } catch (Exception ex) { - throw new HopelessAssociation(localAddress, remoteAddress, Uid, ex); + throw new HopelessAssociation(_localAddress, _remoteAddress, Uid, ex); } } @@ -620,9 +636,11 @@ private void TryBuffer(EndpointManager.Send s) private IActorRef CreateWriter() { var writer = - Context.ActorOf(RARP.For(Context.System).ConfigureDispatcher( - EndpointWriter.EndpointWriterProps(currentHandle, localAddress, remoteAddress, refuseUid, transport, - settings, new AkkaPduProtobuffCodec(), receiveBuffers, Self)).WithDeploy(Deploy.Local), + Context.ActorOf(RARP.For(Context.System) + .ConfigureDispatcher( + EndpointWriter.EndpointWriterProps(_currentHandle, _localAddress, _remoteAddress, _refuseUid, _transport, + _settings, new AkkaPduProtobuffCodec(), _receiveBuffers, Self) + .WithDeploy(Deploy.Local)), "endpointWriter"); Context.Watch(writer); return writer; @@ -703,7 +721,10 @@ internal abstract class EndpointActor : FSM protected bool Inbound { get; set; } - protected EndpointActor(Address localAddress, Address remoteAddress, AkkaProtocolTransport transport, + protected EndpointActor( + Address localAddress, + Address remoteAddress, + AkkaProtocolTransport transport, RemoteSettings settings) { EventPublisher = new EventPublisher(Context.System, _log, Logging.LogLevelFor(settings.RemoteLifecycleEventsLogLevel)); @@ -746,9 +767,15 @@ private void TryPublish(RemotingLifecycleEvent ev) /// internal class EndpointWriter : EndpointActor { - public EndpointWriter(AkkaProtocolHandle handleOrActive, Address localAddress, Address remoteAddress, - int? refuseUid, AkkaProtocolTransport transport, RemoteSettings settings, - AkkaPduCodec codec, ConcurrentDictionary receiveBuffers, + public EndpointWriter( + AkkaProtocolHandle handleOrActive, + Address localAddress, + Address remoteAddress, + int? 
refuseUid, + AkkaProtocolTransport transport, + RemoteSettings settings, + AkkaPduCodec codec, + ConcurrentDictionary receiveBuffers, IActorRef reliableDeliverySupervisor = null) : base(localAddress, remoteAddress, transport, settings) { @@ -1057,10 +1084,11 @@ private void PublishAndThrow(Exception reason, LogLevel level) private IActorRef StartReadEndpoint(AkkaProtocolHandle handle) { var newReader = - Context.ActorOf(RARP.For(Context.System).ConfigureDispatcher( + Context.ActorOf(RARP.For(Context.System) + .ConfigureDispatcher( EndpointReader.ReaderProps(LocalAddress, RemoteAddress, Transport, Settings, _codec, _msgDispatcher, - Inbound, (int)handle.HandshakeInfo.Uid, _receiveBuffers, _reliableDeliverySupervisor)) - .WithDeploy(Deploy.Local), + Inbound, (int)handle.HandshakeInfo.Uid, _receiveBuffers, _reliableDeliverySupervisor) + .WithDeploy(Deploy.Local)), string.Format("endpointReader-{0}-{1}", AddressUrlEncoder.Encode(RemoteAddress), _readerId.Next())); Context.Watch(newReader); handle.ReadHandlerSource.SetResult(new ActorHandleEventListener(newReader)); @@ -1470,9 +1498,16 @@ public OutboundAck(Ack ack) /// internal class EndpointReader : EndpointActor { - public EndpointReader(Address localAddress, Address remoteAddress, AkkaProtocolTransport transport, - RemoteSettings settings, AkkaPduCodec codec, IInboundMessageDispatcher msgDispatch, bool inbound, - int uid, ConcurrentDictionary receiveBuffers, + public EndpointReader( + Address localAddress, + Address remoteAddress, + AkkaProtocolTransport transport, + RemoteSettings settings, + AkkaPduCodec codec, + IInboundMessageDispatcher msgDispatch, + bool inbound, + int uid, + ConcurrentDictionary receiveBuffers, IActorRef reliableDeliverySupervisor = null) : base(localAddress, remoteAddress, transport, settings) { @@ -1485,13 +1520,13 @@ public EndpointReader(Address localAddress, Address remoteAddress, AkkaProtocolT _provider = RARP.For(Context.System).Provider; } - private AkkaPduCodec _codec; - private IActorRef _reliableDeliverySupervisor; - private ConcurrentDictionary _receiveBuffers; - private int _uid; - private IInboundMessageDispatcher _msgDispatch; + private readonly AkkaPduCodec _codec; + private readonly IActorRef _reliableDeliverySupervisor; + private readonly ConcurrentDictionary _receiveBuffers; + private readonly int _uid; + private readonly IInboundMessageDispatcher _msgDispatch; - private RemoteActorRefProvider _provider; + private readonly RemoteActorRefProvider _provider; private AckedReceiveBuffer _ackedReceiveBuffer = new AckedReceiveBuffer(); #region ActorBase overrides @@ -1666,8 +1701,15 @@ private AckAndMessage TryDecodeMessageAndAck(ByteString pdu) #region Static members - public static Props ReaderProps(Address localAddress, Address remoteAddress, AkkaProtocolTransport transport, - RemoteSettings settings, AkkaPduCodec codec, IInboundMessageDispatcher dispatcher, bool inbound, int uid, + public static Props ReaderProps( + Address localAddress, + Address remoteAddress, + AkkaProtocolTransport transport, + RemoteSettings settings, + AkkaPduCodec codec, + IInboundMessageDispatcher dispatcher, + bool inbound, + int uid, ConcurrentDictionary receiveBuffers, IActorRef reliableDeliverySupervisor = null) { @@ -1675,7 +1717,8 @@ public static Props ReaderProps(Address localAddress, Address remoteAddress, Akk Props.Create( () => new EndpointReader(localAddress, remoteAddress, transport, settings, codec, dispatcher, inbound, - uid, receiveBuffers, reliableDeliverySupervisor)); + uid, receiveBuffers, 
reliableDeliverySupervisor)) + .WithDispatcher(settings.Dispatcher); } #endregion diff --git a/src/core/Akka.Remote/EndpointManager.cs b/src/core/Akka.Remote/EndpointManager.cs index 993bd18f030..6ae06b8e7cb 100644 --- a/src/core/Akka.Remote/EndpointManager.cs +++ b/src/core/Akka.Remote/EndpointManager.cs @@ -793,8 +793,14 @@ private void CreateAndRegisterEndpoint(AkkaProtocolHandle handle, int? refuseId) } } - private IActorRef CreateEndpoint(Address remoteAddress, Address localAddress, AkkaProtocolTransport transport, - RemoteSettings endpointSettings, bool writing, AkkaProtocolHandle handleOption = null, int? refuseUid = null) + private IActorRef CreateEndpoint( + Address remoteAddress, + Address localAddress, + AkkaProtocolTransport transport, + RemoteSettings endpointSettings, + bool writing, + AkkaProtocolHandle handleOption = null, + int? refuseUid = null) { System.Diagnostics.Debug.Assert(_transportMapping.ContainsKey(localAddress)); System.Diagnostics.Debug.Assert(writing || refuseUid == null); @@ -804,20 +810,24 @@ private IActorRef CreateEndpoint(Address remoteAddress, Address localAddress, Ak if (writing) { endpointActor = - Context.ActorOf(RARP.For(Context.System).ConfigureDispatcher( + Context.ActorOf(RARP.For(Context.System) + .ConfigureDispatcher( ReliableDeliverySupervisor.ReliableDeliverySupervisorProps(handleOption, localAddress, remoteAddress, refuseUid, transport, endpointSettings, new AkkaPduProtobuffCodec(), - _receiveBuffers).WithDeploy(Deploy.Local)), + _receiveBuffers, endpointSettings.Dispatcher) + .WithDeploy(Deploy.Local)), string.Format("reliableEndpointWriter-{0}-{1}", AddressUrlEncoder.Encode(remoteAddress), endpointId.Next())); } else { endpointActor = - Context.ActorOf(RARP.For(Context.System).ConfigureDispatcher( + Context.ActorOf(RARP.For(Context.System) + .ConfigureDispatcher( EndpointWriter.EndpointWriterProps(handleOption, localAddress, remoteAddress, refuseUid, transport, endpointSettings, new AkkaPduProtobuffCodec(), _receiveBuffers, - reliableDeliverySupervisor: null).WithDeploy(Deploy.Local)), + reliableDeliverySupervisor: null) + .WithDeploy(Deploy.Local)), string.Format("endpointWriter-{0}-{1}", AddressUrlEncoder.Encode(remoteAddress), endpointId.Next())); } diff --git a/src/core/Akka.Remote/RemoteSettings.cs b/src/core/Akka.Remote/RemoteSettings.cs index 2d1eeb5c220..624a98bac74 100644 --- a/src/core/Akka.Remote/RemoteSettings.cs +++ b/src/core/Akka.Remote/RemoteSettings.cs @@ -117,7 +117,9 @@ private Config TransportConfigFor(string transportName) public Props ConfigureDispatcher(Props props) { - return String.IsNullOrEmpty(Dispatcher) ? props : props.WithDispatcher(Dispatcher); + return String.IsNullOrEmpty(Dispatcher) + ? 
props + : props.WithDispatcher(Dispatcher); } public class TransportSettings diff --git a/src/core/Akka.Tests/Dispatch/MailboxesSpec.cs b/src/core/Akka.Tests/Dispatch/MailboxesSpec.cs index 0cddf909080..493c3e389c7 100644 --- a/src/core/Akka.Tests/Dispatch/MailboxesSpec.cs +++ b/src/core/Akka.Tests/Dispatch/MailboxesSpec.cs @@ -104,6 +104,7 @@ public void PriorityMailboxKeepsOrderingWithManyPriorityValues() //pause mailbox until all messages have been told actor.Tell(Suspend.Instance); + AwaitCondition(()=> ((LocalActorRef)actor).Cell.Mailbox.IsSuspended); // creates 50 messages with values spanning from Int32.MinValue to Int32.MaxValue var values = new int[50]; var increment = (int)(UInt32.MaxValue / values.Length); diff --git a/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs b/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs index ce9276af0c4..612ac9a9e6a 100644 --- a/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs +++ b/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs @@ -50,14 +50,14 @@ private void InternalScheduleOnce(TimeSpan initialDelay, Action action, Cancella { Task.Delay(initialDelay, token).ContinueWith(t => { - if(token.IsCancellationRequested) return; + if (token.IsCancellationRequested) return; token.ThrowIfCancellationRequested(); try { action(); } - catch(OperationCanceledException) { } + catch (OperationCanceledException) { } //TODO: Should we log other exceptions? /@hcanber }, token, TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Current); @@ -69,15 +69,15 @@ private void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval Action executeAction = null; executeAction = t => { - if(token.IsCancellationRequested) return; + if (token.IsCancellationRequested) return; try { action(); } - catch(OperationCanceledException) { } + catch (OperationCanceledException) { } //TODO: Should we log other exceptions? 
/@hcanber - if(token.IsCancellationRequested) return; + if (token.IsCancellationRequested) return; Task.Delay(interval, token) .ContinueWith(executeAction, token, TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Current); diff --git a/src/core/Akka/Configuration/Pigeon.conf b/src/core/Akka/Configuration/Pigeon.conf index 11b0b239dee..ee30ea77c81 100644 --- a/src/core/Akka/Configuration/Pigeon.conf +++ b/src/core/Akka/Configuration/Pigeon.conf @@ -94,34 +94,34 @@ akka { unstarted-push-timeout = 10 - # THIS DOES NOT APPLY TO .NET - # + # THIS DOES NOT APPLY TO .NET + # typed { # Default timeout for typed actor methods with non-void return type timeout = 5 } - inbox { - inbox-size = 1000, - default-timeout = 5s - } + inbox { + inbox-size = 1000, + default-timeout = 5s + } # Mapping between ´deployment.router' short names to fully qualified class names router.type-mapping { - from-code = "Akka.Routing.NoRouter" - round-robin-pool = "Akka.Routing.RoundRobinPool" - round-robin-group = "Akka.Routing.RoundRobinGroup" - random-pool = "Akka.Routing.RandomPool" - random-group = "Akka.Routing.RandomGroup" - balancing-pool = "Akka.Routing.BalancingPool" - smallest-mailbox-pool = "Akka.Routing.SmallestMailboxPool" - broadcast-pool = "Akka.Routing.BroadcastPool" - broadcast-group = "Akka.Routing.BroadcastGroup" - scatter-gather-pool = "Akka.Routing.ScatterGatherFirstCompletedPool" - scatter-gather-group = "Akka.Routing.ScatterGatherFirstCompletedGroup" - consistent-hashing-pool = "Akka.Routing.ConsistentHashingPool" - consistent-hashing-group = "Akka.Routing.ConsistentHashingGroup" - } + from-code = "Akka.Routing.NoRouter" + round-robin-pool = "Akka.Routing.RoundRobinPool" + round-robin-group = "Akka.Routing.RoundRobinGroup" + random-pool = "Akka.Routing.RandomPool" + random-group = "Akka.Routing.RandomGroup" + balancing-pool = "Akka.Routing.BalancingPool" + smallest-mailbox-pool = "Akka.Routing.SmallestMailboxPool" + broadcast-pool = "Akka.Routing.BroadcastPool" + broadcast-group = "Akka.Routing.BroadcastGroup" + scatter-gather-pool = "Akka.Routing.ScatterGatherFirstCompletedPool" + scatter-gather-group = "Akka.Routing.ScatterGatherFirstCompletedGroup" + consistent-hashing-pool = "Akka.Routing.ConsistentHashingPool" + consistent-hashing-group = "Akka.Routing.ConsistentHashingGroup" + } deployment { @@ -238,26 +238,26 @@ akka { } } - #used for GUI applications - synchronized-dispatcher { - type = "SynchronizedDispatcher" - throughput = 10 - } + #used for GUI applications + synchronized-dispatcher { + type = "SynchronizedDispatcher" + throughput = 10 + } - task-dispatcher { - type = "Akka.Dispatch.TaskDispatcherConfigurator" - throughput = 30 - } + task-dispatcher { + type = "Akka.Dispatch.TaskDispatcherConfigurator" + throughput = 30 + } - default-fork-join-dispatcher{ - type = ForkJoinDispatcher - throughput = 30 - dedicated-thread-pool{ #settings for Helios.DedicatedThreadPool - thread-count = 3 #number of threads - #deadlock-timeout = 3s #optional timeout for deadlock detection - threadtype = background #values can be "background" or "foreground" - } - } + default-fork-join-dispatcher{ + type = ForkJoinDispatcher + throughput = 30 + dedicated-thread-pool{ #settings for Helios.DedicatedThreadPool + thread-count = 3 #number of threads + #deadlock-timeout = 3s #optional timeout for deadlock detection + threadtype = background #values can be "background" or "foreground" + } + } default-dispatcher { # Must be one of the following @@ -267,7 +267,7 @@ akka { # 
akka.dispatch.DispatcherPrerequisites parameters. # PinnedDispatcher must be used together with executor=thread-pool-executor. type = "Dispatcher" - + # How long time the dispatcher will wait for new actors until it shuts down shutdown-timeout = 1 @@ -386,11 +386,11 @@ akka { # Entries for pluggable serializers and their bindings. - serializers { - json = "Akka.Serialization.NewtonSoftJsonSerializer" - java = "Akka.Serialization.JavaSerializer" # not used, reserves java serializer identifier - bytes = "Akka.Serialization.ByteArraySerializer" - } + serializers { + json = "Akka.Serialization.NewtonSoftJsonSerializer" + java = "Akka.Serialization.JavaSerializer" # not used, reserves java serializer identifier + bytes = "Akka.Serialization.ByteArraySerializer" + } # Class to Serializer binding. You only need to specify the name of an # interface or abstract base class of the messages. In case of ambiguity it diff --git a/src/core/Akka/Dispatch/ForkJoinDispatcher.cs b/src/core/Akka/Dispatch/ForkJoinDispatcher.cs index f31c26e8110..ec042df57df 100644 --- a/src/core/Akka/Dispatch/ForkJoinDispatcher.cs +++ b/src/core/Akka/Dispatch/ForkJoinDispatcher.cs @@ -26,7 +26,8 @@ public ForkJoinDispatcherConfigurator(Config config, IDispatcherPrerequisites pr if (dtp == null || dtp.IsEmpty) throw new ConfigurationException(string.Format("must define section dedicated-thread-pool for ForkJoinDispatcher {0}", config.GetString("id", "unknown"))); var settings = new DedicatedThreadPoolSettings(dtp.GetInt("thread-count"), - DedicatedThreadPoolConfigHelpers.ConfigureThreadType(dtp.GetString("threadtype", ThreadType.Background.ToString())), + DedicatedThreadPoolConfigHelpers.ConfigureThreadType(dtp.GetString("threadtype", ThreadType.Background.ToString())), + config.GetString("id"), DedicatedThreadPoolConfigHelpers.GetSafeDeadlockTimeout(dtp)); _instance = new ForkJoinDispatcher(this, settings); } @@ -48,12 +49,12 @@ public override MessageDispatcher Dispatcher() /// /// my-forkjoin-dispatcher{ /// type = ForkJoinDispatcher - /// throughput = 100 - /// dedicated-thread-pool{ #settings for Helios.DedicatedThreadPool - /// thread-count = 3 #number of threads - /// #deadlock-timeout = 3s #optional timeout for deadlock detection - /// threadtype = background #values can be "background" or "foreground" - /// } + /// throughput = 100 + /// dedicated-thread-pool{ #settings for Helios.DedicatedThreadPool + /// thread-count = 3 #number of threads + /// #deadlock-timeout = 3s #optional timeout for deadlock detection + /// threadtype = background #values can be "background" or "foreground" + /// } /// } /// /// diff --git a/src/core/Akka/Dispatch/SingleThreadDispatcher.cs b/src/core/Akka/Dispatch/SingleThreadDispatcher.cs index 9a207205e8b..a1cd0b78b78 100644 --- a/src/core/Akka/Dispatch/SingleThreadDispatcher.cs +++ b/src/core/Akka/Dispatch/SingleThreadDispatcher.cs @@ -36,6 +36,7 @@ public PinnedDispatcherConfigurator(Config config, IDispatcherPrerequisites prer { _settings = new DedicatedThreadPoolSettings(1, DedicatedThreadPoolConfigHelpers.ConfigureThreadType(dtp.GetString("threadtype", ThreadType.Background.ToString())), + config.GetString("id"), DedicatedThreadPoolConfigHelpers.GetSafeDeadlockTimeout(dtp)); } } diff --git a/src/core/Akka/Dispatch/ThreadPoolBuilder.cs b/src/core/Akka/Dispatch/ThreadPoolBuilder.cs index b23c84c747c..797acd66081 100644 --- a/src/core/Akka/Dispatch/ThreadPoolBuilder.cs +++ b/src/core/Akka/Dispatch/ThreadPoolBuilder.cs @@ -34,7 +34,7 @@ internal static ThreadType ConfigureThreadType(string 
threadType) /// /// Default settings for instances. /// - internal static readonly DedicatedThreadPoolSettings DefaultSingleThreadPoolSettings = new DedicatedThreadPoolSettings(1); + internal static readonly DedicatedThreadPoolSettings DefaultSingleThreadPoolSettings = new DedicatedThreadPoolSettings(1, "DefaultSingleThreadPool"); } /// diff --git a/src/core/Akka/Helios.Concurrency.DedicatedThreadPool.cs b/src/core/Akka/Helios.Concurrency.DedicatedThreadPool.cs index c475314c215..fe414b2bc2d 100644 --- a/src/core/Akka/Helios.Concurrency.DedicatedThreadPool.cs +++ b/src/core/Akka/Helios.Concurrency.DedicatedThreadPool.cs @@ -31,10 +31,12 @@ internal class DedicatedThreadPoolSettings /// public const ThreadType DefaultThreadType = ThreadType.Background; - public DedicatedThreadPoolSettings(int numThreads, TimeSpan? deadlockTimeout = null) : this(numThreads, DefaultThreadType, deadlockTimeout) { } + public DedicatedThreadPoolSettings(int numThreads, string name, TimeSpan? deadlockTimeout = null) + : this(numThreads, DefaultThreadType, name, deadlockTimeout) { } - public DedicatedThreadPoolSettings(int numThreads, ThreadType threadType, TimeSpan? deadlockTimeout = null) + public DedicatedThreadPoolSettings(int numThreads, ThreadType threadType, string name, TimeSpan? deadlockTimeout = null) { + Name = name; ThreadType = threadType; NumThreads = numThreads; DeadlockTimeout = deadlockTimeout; @@ -61,6 +63,8 @@ public DedicatedThreadPoolSettings(int numThreads, ThreadType threadType, TimeSp /// and replaced. /// public TimeSpan? DeadlockTimeout { get; private set; } + + public string Name { get; private set; } } /// @@ -229,24 +233,20 @@ internal DedicatedThreadPoolSupervisor(DedicatedThreadPool pool) //bail in the event of a shutdown if (pool.ShutdownRequested) return; - foreach (var worker in pool.Workers) + for (var i = 0; i < pool.Workers.Length; i++) { - var w = worker; + var w = pool.Workers[i]; if (Interlocked.Exchange(ref w.Status, 0) == 0) { //this requests a new new worker and calls ForceTermination on the old worker //Potential problem here: if the thread is not dead for real, we might abort real work.. 
there is no way to tell the difference between //deadlocked or just very long running tasks - var newWorker = pool.RequestThread(w); + var newWorker = pool.RequestThread(w, i); continue; } //schedule heartbeat action to worker - worker.AddWork(() => - { - Interlocked.Increment(ref w.Status); - }); - + pool.Workers[i].AddWork(() => Interlocked.Increment(ref w.Status)); } }, null, pool.Settings.DeadlockTimeout.Value, pool.Settings.DeadlockTimeout.Value); } @@ -269,9 +269,9 @@ public DedicatedThreadPool(DedicatedThreadPoolSettings settings) Settings = settings; Workers = Enumerable.Repeat(0, settings.NumThreads).Select(_ => new WorkerQueue()).ToArray(); - foreach (var worker in Workers) + for (var i = 0; i < Workers.Length; i++) { - new PoolWorker(worker, this, false); + new PoolWorker(Workers[i], this, false, i); } _supervisor = new DedicatedThreadPoolSupervisor(this); } @@ -288,7 +288,7 @@ public DedicatedThreadPool(DedicatedThreadPoolSettings settings) /// /// index for round-robin load-balancing across worker threads /// - private volatile int _index = 0; + private volatile int _index; private readonly DedicatedThreadPoolSupervisor _supervisor; @@ -299,9 +299,9 @@ private void Shutdown() ShutdownRequested = true; } - private PoolWorker RequestThread(WorkerQueue unclaimedQueue, bool errorRecovery = false) + private PoolWorker RequestThread(WorkerQueue unclaimedQueue, int workerNumber, bool errorRecovery = false) { - var worker = new PoolWorker(unclaimedQueue, this, errorRecovery); + var worker = new PoolWorker(unclaimedQueue, this, errorRecovery, workerNumber); return worker; } @@ -390,19 +390,22 @@ internal class PoolWorker { private WorkerQueue _work; private DedicatedThreadPool _pool; + private readonly int _workerNumber; private BlockingCollection _workQueue; private readonly Thread _thread; - public PoolWorker(WorkerQueue work, DedicatedThreadPool pool, bool errorRecovery) + public PoolWorker(WorkerQueue work, DedicatedThreadPool pool, bool errorRecovery, int workerNumber) { _work = work; _pool = pool; + _workerNumber = workerNumber; _workQueue = _work.WorkQueue; _work.ReplacePoolWorker(this, errorRecovery); - + _thread = new Thread(() => { + Thread.CurrentThread.Name = string.Format("{0}_{1}", pool.Settings.Name, _workerNumber); CurrentWorker = this; foreach (var action in _workQueue.GetConsumingEnumerable()) @@ -430,7 +433,7 @@ public PoolWorker(WorkerQueue work, DedicatedThreadPool pool, bool errorRecovery private void Failover(bool errorRecovery = false) { /* request a new thread then shut down */ - _pool.RequestThread(_work, errorRecovery); + _pool.RequestThread(_work, _workerNumber, errorRecovery); CurrentWorker = null; _work = null; _workQueue = null; diff --git a/src/examples/Cluster/Samples.Cluster.Simple/App.config b/src/examples/Cluster/Samples.Cluster.Simple/App.config index ac5cd466feb..702c085578b 100644 --- a/src/examples/Cluster/Samples.Cluster.Simple/App.config +++ b/src/examples/Cluster/Samples.Cluster.Simple/App.config @@ -27,7 +27,7 @@ "akka.tcp://ClusterSystem@127.0.0.1:2551", "akka.tcp://ClusterSystem@127.0.0.1:2552"] - auto-down-unreachable-after = 10s + auto-down-unreachable-after = 30s } } ]]> diff --git a/src/examples/Cluster/Samples.Cluster.Simple/Program.cs b/src/examples/Cluster/Samples.Cluster.Simple/Program.cs index 112eb5aacf5..0b590ee7048 100644 --- a/src/examples/Cluster/Samples.Cluster.Simple/Program.cs +++ b/src/examples/Cluster/Samples.Cluster.Simple/Program.cs @@ -17,7 +17,7 @@ class Program { private static void Main(string[] args) { - 
StartUp(args.Length == 0 ? new String[] {"2551", "2552", "0"} : args); + StartUp(args.Length == 0 ? new String[] { "2551", "2552", "0" } : args); Console.WriteLine("Press any key to exit"); Console.ReadKey(); } @@ -36,7 +36,7 @@ public static void StartUp(string[] ports) var system = ActorSystem.Create("ClusterSystem", config); //create an actor that handles cluster domain events - system.ActorOf(Props.Create(typeof (SimpleClusterListener)), "clusterListener"); + system.ActorOf(Props.Create(typeof(SimpleClusterListener)), "clusterListener"); } } } From 224a578d46d7a3e94ddf2e8a2c17cb5cfe41a923 Mon Sep 17 00:00:00 2001 From: Bartosz Sypytkowski Date: Sun, 17 May 2015 14:12:32 +0300 Subject: [PATCH 38/66] renamed GuaranteedDelivery classes to AtLeastOnceDelivery --- src/core/Akka.Persistence.FSharp/FsApi.fs | 8 +- .../Akka.Persistence.Tests.csproj | 22 +- ...pec.cs => AtLeastOnceDeliveryCrashSpec.cs} | 12 +- ...c.cs => AtLeastOnceDeliveryFailureSpec.cs} | 18 +- ...verySpec.cs => AtLeastOnceDeliverySpec.cs} | 28 +- src/core/Akka.Persistence.Tests/CHANGES.txt | 99 ++++ .../licenses/license.txt | 31 + .../licenses/protoc-license.txt | 36 ++ .../Akka.Persistence.Tests/packages.config | 11 +- .../google/protobuf/csharp_options.proto | 115 ++++ .../protos/google/protobuf/descriptor.proto | 533 ++++++++++++++++++ .../protos/tutorial/addressbook.proto | 31 + .../Akka.Persistence/Akka.Persistence.csproj | 2 +- ...teedDelivery.cs => AtLeastOnceDelivery.cs} | 34 +- src/core/Akka.Persistence/Persistence.cs | 8 +- src/core/Akka.Persistence/README.md | 2 +- .../Serialization/MessageSerializer.cs | 16 +- ....cs => AtLeastOnceDeliveryExampleActor.cs} | 12 +- .../PersistenceExample.csproj | 2 +- src/examples/PersistenceExample/Program.cs | 8 +- 20 files changed, 945 insertions(+), 83 deletions(-) rename src/core/Akka.Persistence.Tests/{GuaranteedDeliveryCrashSpec.cs => AtLeastOnceDeliveryCrashSpec.cs} (90%) rename src/core/Akka.Persistence.Tests/{GuaranteedDeliveryFailureSpec.cs => AtLeastOnceDeliveryFailureSpec.cs} (94%) rename src/core/Akka.Persistence.Tests/{GuaranteedDeliverySpec.cs => AtLeastOnceDeliverySpec.cs} (94%) create mode 100644 src/core/Akka.Persistence.Tests/CHANGES.txt create mode 100644 src/core/Akka.Persistence.Tests/licenses/license.txt create mode 100644 src/core/Akka.Persistence.Tests/licenses/protoc-license.txt create mode 100644 src/core/Akka.Persistence.Tests/protos/google/protobuf/csharp_options.proto create mode 100644 src/core/Akka.Persistence.Tests/protos/google/protobuf/descriptor.proto create mode 100644 src/core/Akka.Persistence.Tests/protos/tutorial/addressbook.proto rename src/core/Akka.Persistence/{GuaranteedDelivery.cs => AtLeastOnceDelivery.cs} (90%) rename src/examples/PersistenceExample/{GuaranteedDeliveryExampleActor.cs => AtLeastOnceDeliveryExampleActor.cs} (89%) diff --git a/src/core/Akka.Persistence.FSharp/FsApi.fs b/src/core/Akka.Persistence.FSharp/FsApi.fs index f0d70de2204..082c194bb37 100644 --- a/src/core/Akka.Persistence.FSharp/FsApi.fs +++ b/src/core/Akka.Persistence.FSharp/FsApi.fs @@ -279,8 +279,8 @@ type Delivery<'Command, 'Event, 'State> = abstract Deliver: (int64 -> obj) -> ActorPath -> unit abstract ConfirmDelivery: int64 -> bool - abstract GetDeliverySnapshot: unit -> GuaranteedDeliverySnapshot - abstract SetDeliverySnapshot: GuaranteedDeliverySnapshot -> unit + abstract GetDeliverySnapshot: unit -> AtLeastOnceDeliverySnapshot + abstract SetDeliverySnapshot: AtLeastOnceDeliverySnapshot -> unit abstract UnconfirmedCount: unit -> int type 
DeliveryAggregate<'Command, 'Event, 'State> = { @@ -290,13 +290,13 @@ type DeliveryAggregate<'Command, 'Event, 'State> = { } type Deliverer<'Command, 'Event, 'State>(aggregate: DeliveryAggregate<'Command, 'Event, 'State>, name: PersistenceId) as this = - inherit GuaranteedDeliveryActor() + inherit AtLeastOnceDeliveryActor() let mutable deferables = [] let mutable state: 'State = aggregate.state let mailbox = let self' = this.Self - let context = GuaranteedDeliveryActor.Context :> IActorContext + let context = AtLeastOnceDeliveryActor.Context :> IActorContext let updateState (updater: 'Event -> 'State) e : unit = state <- updater e () diff --git a/src/core/Akka.Persistence.Tests/Akka.Persistence.Tests.csproj b/src/core/Akka.Persistence.Tests/Akka.Persistence.Tests.csproj index 2af424bb17b..03471c551af 100644 --- a/src/core/Akka.Persistence.Tests/Akka.Persistence.Tests.csproj +++ b/src/core/Akka.Persistence.Tests/Akka.Persistence.Tests.csproj @@ -34,6 +34,14 @@ 4 + + ..\..\packages\Google.ProtocolBuffers.2.4.1.521\lib\net40\Google.ProtocolBuffers.dll + True + + + ..\..\packages\Google.ProtocolBuffers.2.4.1.521\lib\net40\Google.ProtocolBuffers.Serialization.dll + True + @@ -51,9 +59,9 @@ - - - + + + @@ -91,6 +99,14 @@ + + + + + + + + diff --git a/src/core/Akka.Persistence.Tests/GuaranteedDeliveryCrashSpec.cs b/src/core/Akka.Persistence.Tests/AtLeastOnceDeliveryCrashSpec.cs similarity index 90% rename from src/core/Akka.Persistence.Tests/GuaranteedDeliveryCrashSpec.cs rename to src/core/Akka.Persistence.Tests/AtLeastOnceDeliveryCrashSpec.cs index 66134766aa4..8e7b7dd85fc 100644 --- a/src/core/Akka.Persistence.Tests/GuaranteedDeliveryCrashSpec.cs +++ b/src/core/Akka.Persistence.Tests/AtLeastOnceDeliveryCrashSpec.cs @@ -1,5 +1,5 @@ //----------------------------------------------------------------------- -// +// // Copyright (C) 2009-2015 Typesafe Inc. 
// Copyright (C) 2013-2015 Akka.NET project // @@ -13,7 +13,7 @@ namespace Akka.Persistence.Tests { - public class GuaranteedDeliveryCrashSpec : AkkaSpec + public class AtLeastOnceDeliveryCrashSpec : AkkaSpec { #region internal test classes @@ -67,7 +67,7 @@ public SendingMessage(long deliveryId, bool isRecovering) public bool IsRecovering { get; private set; } } - internal class CrashingActor : GuaranteedDeliveryActor + internal class CrashingActor : AtLeastOnceDeliveryActor { private readonly IActorRef _testProbe; private ILoggingAdapter _adapter; @@ -116,13 +116,13 @@ private void Send() #endregion - public GuaranteedDeliveryCrashSpec() - : base(PersistenceSpec.Configuration("inmem", "GuaranteedDeliveryCrashSpec", serialization: "off")) + public AtLeastOnceDeliveryCrashSpec() + : base(PersistenceSpec.Configuration("inmem", "AtLeastOnceDeliveryCrashSpec", serialization: "off")) { } [Fact(Skip = "FIXME")] - public void GuaranteedDelivery_should_not_send_when_actor_crashes() + public void AtLeastOnceDelivery_should_not_send_when_actor_crashes() { var testProbe = CreateTestProbe(); var supervisor = Sys.ActorOf(Props.Create(() => new StoppingStrategySupervisor(testProbe.Ref)), "supervisor"); diff --git a/src/core/Akka.Persistence.Tests/GuaranteedDeliveryFailureSpec.cs b/src/core/Akka.Persistence.Tests/AtLeastOnceDeliveryFailureSpec.cs similarity index 94% rename from src/core/Akka.Persistence.Tests/GuaranteedDeliveryFailureSpec.cs rename to src/core/Akka.Persistence.Tests/AtLeastOnceDeliveryFailureSpec.cs index f78cdebeda9..27fc3691273 100644 --- a/src/core/Akka.Persistence.Tests/GuaranteedDeliveryFailureSpec.cs +++ b/src/core/Akka.Persistence.Tests/AtLeastOnceDeliveryFailureSpec.cs @@ -1,5 +1,5 @@ //----------------------------------------------------------------------- -// +// // Copyright (C) 2009-2015 Typesafe Inc. 
// Copyright (C) 2013-2015 Akka.NET project // @@ -20,12 +20,12 @@ static class ChaosSupportExtensions { private static readonly Random random = new Random(); - internal static void Add(this GuaranteedDeliveryFailureSpec.IChaosSupport chaos, int i) + internal static void Add(this AtLeastOnceDeliveryFailureSpec.IChaosSupport chaos, int i) { chaos.State.Add(i); - if (chaos.State.Count >= GuaranteedDeliveryFailureSpec.NumberOfMessages) + if (chaos.State.Count >= AtLeastOnceDeliveryFailureSpec.NumberOfMessages) { - chaos.Probe.Tell(new GuaranteedDeliveryFailureSpec.Done(chaos.State.ToArray())); + chaos.Probe.Tell(new AtLeastOnceDeliveryFailureSpec.Done(chaos.State.ToArray())); } } @@ -35,7 +35,7 @@ internal static bool ShouldFail(double rate) } } - public class GuaranteedDeliveryFailureSpec : AkkaSpec + public class AtLeastOnceDeliveryFailureSpec : AkkaSpec { #region internal test classes @@ -135,7 +135,7 @@ internal interface IChaosSupport List State { get; set; } } - internal class ChaosSender : GuaranteedDeliveryActor + internal class ChaosSender : AtLeastOnceDeliveryActor { private readonly string _persistenceId; private readonly IActorRef _destination; @@ -293,7 +293,7 @@ public ChaosApp(IActorRef probe) Receive(_ => { - for (int i = 1; i < GuaranteedDeliveryFailureSpec.NumberOfMessages; i++) + for (int i = 1; i < AtLeastOnceDeliveryFailureSpec.NumberOfMessages; i++) { _sender.Tell(i); } @@ -320,13 +320,13 @@ public ChaosApp(IActorRef probe) internal const int NumberOfMessages = 10; - public GuaranteedDeliveryFailureSpec() + public AtLeastOnceDeliveryFailureSpec() : base(FailureSpecConfig.WithFallback(Persistence.DefaultConfig())) { } [Fact(Skip = "FIXME")] - public void GuaranteedDelivery_must_tolerate_and_recover_from_random_failures() + public void AtLeastOnceDelivery_must_tolerate_and_recover_from_random_failures() { var chaos = Sys.ActorOf(Props.Create(() => new ChaosApp(TestActor)), "chaosApp"); chaos.Tell(Start.Instance); diff --git a/src/core/Akka.Persistence.Tests/GuaranteedDeliverySpec.cs b/src/core/Akka.Persistence.Tests/AtLeastOnceDeliverySpec.cs similarity index 94% rename from src/core/Akka.Persistence.Tests/GuaranteedDeliverySpec.cs rename to src/core/Akka.Persistence.Tests/AtLeastOnceDeliverySpec.cs index 782bd2a77eb..bc3784c40b3 100644 --- a/src/core/Akka.Persistence.Tests/GuaranteedDeliverySpec.cs +++ b/src/core/Akka.Persistence.Tests/AtLeastOnceDeliverySpec.cs @@ -1,5 +1,5 @@ //----------------------------------------------------------------------- -// +// // Copyright (C) 2009-2015 Typesafe Inc. 
// Copyright (C) 2013-2015 Akka.NET project // @@ -15,12 +15,12 @@ namespace Akka.Persistence.Tests { - public class GuaranteedDeliverySpec : PersistenceSpec + public class AtLeastOnceDeliverySpec : PersistenceSpec { #region internal test classes - class Sender : GuaranteedDeliveryActor + class Sender : AtLeastOnceDeliveryActor { private readonly IActorRef _testActor; private readonly string _name; @@ -291,23 +291,23 @@ public bool Equals(ActionAck other) [Serializable] sealed class Snap { - public Snap(GuaranteedDeliverySnapshot deliverySnapshot) + public Snap(AtLeastOnceDeliverySnapshot deliverySnapshot) { DeliverySnapshot = deliverySnapshot; } - public GuaranteedDeliverySnapshot DeliverySnapshot { get; private set; } + public AtLeastOnceDeliverySnapshot DeliverySnapshot { get; private set; } } #endregion - public GuaranteedDeliverySpec() - : base(PersistenceSpec.Configuration("inmem", "GuaranteedDeliverySpec")) + public AtLeastOnceDeliverySpec() + : base(PersistenceSpec.Configuration("inmem", "AtLeastOnceDeliverySpec")) { } [Fact] - public void GuaranteedDelivery_must_deliver_messages_in_order_when_nothing_is_lost() + public void AtLeastOnceDelivery_must_deliver_messages_in_order_when_nothing_is_lost() { var probe = CreateTestProbe(); var destinations = new Dictionary { { "A", Sys.ActorOf(Props.Create(() => new Destination(probe.Ref))).Path } }; @@ -320,7 +320,7 @@ public void GuaranteedDelivery_must_deliver_messages_in_order_when_nothing_is_lo } [Fact] - public void GuaranteedDelivery_must_redeliver_lost_messages() + public void AtLeastOnceDelivery_must_redeliver_lost_messages() { var probe = CreateTestProbe(); var dest = Sys.ActorOf(Props.Create(() => new Destination(probe.Ref))); @@ -347,7 +347,7 @@ public void GuaranteedDelivery_must_redeliver_lost_messages() } [Fact] - public void GuaranteedDelivery_must_redeliver_lost_messages_after_restart() + public void AtLeastOnceDelivery_must_redeliver_lost_messages_after_restart() { var probe = CreateTestProbe(); var dest = Sys.ActorOf(Props.Create(() => new Destination(probe.Ref))); @@ -381,7 +381,7 @@ public void GuaranteedDelivery_must_redeliver_lost_messages_after_restart() } [Fact] - public void GuaranteedDelivery_must_resend_replayed_deliveries_with_an_initially_in_order_strategy_before_delivering_fresh_messages() + public void AtLeastOnceDelivery_must_resend_replayed_deliveries_with_an_initially_in_order_strategy_before_delivering_fresh_messages() { var probe = CreateTestProbe(); var dest = Sys.ActorOf(Props.Create(() => new Destination(probe.Ref))); @@ -420,7 +420,7 @@ public void GuaranteedDelivery_must_resend_replayed_deliveries_with_an_initially } [Fact] - public void GuaranteedDelivery_must_restore_state_from_snapshot() + public void AtLeastOnceDelivery_must_restore_state_from_snapshot() { var probe = CreateTestProbe(); var dest = Sys.ActorOf(Props.Create(() => new Destination(probe.Ref))); @@ -459,7 +459,7 @@ public void GuaranteedDelivery_must_restore_state_from_snapshot() } [Fact] - public void GuaranteedDelivery_must_warn_about_unconfirmed_messages() + public void AtLeastOnceDelivery_must_warn_about_unconfirmed_messages() { var probeA = CreateTestProbe(); var probeB = CreateTestProbe(); @@ -487,7 +487,7 @@ public void GuaranteedDelivery_must_warn_about_unconfirmed_messages() } [Fact(Skip = "FIXME")] - public void GuaranteedDelivery_must_redeliver_many_lost_messages() + public void AtLeastOnceDelivery_must_redeliver_many_lost_messages() { var probeA = CreateTestProbe(); var probeB = CreateTestProbe(); diff --git 
a/src/core/Akka.Persistence.Tests/CHANGES.txt b/src/core/Akka.Persistence.Tests/CHANGES.txt new file mode 100644 index 00000000000..d80368c78f2 --- /dev/null +++ b/src/core/Akka.Persistence.Tests/CHANGES.txt @@ -0,0 +1,99 @@ +=============================================================================== +Welcome to the C# port of Google Protocol Buffers, written by Jon Skeet +(skeet@pobox.com) based on the work of many talented people. + +For more information about this port, visit its homepage: +http://protobuf-csharp-port.googlecode.com + +For more information about Protocol Buffers in general, visit the project page +for the C++, Java and Python project: +http://protobuf.googlecode.com +=============================================================================== +RELEASE NOTES - Version 2.4.1.473 +=============================================================================== + +Features: +- Added option service_generator_type to control service generation with + NONE, GENERIC, INTERFACE, or IRPCDISPATCH +- Added interfaces IRpcDispatch and IRpcServerStub to provide for blocking + services and implementations. +- Added ProtoGen.exe command-line argument "--protoc_dir=" to specify the + location of protoc.exe. +- Extracted interfaces for ICodedInputStream and ICodedOutputStream to allow + custom implementation of writers with both speed and size optimizations. +- Addition of the "Google.ProtoBuffers.Serialization" assembly to support + reading and writing messages to/from XML, JSON, IDictionary<,> and others. +- Several performance related fixes and tweeks +- Issue 3: Add option to mark generated code with attribute +- Issue 20: Support for decorating classes [Serializable] +- Issue 21: Decorate fields with [deprecated=true] as [System.Obsolete] +- Issue 22: Reusable Builder classes +- Issue 24: Support for using Json/Xml formats with ICodedInputStream +- Issue 25: Added support for NuGet packages +- Issue 31: Upgraded protoc.exe and descriptor to 2.4.1 + +Fixes: +- Issue 13: Message with Field same name as message causes uncompilable .cs +- Issue 16: Does not integrate well with other tooling +- Issue 19: Support for negative enum values +- Issue 26: AddRange in GeneratedBuilder iterates twice. +- Issue 27: Remove XML documentation output from test projects to clear + warnings/errors. +- Issue 28: Circular message dependencies result in null default values for + Message fields. +- Issue 29: Message classes generated have a public default constructor. You + can disable private ctor generation with the option generate_private_ctor. +- Issue 35: Fixed a bug in ProtoGen handling of arguments with trailing \ +- Big-endian support for float, and double on Silverlight +- Packed and Unpacked parsing allow for all repeated, as per version 2.3 +- Fix for leaving Builder a public ctor on internal classes for use with + generic "where T: new()" constraints. + +Other: +- Changed the code signing key to a privately held key +- Reformatted all code and line-endings to C# defaults +- Reworking of performance benchmarks to produce reliable results, option /v2 +- Issue 34: Silverlight assemblies are now unit tested + +=============================================================================== +RELEASE NOTES - Version 2.3.0.277 +=============================================================================== + +Features: +- Added cls_compliance option to generate attributes indicating + non-CLS-compliance. +- Added file_extension option to control the generated output file's extension. 
+- Added umbrella_namespace option to place the umbrella class into a nested + namespace to address issues with proto files having the same name as a + message it contains. +- Added output_directory option to set the output path for the source file(s). +- Added ignore_google_protobuf option to avoid generating code for includes + from the google.protobuf package. +- Added the LITE framework (Google.ProtoBuffersLite.dll) and the ability to + generate code with "option optimize_for = LITE_RUNTIME;". +- Added ability to invoke protoc.exe from within ProtoGen.exe. +- Upgraded to protoc.exe (2.3) compiler. + +Fixes: +- Issue 9: Class cannot be static and sealed error +- Issue 12: default value for enumerate fields must be filled out + +Other: +- Rewrite of build using MSBbuild instead of NAnt +- Moved to NUnit Version 2.2.8.0 +- Changed to using secure .snk for releases + +=============================================================================== +RELEASE NOTES - Version 0.9.1 +=============================================================================== + +Fixes: +- issue 10: Incorrect encoding of packed fields when serialized + +=============================================================================== +RELEASE NOTES - Version 0.9.0 +=============================================================================== + +- Initial release + +=============================================================================== \ No newline at end of file diff --git a/src/core/Akka.Persistence.Tests/licenses/license.txt b/src/core/Akka.Persistence.Tests/licenses/license.txt new file mode 100644 index 00000000000..b8e773b2e05 --- /dev/null +++ b/src/core/Akka.Persistence.Tests/licenses/license.txt @@ -0,0 +1,31 @@ +Protocol Buffers - Google's data interchange format +Copyright 2008-2010 Google Inc. All rights reserved. +http://github.com/jskeet/dotnet-protobufs/ +Original C++/Java/Python code: +http://code.google.com/p/protobuf/ + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
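For readers following the GuaranteedDelivery -> AtLeastOnceDelivery rename in this patch, below is a minimal, illustrative C# sketch of how the renamed base class is typically used. It is not part of the patch: it only assumes the members visible in FsApi.fs earlier in this commit (AtLeastOnceDeliveryActor, Deliver, ConfirmDelivery) with the usual shapes Deliver(ActorPath, Func<long, object>) and ConfirmDelivery(long); the PersistenceId/ReceiveCommand/ReceiveRecover overrides and every message type shown (DeliverOrder, Work, Confirm, MsgSent, MsgConfirmed) are illustrative assumptions, not code from this repository.

using System;
using Akka.Actor;
using Akka.Persistence;

// Illustrative message types; not part of the Akka.NET code base.
public class DeliverOrder { public DeliverOrder(string payload) { Payload = payload; } public string Payload { get; private set; } }
public class Work { public Work(long deliveryId, string payload) { DeliveryId = deliveryId; Payload = payload; } public long DeliveryId { get; private set; } public string Payload { get; private set; } }
public class Confirm { public Confirm(long deliveryId) { DeliveryId = deliveryId; } public long DeliveryId { get; private set; } }
public class MsgSent { public MsgSent(string payload) { Payload = payload; } public string Payload { get; private set; } }
public class MsgConfirmed { public MsgConfirmed(long deliveryId) { DeliveryId = deliveryId; } public long DeliveryId { get; private set; } }

public class ExampleSender : AtLeastOnceDeliveryActor
{
    private readonly ActorPath _destination;

    public ExampleSender(ActorPath destination) { _destination = destination; }

    public override string PersistenceId { get { return "example-sender"; } }

    protected override bool ReceiveCommand(object message)
    {
        if (message is DeliverOrder)
        {
            var order = (DeliverOrder)message;
            // Persist the intent first, then register it with the at-least-once delivery buffer.
            Persist(new MsgSent(order.Payload), sent =>
                Deliver(_destination, deliveryId => new Work(deliveryId, sent.Payload)));
            return true;
        }
        if (message is Confirm)
        {
            var confirm = (Confirm)message;
            // The destination echoed the delivery id back; stop redelivering that message.
            Persist(new MsgConfirmed(confirm.DeliveryId), c => ConfirmDelivery(c.DeliveryId));
            return true;
        }
        return false;
    }

    protected override bool ReceiveRecover(object message)
    {
        if (message is MsgSent)
        {
            // Replayed sends are re-registered so unconfirmed deliveries are retried after restart.
            var sent = (MsgSent)message;
            Deliver(_destination, deliveryId => new Work(deliveryId, sent.Payload));
            return true;
        }
        if (message is MsgConfirmed)
        {
            ConfirmDelivery(((MsgConfirmed)message).DeliveryId);
            return true;
        }
        return false;
    }
}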
diff --git a/src/core/Akka.Persistence.Tests/licenses/protoc-license.txt b/src/core/Akka.Persistence.Tests/licenses/protoc-license.txt new file mode 100644 index 00000000000..c779cb0e1ed --- /dev/null +++ b/src/core/Akka.Persistence.Tests/licenses/protoc-license.txt @@ -0,0 +1,36 @@ +protoc.exe was built from the original source at http://code.google.com/p/protobuf/ +The licence for this code is as follows: + +Copyright 2008, Google Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. \ No newline at end of file diff --git a/src/core/Akka.Persistence.Tests/packages.config b/src/core/Akka.Persistence.Tests/packages.config index 3109db107fb..769769641c3 100644 --- a/src/core/Akka.Persistence.Tests/packages.config +++ b/src/core/Akka.Persistence.Tests/packages.config @@ -1,8 +1,9 @@  - - - - - + + + + + + \ No newline at end of file diff --git a/src/core/Akka.Persistence.Tests/protos/google/protobuf/csharp_options.proto b/src/core/Akka.Persistence.Tests/protos/google/protobuf/csharp_options.proto new file mode 100644 index 00000000000..152df766f03 --- /dev/null +++ b/src/core/Akka.Persistence.Tests/protos/google/protobuf/csharp_options.proto @@ -0,0 +1,115 @@ +// Extra options for C# generator + +import "google/protobuf/descriptor.proto"; + +package google.protobuf; + +message CSharpFileOptions { + + // Namespace for generated classes; defaults to the package. + optional string namespace = 1; + + // Name of the "umbrella" class used for metadata about all + // the messages within this file. Default is based on the name + // of the file. + optional string umbrella_classname = 2; + + // Whether classes should be public (true) or internal (false) + optional bool public_classes = 3 [default = true]; + + // Whether to generate a single file for everything within the + // .proto file (false), or one file per message (true). 
+ // This option is not currently honored; please log a feature + // request if you really want it. + optional bool multiple_files = 4; + + // Whether to nest messages within a single umbrella class (true) + // or create the umbrella class as a peer, with messages as + // top-level classes in the namespace (false) + optional bool nest_classes = 5; + + // Generate appropriate support for Code Contracts + // (Ongoing; support should improve over time) + optional bool code_contracts = 6; + + // Create subdirectories for namespaces, e.g. namespace "Foo.Bar" + // would generate files within [output directory]/Foo/Bar + optional bool expand_namespace_directories = 7; + + // Generate attributes indicating non-CLS-compliance + optional bool cls_compliance = 8 [default = true]; + + // Generate messages/builders with the [Serializable] attribute + optional bool add_serializable = 9 [default = false]; + + // Generates a private ctor for Message types + optional bool generate_private_ctor = 10 [default = true]; + + // The extension that should be appended to the umbrella_classname when creating files. + optional string file_extension = 221 [default = ".cs"]; + + // A nested namespace for the umbrella class. Helpful for name collisions caused by + // umbrella_classname conflicting with an existing type. This will be automatically + // set to 'Proto' if a collision is detected with types being generated. This value + // is ignored when nest_classes == true + optional string umbrella_namespace = 222; + + // The output path for the source file(s) generated + optional string output_directory = 223 [default = "."]; + + // Will ignore the type generations and remove dependencies for the descriptor proto + // files that declare their package to be "google.protobuf" + optional bool ignore_google_protobuf = 224 [default = false]; + + // Controls how services are generated, GENERIC is the deprecated original implementation + // INTERFACE generates service interfaces only, RPCINTEROP generates interfaces and + // implementations using the included Windows RPC interop libarary. + optional CSharpServiceType service_generator_type = 225 [default = NONE]; + + // Used to add the System.Runtime.CompilerServices.CompilerGeneratedAttribute and + // System.CodeDom.Compiler.GeneratedCodeAttribute attributes to generated code. + optional bool generated_code_attributes = 226 [default = false]; +} + +enum CSharpServiceType { + // Services are ignored by the generator + NONE = 0; + // Generates the original Java generic service implementations + GENERIC = 1; + // Generates an interface for the service and nothing else + INTERFACE = 2; + // Generates an interface for the service and client/server wrappers for the interface + IRPCDISPATCH = 3; +} + +extend FileOptions { + optional CSharpFileOptions csharp_file_options = 1000; +} + +extend FieldOptions { + optional CSharpFieldOptions csharp_field_options = 1000; +} + +message CSharpFieldOptions { + // Provides the ability to override the name of the property + // generated for this field. This is applied to all properties + // and methods to do with this field, including HasFoo, FooCount, + // FooList etc. 
+ optional string property_name = 1; +} + +message CSharpServiceOptions { + optional string interface_id = 1; +} + +extend ServiceOptions { + optional CSharpServiceOptions csharp_service_options = 1000; +} + +message CSharpMethodOptions { + optional int32 dispatch_id = 1; +} + +extend MethodOptions { + optional CSharpMethodOptions csharp_method_options = 1000; +} \ No newline at end of file diff --git a/src/core/Akka.Persistence.Tests/protos/google/protobuf/descriptor.proto b/src/core/Akka.Persistence.Tests/protos/google/protobuf/descriptor.proto new file mode 100644 index 00000000000..233f879410e --- /dev/null +++ b/src/core/Akka.Persistence.Tests/protos/google/protobuf/descriptor.proto @@ -0,0 +1,533 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// http://code.google.com/p/protobuf/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + + + +package google.protobuf; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + + // All top-level definitions in this file. 
+ repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field whithout harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; + optional int32 end = 2; + } + repeated ExtensionRange extension_range = 5; + + optional MessageOptions options = 7; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + TYPE_INT64 = 3; // Not ZigZag encoded. Negative numbers + // take 10 bytes. Use TYPE_SINT64 if negative + // values are likely. + TYPE_UINT64 = 4; + TYPE_INT32 = 5; // Not ZigZag encoded. Negative numbers + // take 10 bytes. Use TYPE_SINT32 if negative + // values are likely. + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + TYPE_GROUP = 10; // Tag-delimited aggregate. + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + }; + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + // TODO(sanjay): Should we add LABEL_MAP? + }; + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be either TYPE_ENUM or TYPE_MESSAGE. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + optional FieldOptions options = 8; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. 
+message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. +message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; +} + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail kenton@google.com to reserve extension +// numbers. Simply tell me how many you need and I'll send you back a +// set of numbers to use -- there's no need to explain how you intend to +// use them. If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default=false]; + + // If set true, then the Java code generator will generate equals() and + // hashCode() methods for all messages defined in the .proto file. This is + // purely a speed optimization, as the AbstractMessage base class includes + // reflection-based implementations of these methods. 
+ optional bool java_generate_equals_and_hash = 20 [default=false]; + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default=SPEED]; + + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of proto2. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default=false]; + optional bool java_generic_services = 17 [default=false]; + optional bool py_generic_services = 18 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default=false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default=false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. 
Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. + optional bool packed = 2; + + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default=false]; + + // EXPERIMENTAL. DO NOT USE. + // For "map" fields, the name of the field in the enclosed type that + // is the key for this map. For example, suppose we have: + // message Item { + // required string name = 1; + // required string value = 2; + // } + // message Config { + // repeated Item items = 1 [experimental_map_key="name"]; + // } + // In this situation, the map key for Item will be set to "name". + // TODO: Fully-implement this, then remove the "experimental_" prefix. + optional string experimental_map_key = 9; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". 
+ message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendent. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. 
For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + repeated int32 path = 1 [packed=true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed=true]; + + // TODO(kenton): Record comments appearing before and after the + // declaration. + } +} diff --git a/src/core/Akka.Persistence.Tests/protos/tutorial/addressbook.proto b/src/core/Akka.Persistence.Tests/protos/tutorial/addressbook.proto new file mode 100644 index 00000000000..5abe35ce39b --- /dev/null +++ b/src/core/Akka.Persistence.Tests/protos/tutorial/addressbook.proto @@ -0,0 +1,31 @@ +package tutorial; + +import "google/protobuf/csharp_options.proto"; +option (google.protobuf.csharp_file_options).namespace = "Google.ProtocolBuffers.Examples.AddressBook"; +option (google.protobuf.csharp_file_options).umbrella_classname = "AddressBookProtos"; + +option optimize_for = SPEED; + +message Person { + required string name = 1; + required int32 id = 2; // Unique ID number for this person. + optional string email = 3; + + enum PhoneType { + MOBILE = 0; + HOME = 1; + WORK = 2; + } + + message PhoneNumber { + required string number = 1; + optional PhoneType type = 2 [default = HOME]; + } + + repeated PhoneNumber phone = 4; +} + +// Our address book file is just one of these. +message AddressBook { + repeated Person person = 1; +} diff --git a/src/core/Akka.Persistence/Akka.Persistence.csproj b/src/core/Akka.Persistence/Akka.Persistence.csproj index f20788c1198..98576915575 100644 --- a/src/core/Akka.Persistence/Akka.Persistence.csproj +++ b/src/core/Akka.Persistence/Akka.Persistence.csproj @@ -54,7 +54,7 @@ Properties\SharedAssemblyInfo.cs - + diff --git a/src/core/Akka.Persistence/GuaranteedDelivery.cs b/src/core/Akka.Persistence/AtLeastOnceDelivery.cs similarity index 90% rename from src/core/Akka.Persistence/GuaranteedDelivery.cs rename to src/core/Akka.Persistence/AtLeastOnceDelivery.cs index d8e3d093da5..940f05f583f 100644 --- a/src/core/Akka.Persistence/GuaranteedDelivery.cs +++ b/src/core/Akka.Persistence/AtLeastOnceDelivery.cs @@ -1,5 +1,5 @@ //----------------------------------------------------------------------- -// +// // Copyright (C) 2009-2015 Typesafe Inc. // Copyright (C) 2013-2015 Akka.NET project // @@ -19,15 +19,15 @@ namespace Akka.Persistence #region Messages /// - /// Snapshot of a current state. Can be retrieved with - /// and saved with . + /// Snapshot of a current state. Can be retrieved with + /// and saved with . /// During recovery the snapshot received in should be sent with - /// . + /// . 
/// [Serializable] - public sealed class GuaranteedDeliverySnapshot : IMessage + public sealed class AtLeastOnceDeliverySnapshot : IMessage { - public GuaranteedDeliverySnapshot(long deliveryId, UnconfirmedDelivery[] unconfirmedDeliveries) + public AtLeastOnceDeliverySnapshot(long deliveryId, UnconfirmedDelivery[] unconfirmedDeliveries) { DeliveryId = deliveryId; UnconfirmedDeliveries = unconfirmedDeliveries; @@ -39,7 +39,7 @@ public GuaranteedDeliverySnapshot(long deliveryId, UnconfirmedDelivery[] unconfi /// /// message should be sent after - /// limit will be reached. + /// limit will be reached. /// public sealed class UnconfirmedWarning { @@ -53,7 +53,7 @@ public UnconfirmedWarning(UnconfirmedDelivery[] unconfirmedDeliveries) /// /// contains details about unconfirmed messages. - /// It's included inside and . + /// It's included inside and . /// public sealed class UnconfirmedDelivery { @@ -106,7 +106,7 @@ public override bool Equals(object obj) #endregion /// - /// An exception thrown, when threshold has been exceeded. + /// An exception thrown, when threshold has been exceeded. /// public class MaxUnconfirmedMessagesExceededException : AkkaException { @@ -141,7 +141,7 @@ protected MaxUnconfirmedMessagesExceededException(SerializationInfo info, Stream /// Support for snapshot is provided by get and set delivery snapshot methods. These snapshots contains full /// delivery state including unconfirmed messages. For custom snapshots remember to include those delivery ones. /// - public abstract class GuaranteedDeliveryActor : PersistentActor, IInitializableActor + public abstract class AtLeastOnceDeliveryActor : PersistentActor, IInitializableActor { private ICancelable _redeliverScheduleCancelable; private long _deliverySequenceNr = 0L; @@ -160,21 +160,21 @@ public void Init() /// Interval between redelivery attempts. /// public virtual TimeSpan RedeliverInterval { get { return DefaultRedeliverInterval; } } - protected TimeSpan DefaultRedeliverInterval { get { return Extension.Settings.GuaranteedDelivery.RedeliverInterval; } } + protected TimeSpan DefaultRedeliverInterval { get { return Extension.Settings.AtLeastOnceDelivery.RedeliverInterval; } } /// /// Maximum number of unconfirmed messages that will be sent at each redelivery burst. This is to help to /// prevent overflowing amount of messages to be sent at once, for eg. when destination cannot be reached for a long time. /// public virtual int RedeliveryBurstLimit { get { return DefaultRedeliveryBurstLimit; } } - protected int DefaultRedeliveryBurstLimit { get { return Extension.Settings.GuaranteedDelivery.RedeliveryBurstLimit; } } + protected int DefaultRedeliveryBurstLimit { get { return Extension.Settings.AtLeastOnceDelivery.RedeliveryBurstLimit; } } /// /// After this number of delivery attempts a message will be sent to . /// The count is reset after restart. /// public virtual int UnconfirmedDeliveryAttemptsToWarn { get { return DefaultUnconfirmedDeliveryAttemptsToWarn; } } - protected int DefaultUnconfirmedDeliveryAttemptsToWarn { get { return Extension.Settings.GuaranteedDelivery.UnconfirmedAttemptsToWarn; } } + protected int DefaultUnconfirmedDeliveryAttemptsToWarn { get { return Extension.Settings.AtLeastOnceDelivery.UnconfirmedAttemptsToWarn; } } /// /// Maximum number of unconfirmed messages, that this actor is allowed to hold in the memory. When this @@ -182,7 +182,7 @@ public void Init() /// instead of accepting messages. 
/// public virtual int MaxUnconfirmedMessages { get { return DefaultMaxUnconfirmedMessages; } } - protected int DefaultMaxUnconfirmedMessages { get { return Extension.Settings.GuaranteedDelivery.MaxUnconfirmedMessages; } } + protected int DefaultMaxUnconfirmedMessages { get { return Extension.Settings.AtLeastOnceDelivery.MaxUnconfirmedMessages; } } /// /// Number of messages, that have not been confirmed yet. @@ -237,13 +237,13 @@ public bool ConfirmDelivery(long deliveryId) /// Returns full state of the current delivery actor. Could be saved using method. /// During recovery a snapshot received in should be set with . /// - public GuaranteedDeliverySnapshot GetDeliverySnapshot() + public AtLeastOnceDeliverySnapshot GetDeliverySnapshot() { var unconfirmedDeliveries = _unconfirmed .Select(e => new UnconfirmedDelivery(e.Key, e.Value.Destination, e.Value.Message)) .ToArray(); - return new GuaranteedDeliverySnapshot(_deliverySequenceNr, unconfirmedDeliveries); + return new AtLeastOnceDeliverySnapshot(_deliverySequenceNr, unconfirmedDeliveries); } /// @@ -251,7 +251,7 @@ public GuaranteedDeliverySnapshot GetDeliverySnapshot() /// message and should be set with this method. /// /// - public void SetDeliverySnapshot(GuaranteedDeliverySnapshot snapshot) + public void SetDeliverySnapshot(AtLeastOnceDeliverySnapshot snapshot) { _deliverySequenceNr = snapshot.DeliveryId; var now = DateTime.Now; diff --git a/src/core/Akka.Persistence/Persistence.cs b/src/core/Akka.Persistence/Persistence.cs index f6416d0c08f..fc253837290 100644 --- a/src/core/Akka.Persistence/Persistence.cs +++ b/src/core/Akka.Persistence/Persistence.cs @@ -170,10 +170,10 @@ public ViewSettings(Config config) public long AutoUpdateReplayMax { get; private set; } } - public GuaranteedDeliverySettings GuaranteedDelivery { get; set; } - public class GuaranteedDeliverySettings + public AtLeastOnceDeliverySettings AtLeastOnceDelivery { get; set; } + public class AtLeastOnceDeliverySettings { - public GuaranteedDeliverySettings(Config config) + public AtLeastOnceDeliverySettings(Config config) { RedeliverInterval = config.GetTimeSpan("at-least-once-delivery.redeliver-interval"); MaxUnconfirmedMessages = config.GetInt("at-least-once-delivery.max-unconfirmed-messages"); @@ -205,7 +205,7 @@ public PersistenceSettings(ActorSystem system, Config config) { Journal = new JournalSettings(config); View = new ViewSettings(config); - GuaranteedDelivery = new GuaranteedDeliverySettings(config); + AtLeastOnceDelivery = new AtLeastOnceDeliverySettings(config); Internal = new InternalSettings(config); } } diff --git a/src/core/Akka.Persistence/README.md b/src/core/Akka.Persistence/README.md index 0d275b8dce3..f1502dde2cf 100644 --- a/src/core/Akka.Persistence/README.md +++ b/src/core/Akka.Persistence/README.md @@ -4,7 +4,7 @@ - **PersistentActor**: Is a persistent, stateful actor. It is able to persist events to a journal and can react to them in a thread-safe manner. It can be used to implement both command as well as event sourced actors. When a persistent actor is started or restarted, journaled messages are replayed to that actor, so that it can recover internal state from these messages. - **PersistentView**: A view is a persistent, stateful actor that receives journaled messages that have been written by another persistent actor. A view itself does not journal new messages, instead, it updates internal state only from a persistent actor's replicated message stream. 
-- **GuaranteedDelivery**: To send messages with at-least-once delivery semantics to destinations, also in case of sender and receiver virtual machine crashes. +- **AtLeastOnceDelivery**: To send messages with at-least-once delivery semantics to destinations, also in case of sender and receiver virtual machine crashes. - **Journal**: A journal stores the sequence of messages sent to a persistent actor. An application can control which messages are journaled and which are received by the persistent actor without being journaled. The storage backend of a journal is pluggable. The default journal storage plugin writes to the operating system's memory, replicated journals are available as Community plugins. - **SnapshotStore**: A snapshot store persists snapshots of a persistent actor's or a view's internal state. Snapshots are used for optimizing recovery times. The storage backend of a snapshot store is pluggable. The default snapshot storage plugin writes to the local filesystem. diff --git a/src/core/Akka.Persistence/Serialization/MessageSerializer.cs b/src/core/Akka.Persistence/Serialization/MessageSerializer.cs index 38416b411c1..b99207ebf5d 100644 --- a/src/core/Akka.Persistence/Serialization/MessageSerializer.cs +++ b/src/core/Akka.Persistence/Serialization/MessageSerializer.cs @@ -44,7 +44,7 @@ public override bool IncludeManifest public override byte[] ToBinary(object obj) { if (obj is IPersistentRepresentation) return PersistentToProto(obj as IPersistentRepresentation).Build().ToByteArray(); - if (obj is GuaranteedDeliverySnapshot) return SnapshotToProto(obj as GuaranteedDeliverySnapshot).Build().ToByteArray(); + if (obj is AtLeastOnceDeliverySnapshot) return SnapshotToProto(obj as AtLeastOnceDeliverySnapshot).Build().ToByteArray(); throw new ArgumentException(typeof(MessageSerializer) + " cannot serialize object of type " + obj.GetType()); } @@ -52,14 +52,14 @@ public override byte[] ToBinary(object obj) public override object FromBinary(byte[] bytes, Type type) { if (type == null || type == typeof(Persistent) || type == typeof(IPersistentRepresentation)) return PersistentMessageFrom(bytes); - if (type == typeof(GuaranteedDeliverySnapshot)) return SnapshotFrom(bytes); + if (type == typeof(AtLeastOnceDeliverySnapshot)) return SnapshotFrom(bytes); throw new ArgumentException(typeof(MessageSerializer) + " cannot deserialize object of type " + type); } - private GuaranteedDeliverySnapshot SnapshotFrom(byte[] bytes) + private AtLeastOnceDeliverySnapshot SnapshotFrom(byte[] bytes) { - var snap = AtLeastOnceDeliverySnapshot.ParseFrom(bytes); + var snap = global::AtLeastOnceDeliverySnapshot.ParseFrom(bytes); var unconfirmedDeliveries = new UnconfirmedDelivery[snap.UnconfirmedDeliveriesCount]; for (int i = 0; i < snap.UnconfirmedDeliveriesCount; i++) @@ -72,7 +72,7 @@ private GuaranteedDeliverySnapshot SnapshotFrom(byte[] bytes) unconfirmedDeliveries[i] = unconfirmedDelivery; } - return new GuaranteedDeliverySnapshot(snap.CurrentDeliveryId, unconfirmedDeliveries); + return new AtLeastOnceDeliverySnapshot(snap.CurrentDeliveryId, unconfirmedDeliveries); } private IPersistentRepresentation PersistentMessageFrom(byte[] bytes) @@ -96,14 +96,14 @@ private object PayloadFromProto(PersistentPayload persistentPayload) return system.Serialization.Deserialize(persistentPayload.Payload.ToByteArray(), persistentPayload.SerializerId, payloadType); } - private AtLeastOnceDeliverySnapshot.Builder SnapshotToProto(GuaranteedDeliverySnapshot snap) + private global::AtLeastOnceDeliverySnapshot.Builder 
SnapshotToProto(AtLeastOnceDeliverySnapshot snap) { - var builder = AtLeastOnceDeliverySnapshot.CreateBuilder(); + var builder = global::AtLeastOnceDeliverySnapshot.CreateBuilder(); builder.SetCurrentDeliveryId(snap.DeliveryId); foreach (var unconfirmed in snap.UnconfirmedDeliveries) { - var unconfirmedBuilder = AtLeastOnceDeliverySnapshot.Types.UnconfirmedDelivery.CreateBuilder() + var unconfirmedBuilder = global::AtLeastOnceDeliverySnapshot.Types.UnconfirmedDelivery.CreateBuilder() .SetDeliveryId(unconfirmed.DeliveryId) .SetDestination(unconfirmed.Destination.ToString()) .SetPayload(PersistentPayloadToProto(unconfirmed.Message)); diff --git a/src/examples/PersistenceExample/GuaranteedDeliveryExampleActor.cs b/src/examples/PersistenceExample/AtLeastOnceDeliveryExampleActor.cs similarity index 89% rename from src/examples/PersistenceExample/GuaranteedDeliveryExampleActor.cs rename to src/examples/PersistenceExample/AtLeastOnceDeliveryExampleActor.cs index 5e862cb58fc..097ea23844d 100644 --- a/src/examples/PersistenceExample/GuaranteedDeliveryExampleActor.cs +++ b/src/examples/PersistenceExample/AtLeastOnceDeliveryExampleActor.cs @@ -39,12 +39,12 @@ public Confirmation(long deliveryId) [Serializable] public class Snap { - public Snap(GuaranteedDeliverySnapshot snapshot) + public Snap(Akka.Persistence.AtLeastOnceDeliverySnapshot snapshot) { this.Snapshot = snapshot; } - public GuaranteedDeliverySnapshot Snapshot { get; private set; } + public Akka.Persistence.AtLeastOnceDeliverySnapshot Snapshot { get; private set; } } public class DeliveryActor : UntypedActor @@ -77,22 +77,22 @@ protected override void OnReceive(object message) } } /// - /// GuaranteedDelivery will repeat sending messages, unless confirmed by deliveryId + /// AtLeastOnceDelivery will repeat sending messages, unless confirmed by deliveryId /// /// By default, in-memory Journal is used, so this won't survive system restarts. 
/// - public class GuaranteedDeliveryExampleActor : GuaranteedDeliveryActor + public class AtLeastOnceDeliveryExampleActor : AtLeastOnceDeliveryActor { public ActorPath DeliveryPath { get; private set; } - public GuaranteedDeliveryExampleActor(ActorPath deliveryPath) + public AtLeastOnceDeliveryExampleActor(ActorPath deliveryPath) { this.DeliveryPath = deliveryPath; } public override string PersistenceId { - get { return "guaranteed-1"; } + get { return "at-least-once-1"; } } protected override bool ReceiveRecover(object message) diff --git a/src/examples/PersistenceExample/PersistenceExample.csproj b/src/examples/PersistenceExample/PersistenceExample.csproj index ee14e7a6219..2d824a15dfe 100644 --- a/src/examples/PersistenceExample/PersistenceExample.csproj +++ b/src/examples/PersistenceExample/PersistenceExample.csproj @@ -42,7 +42,7 @@ - + diff --git a/src/examples/PersistenceExample/Program.cs b/src/examples/PersistenceExample/Program.cs index 6882ec38f79..3bcad92d1fa 100644 --- a/src/examples/PersistenceExample/Program.cs +++ b/src/examples/PersistenceExample/Program.cs @@ -37,18 +37,18 @@ static void Main(string[] args) //ViewExample(system); - GuaranteedDelivery(system); + AtLeastOnceDelivery(system); Console.ReadLine(); } } - private static void GuaranteedDelivery(ActorSystem system) + private static void AtLeastOnceDelivery(ActorSystem system) { - Console.WriteLine("\n--- GUARANTEED DELIVERY EXAMPLE ---\n"); + Console.WriteLine("\n--- AT LEAST ONCE DELIVERY EXAMPLE ---\n"); var delivery = system.ActorOf(Props.Create(()=> new DeliveryActor()),"delivery"); - var deliverer = system.ActorOf(Props.Create(() => new GuaranteedDeliveryExampleActor(delivery.Path))); + var deliverer = system.ActorOf(Props.Create(() => new AtLeastOnceDeliveryExampleActor(delivery.Path))); delivery.Tell("start"); deliverer.Tell(new Message("foo")); From eee0cd5c2d118509d79ffe234ddf93f3cfa4a162 Mon Sep 17 00:00:00 2001 From: Marcus Griep Date: Sun, 17 May 2015 16:03:08 -0400 Subject: [PATCH 39/66] Add failing test for supervisor strategy constructor bug --- .../Actor/SupervisorStrategySpecs.cs | 99 +++++++++++++++++++ src/core/Akka.Tests/Akka.Tests.csproj | 1 + 2 files changed, 100 insertions(+) create mode 100644 src/core/Akka.Tests/Actor/SupervisorStrategySpecs.cs diff --git a/src/core/Akka.Tests/Actor/SupervisorStrategySpecs.cs b/src/core/Akka.Tests/Actor/SupervisorStrategySpecs.cs new file mode 100644 index 00000000000..52974d91a2f --- /dev/null +++ b/src/core/Akka.Tests/Actor/SupervisorStrategySpecs.cs @@ -0,0 +1,99 @@ +using System; +using Akka.Actor; +using Xunit; + +namespace Akka.Tests.Actor +{ + public class SupervisorStrategySpecs + { + public static readonly object[][] RetriesTestData = new[] + { + new object[] { new int?(), -1 }, + new object[] { new int?(-1), -1 }, + new object[] { new int?(0), 0 }, + new object[] { new int?(5), 5 }, + }; + + public static readonly object[][] TimeoutTestData = new[] + { + new object[] { new TimeSpan?(), -1 }, + new object[] { new TimeSpan?(System.Threading.Timeout.InfiniteTimeSpan), -1 }, + new object[] { new TimeSpan?(TimeSpan.FromMilliseconds(0)), 0 }, + new object[] { new TimeSpan?(TimeSpan.FromMilliseconds(100)), 100 }, + new object[] { new TimeSpan?(TimeSpan.FromMilliseconds(100).Add(TimeSpan.FromTicks(75))), 100 }, + new object[] { new TimeSpan?(TimeSpan.FromMilliseconds(10000)), 10000 }, + }; + + [Theory] + [MemberData("RetriesTestData")] + public void A_constructed_OneForOne_supervisor_strategy_with_nullable_retries_has_the_expected_properties(int? 
retries, int expectedRetries) + { + var uut = new OneForOneStrategy(retries, null, exn => Directive.Restart); + + Assert.Equal(uut.MaxNumberOfRetries, expectedRetries); + } + + [Theory] + [MemberData("TimeoutTestData")] + public void A_constructed_OneForOne_supervisor_strategy_with_nullable_timeouts_has_the_expected_properties(TimeSpan? timeout, int expectedTimeoutMilliseconds) + { + var uut = new OneForOneStrategy(-1, timeout, exn => Directive.Restart); + + Assert.Equal(uut.WithinTimeRangeMilliseconds, expectedTimeoutMilliseconds); + } + + [Theory] + [MemberData("RetriesTestData")] + public void A_constructed_OneForOne_supervisor_strategy_with_nullable_retries_and_a_decider_has_the_expected_properties(int? retries, int expectedRetries) + { + var uut = new OneForOneStrategy(retries, null, Decider.From(Directive.Restart)); + + Assert.Equal(uut.MaxNumberOfRetries, expectedRetries); + } + + [Theory] + [MemberData("TimeoutTestData")] + public void A_constructed_OneForOne_supervisor_strategy_with_nullable_timeouts_and_a_decider_has_the_expected_properties(TimeSpan? timeout, int expectedTimeoutMilliseconds) + { + var uut = new OneForOneStrategy(-1, timeout, Decider.From(Directive.Restart)); + + Assert.Equal(uut.WithinTimeRangeMilliseconds, expectedTimeoutMilliseconds); + } + + [Theory] + [MemberData("RetriesTestData")] + public void A_constructed_AllForOne_supervisor_strategy_with_nullable_retries_has_the_expected_properties(int? retries, int expectedRetries) + { + var uut = new AllForOneStrategy(retries, null, exn => Directive.Restart); + + Assert.Equal(uut.MaxNumberOfRetries, expectedRetries); + } + + [Theory] + [MemberData("TimeoutTestData")] + public void A_constructed_AllForOne_supervisor_strategy_with_nullable_timeouts_has_the_expected_properties(TimeSpan? timeout, int expectedTimeoutMilliseconds) + { + var uut = new AllForOneStrategy(-1, timeout, exn => Directive.Restart); + + Assert.Equal(uut.WithinTimeRangeMilliseconds, expectedTimeoutMilliseconds); + } + + [Theory] + [MemberData("RetriesTestData")] + public void A_constructed_AllForOne_supervisor_strategy_with_nullable_retries_and_a_decider_has_the_expected_properties(int? retries, int expectedRetries) + { + var uut = new OneForOneStrategy(retries, null, Decider.From(Directive.Restart)); + + Assert.Equal(uut.MaxNumberOfRetries, expectedRetries); + } + + [Theory] + [MemberData("TimeoutTestData")] + public void A_constructed_AllForOne_supervisor_strategy_with_nullable_timeouts_and_a_decider_has_the_expected_properties(TimeSpan? timeout, int expectedTimeoutMilliseconds) + { + var uut = new OneForOneStrategy(-1, timeout, Decider.From(Directive.Restart)); + + Assert.Equal(uut.WithinTimeRangeMilliseconds, expectedTimeoutMilliseconds); + } + } +} diff --git a/src/core/Akka.Tests/Akka.Tests.csproj b/src/core/Akka.Tests/Akka.Tests.csproj index ba7d59185c7..5ffbc9c7f09 100644 --- a/src/core/Akka.Tests/Akka.Tests.csproj +++ b/src/core/Akka.Tests/Akka.Tests.csproj @@ -124,6 +124,7 @@ + From 0931aec64daa61913a48a2feba2046721ce4f749 Mon Sep 17 00:00:00 2001 From: Marcus Griep Date: Sun, 17 May 2015 16:07:51 -0400 Subject: [PATCH 40/66] Fix Nullable Supervisor Strategy constructors In chaining to other constructors, the constructors for the `AllForOne` and `OneForOne` strategies pull timeouts from `TimeSpan`s using the `Milliseconds` property. This is not the correct behavior for `TimeSpan` values greater than or equal to 1 second as it provides only the milliseconds portion of the timespan. 
Instead, use the `TotalMilliseconds` property and cast it to an `int`. --- src/core/Akka/Actor/SupervisorStrategy.cs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/core/Akka/Actor/SupervisorStrategy.cs b/src/core/Akka/Actor/SupervisorStrategy.cs index fbe356dd36e..5879d3e6f6e 100644 --- a/src/core/Akka/Actor/SupervisorStrategy.cs +++ b/src/core/Akka/Actor/SupervisorStrategy.cs @@ -231,7 +231,7 @@ public IDecider Decider /// duration of the time window for maxNrOfRetries, Duration.Inf means no window. /// mapping from Exception to public OneForOneStrategy(int? maxNrOfRetries, TimeSpan? withinTimeRange, Func localOnlyDecider) - : this(maxNrOfRetries.GetValueOrDefault(-1), withinTimeRange.GetValueOrDefault(Timeout.InfiniteTimeSpan).Milliseconds, localOnlyDecider) + : this(maxNrOfRetries.GetValueOrDefault(-1), (int) withinTimeRange.GetValueOrDefault(Timeout.InfiniteTimeSpan).TotalMilliseconds, localOnlyDecider) { //Intentionally left blank } @@ -248,7 +248,7 @@ public OneForOneStrategy(int? maxNrOfRetries, TimeSpan? withinTimeRange, Funcduration of the time window for maxNrOfRetries, Duration.Inf means no window. /// mapping from Exception to public OneForOneStrategy(int? maxNrOfRetries, TimeSpan? withinTimeRange, IDecider decider) - : this(maxNrOfRetries.GetValueOrDefault(-1), withinTimeRange.GetValueOrDefault(Timeout.InfiniteTimeSpan).Milliseconds, decider) + : this(maxNrOfRetries.GetValueOrDefault(-1), (int) withinTimeRange.GetValueOrDefault(Timeout.InfiniteTimeSpan).TotalMilliseconds, decider) { //Intentionally left blank } @@ -265,7 +265,8 @@ public OneForOneStrategy(int? maxNrOfRetries, TimeSpan? withinTimeRange, IDecide /// duration in milliseconds of the time window for , negative values means no window. /// Mapping from an to /// If true failures will be logged - public OneForOneStrategy(int maxNrOfRetries, int withinTimeMilliseconds, Func localOnlyDecider, bool loggingEnabled = true) : this(maxNrOfRetries,withinTimeMilliseconds,new LocalOnlyDecider(localOnlyDecider),loggingEnabled) + public OneForOneStrategy(int maxNrOfRetries, int withinTimeMilliseconds, Func localOnlyDecider, bool loggingEnabled = true) + : this(maxNrOfRetries, withinTimeMilliseconds, new LocalOnlyDecider(localOnlyDecider), loggingEnabled) { //Intentionally left blank } @@ -403,7 +404,7 @@ public IDecider Decider /// duration of the time window for maxNrOfRetries, means no window. /// mapping from Exception to public AllForOneStrategy(int? maxNrOfRetries, TimeSpan? withinTimeRange, Func localOnlyDecider) - : this(maxNrOfRetries.GetValueOrDefault(-1), withinTimeRange.GetValueOrDefault(Timeout.InfiniteTimeSpan).Milliseconds, localOnlyDecider) + : this(maxNrOfRetries.GetValueOrDefault(-1), (int) withinTimeRange.GetValueOrDefault(Timeout.InfiniteTimeSpan).TotalMilliseconds, localOnlyDecider) { //Intentionally left blank } @@ -420,7 +421,7 @@ public AllForOneStrategy(int? maxNrOfRetries, TimeSpan? withinTimeRange, Funcduration of the time window for maxNrOfRetries, means no window. /// mapping from Exception to public AllForOneStrategy(int? maxNrOfRetries, TimeSpan? withinTimeRange, IDecider decider) - : this(maxNrOfRetries.GetValueOrDefault(-1), withinTimeRange.GetValueOrDefault(Timeout.InfiniteTimeSpan).Milliseconds, decider) + : this(maxNrOfRetries.GetValueOrDefault(-1), (int) withinTimeRange.GetValueOrDefault(Timeout.InfiniteTimeSpan).TotalMilliseconds, decider) { //Intentionally left blank } @@ -437,7 +438,8 @@ public AllForOneStrategy(int? maxNrOfRetries, TimeSpan? 
withinTimeRange, IDecide /// duration in milliseconds of the time window for , negative values means no window. /// Mapping from an to /// If true failures will be logged - public AllForOneStrategy(int maxNrOfRetries, int withinTimeMilliseconds, Func localOnlyDecider, bool loggingEnabled=true) : this(maxNrOfRetries,withinTimeMilliseconds,new LocalOnlyDecider(localOnlyDecider),loggingEnabled) + public AllForOneStrategy(int maxNrOfRetries, int withinTimeMilliseconds, Func localOnlyDecider, bool loggingEnabled=true) + : this(maxNrOfRetries, withinTimeMilliseconds, new LocalOnlyDecider(localOnlyDecider), loggingEnabled) { //Intentionally left blank } From b0ff94ecb9b97359153f9046b1be860af44b0c75 Mon Sep 17 00:00:00 2001 From: Sean Gilliam Date: Mon, 18 May 2015 16:59:08 -0500 Subject: [PATCH 41/66] Missing xmldoc comments and typo fix - added missing xmldoc comments to the contrib loggers - file rename Slf4jLogger => Slf4NetLogger to match the class name --- .../loggers/Akka.Logger.NLog/NLogLogger.cs | 11 +++++- .../SerilogLogMessageFormatter.cs | 19 +++++++++- .../Akka.Logger.Serilog/SerilogLogger.cs | 37 ++++++++++++------- .../Akka.Logger.slf4net.csproj | 2 +- .../{Slf4jLogger.cs => Slf4NetLogger.cs} | 16 +++++--- 5 files changed, 63 insertions(+), 22 deletions(-) rename src/contrib/loggers/Akka.Logger.slf4net/{Slf4jLogger.cs => Slf4NetLogger.cs} (75%) diff --git a/src/contrib/loggers/Akka.Logger.NLog/NLogLogger.cs b/src/contrib/loggers/Akka.Logger.NLog/NLogLogger.cs index 1b8b2050d54..78f331df5a9 100644 --- a/src/contrib/loggers/Akka.Logger.NLog/NLogLogger.cs +++ b/src/contrib/loggers/Akka.Logger.NLog/NLogLogger.cs @@ -5,14 +5,20 @@ // //----------------------------------------------------------------------- +using System; using Akka.Actor; using Akka.Event; using NLog; -using System; using NLogger = global::NLog.Logger; namespace Akka.Logger.NLog { + /// + /// This class is used to receive log events and sends them to + /// the configured NLog logger. The following log events are + /// recognized: , , + /// and . + /// public class NLogLogger : ReceiveActor { private readonly ILoggingAdapter _log = Context.GetLogger(); @@ -23,6 +29,9 @@ private static void Log(LogEvent logEvent, Action logStatement) logStatement(logger); } + /// + /// Initializes a new instance of the class. + /// public NLogLogger() { Receive(m => Log(m, logger => logger.Error("{0}", m.Message))); diff --git a/src/contrib/loggers/Akka.Logger.Serilog/SerilogLogMessageFormatter.cs b/src/contrib/loggers/Akka.Logger.Serilog/SerilogLogMessageFormatter.cs index 7e781f74a70..4ca10a848a2 100644 --- a/src/contrib/loggers/Akka.Logger.Serilog/SerilogLogMessageFormatter.cs +++ b/src/contrib/loggers/Akka.Logger.Serilog/SerilogLogMessageFormatter.cs @@ -13,15 +13,32 @@ namespace Akka.Logger.Serilog { + /// + /// This class contains methods used to convert Serilog templated messages + /// into normal text messages. + /// public class SerilogLogMessageFormatter : ILogMessageFormatter { private readonly MessageTemplateCache _templateCache; - + + /// + /// Initializes a new instance of the class. + /// public SerilogLogMessageFormatter() { _templateCache = new MessageTemplateCache(new MessageTemplateParser()); } + /// + /// Converts the specified template string to a text string using the specified + /// token array to match replacements. + /// + /// The template string used in the conversion. + /// The array that contains values to replace in the template. 
+ /// + /// A text string where the template placeholders have been replaced with + /// their corresponding values. + /// public string Format(string format, params object[] args) { var template = _templateCache.Parse(format); diff --git a/src/contrib/loggers/Akka.Logger.Serilog/SerilogLogger.cs b/src/contrib/loggers/Akka.Logger.Serilog/SerilogLogger.cs index 8d6d666fd60..7d27c6aaf30 100644 --- a/src/contrib/loggers/Akka.Logger.Serilog/SerilogLogger.cs +++ b/src/contrib/loggers/Akka.Logger.Serilog/SerilogLogger.cs @@ -5,13 +5,19 @@ // //----------------------------------------------------------------------- +using System; using Akka.Actor; using Akka.Event; using Serilog; -using System; namespace Akka.Logger.Serilog { + /// + /// This class is used to receive log events and sends them to + /// the configured Serilog logger. The following log events are + /// recognized: , , + /// and . + /// public class SerilogLogger : ReceiveActor { private readonly ILoggingAdapter _log = Context.GetLogger(); @@ -30,19 +36,6 @@ private ILogger SetContextFromLogEvent(ILogger logger, LogEvent logEvent) return logger; } - public SerilogLogger() - { - Receive(m => WithSerilog(logger => SetContextFromLogEvent(logger, m).Error(m.Cause, GetFormat(m.Message), GetArgs(m.Message)))); - Receive(m => WithSerilog(logger => SetContextFromLogEvent(logger, m).Warning(GetFormat(m.Message), GetArgs(m.Message)))); - Receive(m => WithSerilog(logger => SetContextFromLogEvent(logger, m).Information(GetFormat(m.Message), GetArgs(m.Message)))); - Receive(m => WithSerilog(logger => SetContextFromLogEvent(logger, m).Debug(GetFormat(m.Message), GetArgs(m.Message)))); - Receive(m => - { - _log.Info("SerilogLogger started"); - Sender.Tell(new LoggerInitialized()); - }); - } - private static string GetFormat(object message) { var logMessage = message as LogMessage; @@ -60,6 +53,22 @@ private static object[] GetArgs(object message) ? logMessage.Args : new[] { message }; } + + /// + /// Initializes a new instance of the class. 
+ /// + public SerilogLogger() + { + Receive(m => WithSerilog(logger => SetContextFromLogEvent(logger, m).Error(m.Cause, GetFormat(m.Message), GetArgs(m.Message)))); + Receive(m => WithSerilog(logger => SetContextFromLogEvent(logger, m).Warning(GetFormat(m.Message), GetArgs(m.Message)))); + Receive(m => WithSerilog(logger => SetContextFromLogEvent(logger, m).Information(GetFormat(m.Message), GetArgs(m.Message)))); + Receive(m => WithSerilog(logger => SetContextFromLogEvent(logger, m).Debug(GetFormat(m.Message), GetArgs(m.Message)))); + Receive(m => + { + _log.Info("SerilogLogger started"); + Sender.Tell(new LoggerInitialized()); + }); + } } } diff --git a/src/contrib/loggers/Akka.Logger.slf4net/Akka.Logger.slf4net.csproj b/src/contrib/loggers/Akka.Logger.slf4net/Akka.Logger.slf4net.csproj index 8c913478b38..1db2a92deca 100644 --- a/src/contrib/loggers/Akka.Logger.slf4net/Akka.Logger.slf4net.csproj +++ b/src/contrib/loggers/Akka.Logger.slf4net/Akka.Logger.slf4net.csproj @@ -65,7 +65,7 @@ Properties\SharedAssemblyInfo.cs - + diff --git a/src/contrib/loggers/Akka.Logger.slf4net/Slf4jLogger.cs b/src/contrib/loggers/Akka.Logger.slf4net/Slf4NetLogger.cs similarity index 75% rename from src/contrib/loggers/Akka.Logger.slf4net/Slf4jLogger.cs rename to src/contrib/loggers/Akka.Logger.slf4net/Slf4NetLogger.cs index 6337b4e8d37..3030c11facc 100644 --- a/src/contrib/loggers/Akka.Logger.slf4net/Slf4jLogger.cs +++ b/src/contrib/loggers/Akka.Logger.slf4net/Slf4NetLogger.cs @@ -5,19 +5,21 @@ // //----------------------------------------------------------------------- +using System; using Akka.Actor; using Akka.Event; using slf4net; -using System; namespace Akka.Logger.slf4net { + /// + /// This class is used to receive log events and sends them to + /// the configured slf4net logger. The following log events are + /// recognized: , , + /// and . + /// public class Slf4NetLogger : UntypedActor { - //private string mdcThreadAttributeName = "sourceThread"; - //private string mdcAkkaSourceAttributeName = "akkaSource"; - //private string mdcAkkaTimestamp = "akkaTimestamp"; - private readonly ILoggingAdapter _log = Context.GetLogger(); private void WithMDC(Action logStatement) @@ -26,6 +28,10 @@ private void WithMDC(Action logStatement) logStatement(logger); } + /// + /// Receives an event and logs it to the slf4net logger. + /// + /// The event sent to the logger. protected override void OnReceive(object message) { message From 3b4a05b42d5e13012b218a4a3eb691f26d3339a0 Mon Sep 17 00:00:00 2001 From: Natan Vivo Date: Tue, 19 May 2015 09:27:42 -0300 Subject: [PATCH 42/66] Added generic extensions to EventStream subscribe/unsubscribe. --- src/core/Akka/Akka.csproj | 1 + src/core/Akka/Event/EventStreamExtensions.cs | 42 ++++++++++++++++++++ 2 files changed, 43 insertions(+) create mode 100644 src/core/Akka/Event/EventStreamExtensions.cs diff --git a/src/core/Akka/Akka.csproj b/src/core/Akka/Akka.csproj index b1f9d2e73dc..a0c2ae2c9d8 100644 --- a/src/core/Akka/Akka.csproj +++ b/src/core/Akka/Akka.csproj @@ -217,6 +217,7 @@ + diff --git a/src/core/Akka/Event/EventStreamExtensions.cs b/src/core/Akka/Event/EventStreamExtensions.cs new file mode 100644 index 00000000000..07510fc5421 --- /dev/null +++ b/src/core/Akka/Event/EventStreamExtensions.cs @@ -0,0 +1,42 @@ +using Akka.Actor; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace Akka.Event +{ + + /// + /// Extension methods for the EventStream class. 
+ /// + public static class EventStreamExtensions + { + /// + /// Subscribes the specified subscriber. + /// + /// The channel. + /// The event stream. + /// The subscriber. + /// true if subscription was successful, false otherwise. + /// subscriber + public static bool Subscribe(this EventStream eventStream, IActorRef subscriber) + { + return eventStream.Subscribe(subscriber, typeof(TChannel)); + } + + /// + /// Unsubscribes the specified subscriber. + /// + /// The channel. + /// The event stream. + /// The subscriber. + /// true if unsubscription was successful, false otherwise. + /// subscriber + public static bool Unsubscribe(this EventStream eventStream, IActorRef subscriber) + { + return eventStream.Unsubscribe(subscriber, typeof(TChannel)); + } + } +} From 8739fd78d2bbbb4467d1a418ec86a066f037b48d Mon Sep 17 00:00:00 2001 From: Thomas Lazar Date: Tue, 19 May 2015 16:17:45 +0200 Subject: [PATCH 43/66] DIActorSystemAdapter.cs - removed ActorOf method - added non generic Props method DIActorContextAdapter.cs - obsoleted ActorOf method - added non generic Props method Examples - updated Examples to use the new syntax Readme.md - updated Examples with new syntax --- .../Akka.DI.Core/DIActorContextAdapter.cs | 12 +++++++++--- .../Akka.DI.Core/DIActorSystemAdapter.cs | 7 ++++--- .../dependencyInjection/Akka.DI.Core/Readme.md | 6 +++--- .../Examples/BasicAutoFacUses/Program.cs | 3 ++- .../Examples/BasicNinjectUses/Program.cs | 3 ++- .../Examples/BasicUnityUses/Program.cs | 3 ++- 6 files changed, 22 insertions(+), 12 deletions(-) diff --git a/src/contrib/dependencyInjection/Akka.DI.Core/DIActorContextAdapter.cs b/src/contrib/dependencyInjection/Akka.DI.Core/DIActorContextAdapter.cs index 7ae6e2447f3..f84fbc29cc0 100644 --- a/src/contrib/dependencyInjection/Akka.DI.Core/DIActorContextAdapter.cs +++ b/src/contrib/dependencyInjection/Akka.DI.Core/DIActorContextAdapter.cs @@ -20,16 +20,22 @@ public DIActorContextAdapter(IActorContext context) this.context = context; this.producer = context.System.GetExtension(); } + + [Obsolete("Use Props methods for actor creation. 
This method will be removed in future versions")] public IActorRef ActorOf(string name = null) where TActor : ActorBase { return context.ActorOf(producer.Props(typeof(TActor)), name); } - public Props Props() where TActor : ActorBase + public Props Props(Type actorType) { - return producer.Props(typeof(TActor)); + return producer.Props(actorType); } - } + public Props Props() where TActor : ActorBase + { + return Props(typeof(TActor)); + } + } } diff --git a/src/contrib/dependencyInjection/Akka.DI.Core/DIActorSystemAdapter.cs b/src/contrib/dependencyInjection/Akka.DI.Core/DIActorSystemAdapter.cs index be6053c2574..4538d4636f3 100644 --- a/src/contrib/dependencyInjection/Akka.DI.Core/DIActorSystemAdapter.cs +++ b/src/contrib/dependencyInjection/Akka.DI.Core/DIActorSystemAdapter.cs @@ -20,14 +20,15 @@ public DIActorSystemAdapter(ActorSystem system) this.system = system; this.producer = system.GetExtension(); } - public IActorRef ActorOf(string name = null) where TActor : ActorBase + + public Props Props(Type actorType) { - return system.ActorOf(producer.Props(typeof(TActor)), name); + return producer.Props(actorType); } public Props Props() where TActor : ActorBase { - return producer.Props(typeof(TActor)); + return Props(typeof(TActor)); } } } diff --git a/src/contrib/dependencyInjection/Akka.DI.Core/Readme.md b/src/contrib/dependencyInjection/Akka.DI.Core/Readme.md index 1ebd2b13561..f971e8084ae 100644 --- a/src/contrib/dependencyInjection/Akka.DI.Core/Readme.md +++ b/src/contrib/dependencyInjection/Akka.DI.Core/Readme.md @@ -154,8 +154,8 @@ using (var system = ActorSystem.Create("MySystem")) IDependencyResolver resolver = new WindsorDependencyResolver(container, system); // Register the actors with the system - system.ActorOf(resolver.Create(), "Worker1"); - system.ActorOf(resolver.Create(), "Worker2"); + system.ActorOf(system.DI().Props(), "Worker1"); + system.ActorOf(system.DI().Props(), "Worker2"); // Create the router IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); @@ -176,5 +176,5 @@ using (var system = ActorSystem.Create("MySystem")) When you want to create child actors from within your existing actors using Dependency Injection you can use the Actor Content extension just like in the following example. 
```csharp -Context.DI().ActorOf().Tell(message); +Context.ActorOf(Context.DI().Props()).Tell(message); ``` diff --git a/src/contrib/dependencyInjection/Examples/BasicAutoFacUses/Program.cs b/src/contrib/dependencyInjection/Examples/BasicAutoFacUses/Program.cs index cc51a070b16..87e4426ef1e 100644 --- a/src/contrib/dependencyInjection/Examples/BasicAutoFacUses/Program.cs +++ b/src/contrib/dependencyInjection/Examples/BasicAutoFacUses/Program.cs @@ -11,6 +11,7 @@ using System.Threading.Tasks; using Autofac; using Akka.DI.AutoFac; +using Akka.DI.Core; namespace BasicAutoFacUses { @@ -34,7 +35,7 @@ private static void WithHashPool() var propsResolver = new AutoFacDependencyResolver(container, system); - var router = system.ActorOf(propsResolver.Create().WithRouter(FromConfig.Instance), "router1"); + var router = system.ActorOf(system.DI().Props().WithRouter(FromConfig.Instance), "router1"); Task.Delay(500).Wait(); Console.WriteLine("Sending Messages"); diff --git a/src/contrib/dependencyInjection/Examples/BasicNinjectUses/Program.cs b/src/contrib/dependencyInjection/Examples/BasicNinjectUses/Program.cs index 0d045e10423..984c020877e 100644 --- a/src/contrib/dependencyInjection/Examples/BasicNinjectUses/Program.cs +++ b/src/contrib/dependencyInjection/Examples/BasicNinjectUses/Program.cs @@ -7,6 +7,7 @@ using Akka.Actor; using Akka.DI.Ninject; +using Akka.DI.Core; using Akka.Routing; using System; using System.Threading.Tasks; @@ -31,7 +32,7 @@ private static void WithHashPool() var propsResolver = new NinjectDependencyResolver(container, system); - var router = system.ActorOf(propsResolver.Create().WithRouter(FromConfig.Instance), "router1"); + var router = system.ActorOf(system.DI().Props().WithRouter(FromConfig.Instance), "router1"); Task.Delay(500).Wait(); Console.WriteLine("Sending Messages"); diff --git a/src/contrib/dependencyInjection/Examples/BasicUnityUses/Program.cs b/src/contrib/dependencyInjection/Examples/BasicUnityUses/Program.cs index 0281c33a329..2b6eef0fcb4 100644 --- a/src/contrib/dependencyInjection/Examples/BasicUnityUses/Program.cs +++ b/src/contrib/dependencyInjection/Examples/BasicUnityUses/Program.cs @@ -8,6 +8,7 @@ using System; using System.Threading.Tasks; using Akka.Actor; +using Akka.DI.Core; using Akka.DI.Unity; using Akka.Routing; using Microsoft.Practices.Unity; @@ -32,7 +33,7 @@ private static void WithHashPool() var propsResolver = new UnityDependencyResolver(container, system); - var router = system.ActorOf(propsResolver.Create().WithRouter(FromConfig.Instance), "router1"); + var router = system.ActorOf(system.DI().Props().WithRouter(FromConfig.Instance), "router1"); Task.Delay(500).Wait(); Console.WriteLine("Sending Messages"); From adc1debda4fd0427614db4ba03fd72acc719c607 Mon Sep 17 00:00:00 2001 From: "Lucas N. Munhoz" Date: Tue, 19 May 2015 12:28:21 -0300 Subject: [PATCH 44/66] Fix the url for Building Akka docs The old url seems not exists anymore. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f466dc67155..ff18906544a 100644 --- a/README.md +++ b/README.md @@ -38,6 +38,6 @@ PM> Install-Package Akka.FSharp ### Contribute If you are interested in helping porting Akka to .NET please take a look at [Contributing to Akka.NET](http://akkadotnet.github.io/wiki/Contributing to Akka.NET). -Also, please see [Building Akka.NET](https://github.com/akkadotnet/akka.net/wiki/Building-and-Distributing-Pigeon). +Also, please see [Building Akka.NET](http://getakka.net/docs/Building%20and%20Distributing%20Akka). 
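To pull the syntax change together in one place, a hedged before/after sketch of actor creation through the DI extension; the `TypedWorker` actor and the container/resolver wiring are assumed from the surrounding examples and are not shown here.

```csharp
using Akka.Actor;
using Akka.DI.Core;
using Akka.Routing;

// Old syntax (removed/obsoleted in this patch): go through the resolver instance.
//   var router = system.ActorOf(propsResolver.Create<TypedWorker>()
//       .WithRouter(FromConfig.Instance), "router1");

// New syntax: ask the DI extension attached to the system for Props.
var router = system.ActorOf(
    system.DI().Props<TypedWorker>().WithRouter(FromConfig.Instance), "router1");

// Inside an actor, children are created the same way via the context:
//   Context.ActorOf(Context.DI().Props<TypedWorker>()).Tell(message);
```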
ReSharper From e5b6f82006b6e0954a8bf133989540b5a30a8f4f Mon Sep 17 00:00:00 2001 From: rogeralsing Date: Wed, 20 May 2015 09:53:59 +0200 Subject: [PATCH 45/66] Convert null to NoSender. --- src/core/Akka.TestKit.Tests/NoImplicitSenderSpec.cs | 7 +++---- src/core/Akka.TestKit/TestKitBase.cs | 6 ++++++ src/core/Akka/Actor/ActorRef.cs | 5 ++++- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/src/core/Akka.TestKit.Tests/NoImplicitSenderSpec.cs b/src/core/Akka.TestKit.Tests/NoImplicitSenderSpec.cs index 6ce2f6d4765..70bd8ac5a29 100644 --- a/src/core/Akka.TestKit.Tests/NoImplicitSenderSpec.cs +++ b/src/core/Akka.TestKit.Tests/NoImplicitSenderSpec.cs @@ -19,7 +19,7 @@ public void When_Not_ImplicitSender_then_testActor_is_not_sender() { var echoActor = Sys.ActorOf(c => c.ReceiveAny((m, ctx) => TestActor.Tell(ctx.Sender))); echoActor.Tell("message"); - ExpectMsg(actorRef => actorRef == ActorRefs.NoSender); + ExpectMsg(actorRef => Equals(actorRef, ActorRefs.NoSender)); } } @@ -31,11 +31,11 @@ public void ImplicitSender_should_have_testActor_as_sender() { var echoActor = Sys.ActorOf(c => c.ReceiveAny((m, ctx) => TestActor.Tell(ctx.Sender))); echoActor.Tell("message"); - ExpectMsg(actorRef => actorRef == TestActor); + ExpectMsg(actorRef => Equals(actorRef, TestActor)); //Test that it works after we know that context has been changed echoActor.Tell("message"); - ExpectMsg(actorRef => actorRef == TestActor); + ExpectMsg(actorRef => Equals(actorRef, TestActor)); } @@ -59,6 +59,5 @@ public void ImplicitSender_should_not_change_when_creating_TestActors() LastSender.ShouldBe(TestActor); } } - } diff --git a/src/core/Akka.TestKit/TestKitBase.cs b/src/core/Akka.TestKit/TestKitBase.cs index cce55da34d5..4f1b2dadce4 100644 --- a/src/core/Akka.TestKit/TestKitBase.cs +++ b/src/core/Akka.TestKit/TestKitBase.cs @@ -110,6 +110,12 @@ private TestKitBase(ITestKitAssertions assertions, ActorSystem system, Config co { InternalCurrentActorCellKeeper.Current = (ActorCell)((ActorRefWithCell)testActor).Underlying; } + else if(!(this is TestProbe)) + //HACK: we need to clear the current context when running a No Implicit Sender test as sender from an async test may leak + //but we should not clear the current context when creating a testprobe from a test + { + InternalCurrentActorCellKeeper.Current = null; + } SynchronizationContext.SetSynchronizationContext( new ActorCellKeepingSynchronizationContext(InternalCurrentActorCellKeeper.Current)); _testActor = testActor; diff --git a/src/core/Akka/Actor/ActorRef.cs b/src/core/Akka/Actor/ActorRef.cs index e3e96650643..88109ff2bc2 100644 --- a/src/core/Akka/Actor/ActorRef.cs +++ b/src/core/Akka/Actor/ActorRef.cs @@ -178,7 +178,10 @@ public ISurrogated FromSurrogate(ActorSystem system) public void Tell(object message, IActorRef sender) { - if (sender == null) throw new ArgumentNullException("sender", "A sender must be specified"); + if (sender == null) + { + sender = ActorRefs.NoSender; + } TellInternal(message, sender); } From 1b076d3d84f7a03aa0b449383683847e166bb220 Mon Sep 17 00:00:00 2001 From: Stefan Sedich Date: Thu, 21 May 2015 07:17:35 +1000 Subject: [PATCH 46/66] Make actor system respect the configured schedule type --- src/core/Akka.Tests/Actor/ActorSystemSpec.cs | 43 +++++++++++++++++++ .../Akka/Actor/Internals/ActorSystemImpl.cs | 6 ++- .../Akka/Actor/Scheduler/SchedulerBase.cs | 8 +--- .../Actor/Scheduler/TaskBasedScheduler.cs | 8 ---- src/core/Akka/Actor/Settings.cs | 7 +++ src/core/Akka/Configuration/Pigeon.conf | 2 +- 6 files changed, 56 
insertions(+), 18 deletions(-) diff --git a/src/core/Akka.Tests/Actor/ActorSystemSpec.cs b/src/core/Akka.Tests/Actor/ActorSystemSpec.cs index 7954ce3ec95..478ab59d9eb 100644 --- a/src/core/Akka.Tests/Actor/ActorSystemSpec.cs +++ b/src/core/Akka.Tests/Actor/ActorSystemSpec.cs @@ -89,6 +89,19 @@ public void AnActorSystem_Must_Support_Dynamically_Registered_Extensions() Assert.Equal(Sys, otherTestExtension.System); } + [Fact] + public void AnActorSystem_Must_Setup_The_Default_Scheduler() + { + Assert.True(Sys.Scheduler.GetType() == typeof(TaskBasedScheduler)); + } + + [Fact] + public void AnActorSystem_Must_Support_Using_A_Customer_Scheduler() + { + var actorSystem = ActorSystem.Create(Guid.NewGuid().ToString(), DefaultConfig.WithFallback("akka.scheduler.implementation = \"Akka.Tests.Actor.TestScheduler, Akka.Tests\"")); + Assert.True(actorSystem.Scheduler.GetType() == typeof(TestScheduler)); + } + #endregion } @@ -127,5 +140,35 @@ public TestExtensionImpl(ActorSystem system) public ActorSystem System { get; private set; } } + + public class TestScheduler : IScheduler + { + public void ScheduleTellOnce(TimeSpan delay, ICanTell receiver, object message, IActorRef sender) + { + throw new NotImplementedException(); + } + + public void ScheduleTellOnce(TimeSpan delay, ICanTell receiver, object message, IActorRef sender, ICancelable cancelable) + { + throw new NotImplementedException(); + } + + public void ScheduleTellRepeatedly(TimeSpan initialDelay, TimeSpan interval, ICanTell receiver, object message, + IActorRef sender) + { + throw new NotImplementedException(); + } + + public void ScheduleTellRepeatedly(TimeSpan initialDelay, TimeSpan interval, ICanTell receiver, object message, + IActorRef sender, ICancelable cancelable) + { + throw new NotImplementedException(); + } + + public DateTimeOffset Now { get; } + public TimeSpan MonotonicClock { get; } + public TimeSpan HighResMonotonicClock { get; } + public IAdvancedScheduler Advanced { get; } + } } diff --git a/src/core/Akka/Actor/Internals/ActorSystemImpl.cs b/src/core/Akka/Actor/Internals/ActorSystemImpl.cs index 3449b0bba88..e84d064e2c4 100644 --- a/src/core/Akka/Actor/Internals/ActorSystemImpl.cs +++ b/src/core/Akka/Actor/Internals/ActorSystemImpl.cs @@ -52,8 +52,9 @@ public ActorSystemImpl(string name, Config config) throw new ArgumentNullException("config"); _name = name; - ConfigureScheduler(); + ConfigureSettings(config); + ConfigureScheduler(); ConfigureEventStream(); ConfigureProvider(); ConfigureSerialization(); @@ -127,7 +128,8 @@ public override ActorSelection ActorSelection(string actorPath) private void ConfigureScheduler() { - _scheduler = new TaskBasedScheduler(); + var schedulerType = Type.GetType(_settings.SchedulerClass, true); + _scheduler = (IScheduler)Activator.CreateInstance(schedulerType); } /// diff --git a/src/core/Akka/Actor/Scheduler/SchedulerBase.cs b/src/core/Akka/Actor/Scheduler/SchedulerBase.cs index f511e5e97fa..84d0baa5232 100644 --- a/src/core/Akka/Actor/Scheduler/SchedulerBase.cs +++ b/src/core/Akka/Actor/Scheduler/SchedulerBase.cs @@ -21,7 +21,6 @@ void ITellScheduler.ScheduleTellOnce(TimeSpan delay, ICanTell receiver, object m { ValidateDelay(delay, "delay"); InternalScheduleTellOnce(delay, receiver, message, sender, cancelable); - } void ITellScheduler.ScheduleTellRepeatedly(TimeSpan initialDelay, TimeSpan interval, ICanTell receiver, object message, IActorRef sender) @@ -38,7 +37,6 @@ void ITellScheduler.ScheduleTellRepeatedly(TimeSpan initialDelay, TimeSpan inter 
InternalScheduleTellRepeatedly(initialDelay, interval, receiver, message, sender, cancelable); } - void IActionScheduler.ScheduleOnce(TimeSpan delay, Action action) { ValidateDelay(delay, "delay"); @@ -66,22 +64,18 @@ void IActionScheduler.ScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interva } IAdvancedScheduler IScheduler.Advanced { get { return this; } } - DateTimeOffset ITimeProvider.Now { get { return TimeNow; } } - - + protected abstract DateTimeOffset TimeNow { get; } public abstract TimeSpan MonotonicClock { get; } public abstract TimeSpan HighResMonotonicClock { get; } protected abstract void InternalScheduleTellOnce(TimeSpan delay, ICanTell receiver, object message, IActorRef sender, ICancelable cancelable); - protected abstract void InternalScheduleTellRepeatedly(TimeSpan initialDelay, TimeSpan interval, ICanTell receiver, object message, IActorRef sender, ICancelable cancelable); protected abstract void InternalScheduleOnce(TimeSpan delay, Action action, ICancelable cancelable); protected abstract void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval, Action action, ICancelable cancelable); - protected static void ValidateInterval(TimeSpan interval, string parameterName) { if(interval <= TimeSpan.Zero) diff --git a/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs b/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs index ce9276af0c4..633397a3905 100644 --- a/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs +++ b/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs @@ -11,12 +11,8 @@ namespace Akka.Actor { - /// - /// Class Scheduler. - /// public class TaskBasedScheduler : SchedulerBase, IDateTimeOffsetNowTimeProvider { - protected override DateTimeOffset TimeNow { get { return DateTimeOffset.Now; } } public override TimeSpan MonotonicClock { get { return Util.MonotonicClock.Elapsed; } } public override TimeSpan HighResMonotonicClock { get { return Util.MonotonicClock.ElapsedHighRes; } } @@ -45,7 +41,6 @@ protected override void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSp InternalScheduleRepeatedly(initialDelay, interval, action, cancellationToken); } - private void InternalScheduleOnce(TimeSpan initialDelay, Action action, CancellationToken token) { Task.Delay(initialDelay, token).ContinueWith(t => @@ -63,7 +58,6 @@ private void InternalScheduleOnce(TimeSpan initialDelay, Action action, Cancella }, token, TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Current); } - private void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval, Action action, CancellationToken token) { Action executeAction = null; @@ -84,9 +78,7 @@ private void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval }; Task.Delay(initialDelay, token) .ContinueWith(executeAction, token, TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Current); - } - } } diff --git a/src/core/Akka/Actor/Settings.cs b/src/core/Akka/Actor/Settings.cs index b79290de92e..6e9abe335de 100644 --- a/src/core/Akka/Actor/Settings.cs +++ b/src/core/Akka/Actor/Settings.cs @@ -107,6 +107,8 @@ public Settings(ActorSystem system, Config config) DebugRouterMisconfiguration = Config.GetBoolean("akka.actor.debug.router-misconfiguration"); Home = Config.GetString("akka.home") ?? ""; DefaultVirtualNodesFactor = Config.GetInt("akka.actor.deployment.default.virtual-nodes-factor"); + + SchedulerClass = Config.GetString("akka.scheduler.implementation"); //TODO: dunno.. 
we dont have FiniteStateMachines, dont know what the rest is /* final val SchedulerClass: String = getString("akka.scheduler.implementation") @@ -260,6 +262,11 @@ public Settings(ActorSystem system, Config config) /// public int DefaultVirtualNodesFactor { get; private set; } + /// + /// Gets the scheduler implementation used by this system. + /// + public string SchedulerClass { get; private set; } + /// /// Returns a that represents this instance. /// diff --git a/src/core/Akka/Configuration/Pigeon.conf b/src/core/Akka/Configuration/Pigeon.conf index 47ab68804d4..5643bc7a71c 100644 --- a/src/core/Akka/Configuration/Pigeon.conf +++ b/src/core/Akka/Configuration/Pigeon.conf @@ -435,7 +435,7 @@ akka { # 1) com.typesafe.config.Config # 2) akka.event.LoggingAdapter # 3) java.util.concurrent.ThreadFactory - implementation = akka.actor.LightArrayRevolverScheduler + implementation = "Akka.Actor.TaskBasedScheduler" # When shutting down the scheduler, there will typically be a thread which # needs to be stopped, and this timeout determines how long to wait for From 43c39d1dca60064f852138e18682bdf65b48ccd5 Mon Sep 17 00:00:00 2001 From: Stefan Sedich Date: Thu, 21 May 2015 07:32:39 +1000 Subject: [PATCH 47/66] Move over to using the new DedicatedThreadScheduler as the default implementation in config --- src/core/Akka.Tests/Actor/ActorSystemSpec.cs | 7 ++++++- src/core/Akka/Actor/Internals/ActorSystemImpl.cs | 4 +--- src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs | 8 ++++++++ src/core/Akka/Configuration/Pigeon.conf | 2 +- 4 files changed, 16 insertions(+), 5 deletions(-) diff --git a/src/core/Akka.Tests/Actor/ActorSystemSpec.cs b/src/core/Akka.Tests/Actor/ActorSystemSpec.cs index 478ab59d9eb..82558b9c451 100644 --- a/src/core/Akka.Tests/Actor/ActorSystemSpec.cs +++ b/src/core/Akka.Tests/Actor/ActorSystemSpec.cs @@ -92,7 +92,7 @@ public void AnActorSystem_Must_Support_Dynamically_Registered_Extensions() [Fact] public void AnActorSystem_Must_Setup_The_Default_Scheduler() { - Assert.True(Sys.Scheduler.GetType() == typeof(TaskBasedScheduler)); + Assert.True(Sys.Scheduler.GetType() == typeof(DedicatedThreadScheduler)); } [Fact] @@ -143,6 +143,11 @@ public TestExtensionImpl(ActorSystem system) public class TestScheduler : IScheduler { + public TestScheduler(ActorSystem system) + { + + } + public void ScheduleTellOnce(TimeSpan delay, ICanTell receiver, object message, IActorRef sender) { throw new NotImplementedException(); diff --git a/src/core/Akka/Actor/Internals/ActorSystemImpl.cs b/src/core/Akka/Actor/Internals/ActorSystemImpl.cs index 90dd9d44998..a11b8513825 100644 --- a/src/core/Akka/Actor/Internals/ActorSystemImpl.cs +++ b/src/core/Akka/Actor/Internals/ActorSystemImpl.cs @@ -52,12 +52,10 @@ public ActorSystemImpl(string name, Config config) throw new ArgumentNullException("config"); _name = name; - ConfigureScheduler(); ConfigureSettings(config); ConfigureScheduler(); ConfigureEventStream(); ConfigureProvider(); - ConfigureScheduler(); ConfigureSerialization(); ConfigureMailboxes(); ConfigureDispatchers(); @@ -130,7 +128,7 @@ public override ActorSelection ActorSelection(string actorPath) private void ConfigureScheduler() { var schedulerType = Type.GetType(_settings.SchedulerClass, true); - _scheduler = (IScheduler)Activator.CreateInstance(schedulerType); + _scheduler = (IScheduler) Activator.CreateInstance(schedulerType, this); } /// diff --git a/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs b/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs index 90ab6bdb401..62cbbd2d162 100644 
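Taken together, the two scheduler commits make the scheduler type configurable and construct it with the owning ActorSystem. A minimal sketch of plugging in a custom scheduler under these changes; `MyCompany.MyScheduler, MyAssembly` is a placeholder type, which would need to implement IScheduler and expose a constructor accepting an ActorSystem.

```csharp
using Akka.Actor;
using Akka.Configuration;

// Placeholder fully-qualified type name; the configured type is resolved with
// Type.GetType(...) and instantiated via Activator.CreateInstance(type, system),
// so it must implement IScheduler and have an (ActorSystem) constructor.
var config = ConfigurationFactory.ParseString(
    @"akka.scheduler.implementation = ""MyCompany.MyScheduler, MyAssembly""");

var system = ActorSystem.Create("scheduler-demo", config);
// system.Scheduler is now an instance of the configured type
// (DedicatedThreadScheduler when nothing is overridden).
```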
--- a/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs +++ b/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs @@ -17,6 +17,14 @@ public class TaskBasedScheduler : SchedulerBase, IDateTimeOffsetNowTimeProvider public override TimeSpan MonotonicClock { get { return Util.MonotonicClock.Elapsed; } } public override TimeSpan HighResMonotonicClock { get { return Util.MonotonicClock.ElapsedHighRes; } } + public TaskBasedScheduler() + { + } + + public TaskBasedScheduler(ActorSystem system) + { + } + protected override void InternalScheduleTellOnce(TimeSpan delay, ICanTell receiver, object message, IActorRef sender, ICancelable cancelable) { var cancellationToken = cancelable == null ? CancellationToken.None : cancelable.Token; diff --git a/src/core/Akka/Configuration/Pigeon.conf b/src/core/Akka/Configuration/Pigeon.conf index 6ebe0ba22ab..5f07b9b5813 100644 --- a/src/core/Akka/Configuration/Pigeon.conf +++ b/src/core/Akka/Configuration/Pigeon.conf @@ -435,7 +435,7 @@ akka { # 1) com.typesafe.config.Config # 2) akka.event.LoggingAdapter # 3) java.util.concurrent.ThreadFactory - implementation = "Akka.Actor.TaskBasedScheduler" + implementation = "Akka.Actor.DedicatedThreadScheduler" # When shutting down the scheduler, there will typically be a thread which # needs to be stopped, and this timeout determines how long to wait for From 414fd3ca7421d8438b36b527391ec77534cbc142 Mon Sep 17 00:00:00 2001 From: Luke Tillman Date: Wed, 20 May 2015 16:09:01 -0600 Subject: [PATCH 48/66] Cassandra persistence plugin (issue #987) - Based partially on the great Scala version by Martin Krasser (https://github.com/krasserm/akka-persistence-cassandra) - Modified from Scala version: * Different schema for the Journal to try and avoid reading tombstones if using permanent deletions * Ensures batches of writes in the Journal will only go to one partition to ensure atomicity and isolation * Share ISession instance from DataStax driver across journal and session by default * Make ISession resolution pluggable (allows for more advanced creation options and possible DI integration) - Implementations for both Journal and Snapshot Store passing specs - Readme with instructions for setup - Separate task in build script for running tests --- build.fsx | 11 +- src/Akka.sln | 24 +- .../Akka.Persistence.Cassandra.Tests.csproj | 123 +++++ .../CassandraIntegrationSpec.cs | 455 ++++++++++++++++++ .../CassandraJournalSpec.cs | 26 + .../CassandraSnapshotStoreSpec.cs | 26 + .../Properties/AssemblyInfo.cs | 18 + .../TestSetupHelpers.cs | 33 ++ .../packages.config | 10 + .../Akka.Persistence.Cassandra.csproj | 116 +++++ .../Akka.Persistence.Cassandra.nuspec | 20 + .../CassandraExtension.cs | 47 ++ .../CassandraPersistence.cs | 23 + .../CassandraSettings.cs | 77 +++ .../ExtensionMethods.cs | 40 ++ .../Journal/CassandraJournal.cs | 375 +++++++++++++++ .../Journal/CassandraJournalSettings.cs | 27 ++ .../Journal/JournalStatements.cs | 63 +++ .../Properties/AssemblyInfo.cs | 18 + .../Akka.Persistence.Cassandra/README.md | 224 +++++++++ .../SessionManagement/CassandraSession.cs | 17 + .../DefaultSessionManager.cs | 55 +++ .../SessionManagement/IManageSessions.cs | 22 + .../SessionManagement/SessionSettings.cs | 67 +++ .../Snapshot/CassandraSnapshotStore.cs | 251 ++++++++++ .../CassandraSnapshotStoreSettings.cs | 22 + .../Snapshot/SnapshotStoreStatements.cs | 38 ++ .../packages.config | 5 + .../Akka.Persistence.Cassandra/reference.conf | 95 ++++ 29 files changed, 2326 insertions(+), 2 deletions(-) create mode 100644 
src/contrib/persistence/Akka.Persistence.Cassandra.Tests/Akka.Persistence.Cassandra.Tests.csproj create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraIntegrationSpec.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraJournalSpec.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraSnapshotStoreSpec.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra.Tests/Properties/AssemblyInfo.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra.Tests/TestSetupHelpers.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra.Tests/packages.config create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/Akka.Persistence.Cassandra.csproj create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/Akka.Persistence.Cassandra.nuspec create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/CassandraExtension.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/CassandraPersistence.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/CassandraSettings.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/ExtensionMethods.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/Journal/CassandraJournal.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/Journal/CassandraJournalSettings.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/Journal/JournalStatements.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/Properties/AssemblyInfo.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/README.md create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/CassandraSession.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/DefaultSessionManager.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/IManageSessions.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/SessionSettings.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/CassandraSnapshotStore.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/CassandraSnapshotStoreSettings.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/SnapshotStoreStatements.cs create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/packages.config create mode 100644 src/contrib/persistence/Akka.Persistence.Cassandra/reference.conf diff --git a/build.fsx b/build.fsx index 6d7a3a0c3d2..1c17c8d3690 100644 --- a/build.fsx +++ b/build.fsx @@ -218,7 +218,8 @@ Target "RunTests" <| fun _ -> "src/**/bin/Release/Akka.TestKit.VsTest.Tests.dll" -- "src/**/bin/Release/Akka.TestKit.NUnit.Tests.dll" -- "src/**/bin/Release/Akka.Persistence.SqlServer.Tests.dll" -- - "src/**/bin/Release/Akka.Persistence.PostgreSql.Tests.dll" + "src/**/bin/Release/Akka.Persistence.PostgreSql.Tests.dll" -- + "src/**/bin/Release/Akka.Persistence.Cassandra.Tests.dll" mkdir testOutput @@ -280,6 +281,14 @@ Target "RunPostgreSqlTests" <| fun _ -> (fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath }) postgreSqlTests +Target "RunCassandraTests" <| fun _ -> + let cassandraTests = !! 
"src/**/bin/Release/Akka.Persistence.Cassandra.Tests.dll" + let xunitToolPath = findToolInSubPath "xunit.console.exe" "src/packages/xunit.runner.console*/tools" + printfn "Using XUnit runner: %s" xunitToolPath + xUnit2 + (fun p -> { p with OutputDir = testOutput; ToolPath = xunitToolPath }) + cassandraTests + //-------------------------------------------------------------------------------- // Nuget targets //-------------------------------------------------------------------------------- diff --git a/src/Akka.sln b/src/Akka.sln index 334107d1972..c303e0ff85d 100644 --- a/src/Akka.sln +++ b/src/Akka.sln @@ -1,7 +1,7 @@  Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio 2013 -VisualStudioVersion = 12.0.31101.0 +VisualStudioVersion = 12.0.30723.0 MinimumVisualStudioVersion = 10.0.40219.1 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Examples", "Examples", "{69279534-1DBA-4115-BF8B-03F77FC8125E}" EndProject @@ -200,6 +200,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.MultiNodeTests", "core EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.Sql.Common", "contrib\persistence\Akka.Persistence.Sql.Common\Akka.Persistence.Sql.Common.csproj", "{3B9E6211-9488-4DB5-B714-24248693B38F}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.Cassandra", "contrib\persistence\Akka.Persistence.Cassandra\Akka.Persistence.Cassandra.csproj", "{54BD0B45-8A46-4194-8C33-AD287CAC8FA4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.Cassandra.Tests", "contrib\persistence\Akka.Persistence.Cassandra.Tests\Akka.Persistence.Cassandra.Tests.csproj", "{1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.PostgreSql", "contrib\persistence\Akka.Persistence.PostgreSql\Akka.Persistence.PostgreSql.csproj", "{4B89227B-5AD1-4061-816F-570067C3727F}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.PostgreSql.Tests", "contrib\persistence\Akka.Persistence.PostgreSql.Tests\Akka.Persistence.PostgreSql.Tests.csproj", "{2D1812FD-70C0-43EE-9C25-3980E41F30E1}" @@ -753,6 +757,22 @@ Global {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release Mono|Any CPU.Build.0 = Release|Any CPU {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release|Any CPU.ActiveCfg = Release|Any CPU {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release|Any CPU.Build.0 = Release|Any CPU + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Release Mono|Any CPU.Build.0 = Release|Any CPU + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Release|Any CPU.Build.0 = Release|Any CPU + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Release Mono|Any CPU.ActiveCfg = 
Release|Any CPU + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Release Mono|Any CPU.Build.0 = Release|Any CPU + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -844,5 +864,7 @@ Global {3B9E6211-9488-4DB5-B714-24248693B38F} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} {4B89227B-5AD1-4061-816F-570067C3727F} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} {2D1812FD-70C0-43EE-9C25-3980E41F30E1} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} EndGlobalSection EndGlobal diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/Akka.Persistence.Cassandra.Tests.csproj b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/Akka.Persistence.Cassandra.Tests.csproj new file mode 100644 index 00000000000..1bb8bcd515c --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/Akka.Persistence.Cassandra.Tests.csproj @@ -0,0 +1,123 @@ + + + + + + Debug + AnyCPU + {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4} + Library + Properties + Akka.Persistence.Cassandra.Tests + Akka.Persistence.Cassandra.Tests + v4.5 + 512 + ..\..\..\ + true + 176cb39d + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\..\..\packages\CassandraCSharpDriver.2.5.2\lib\net40\Cassandra.dll + True + + + ..\..\..\packages\lz4net.1.0.5.93\lib\net40-client\LZ4.dll + True + + + + + + + + + + ..\..\..\packages\xunit.abstractions.2.0.0\lib\net35\xunit.abstractions.dll + True + + + ..\..\..\packages\xunit.assert.2.0.0\lib\portable-net45+win+wpa81+wp80+monotouch+monoandroid+Xamarin.iOS\xunit.assert.dll + True + + + ..\..\..\packages\xunit.extensibility.core.2.0.0\lib\portable-net45+win+wpa81+wp80+monotouch+monoandroid+Xamarin.iOS\xunit.core.dll + True + + + + + Properties\SharedAssemblyInfo.cs + + + + + + + + + + {ad9418b6-c452-4169-94fb-d43de0bfa966} + Akka.Persistence.TestKit + + + {fca84dea-c118-424b-9eb8-34375dfef18a} + Akka.Persistence + + + {0d3cbad0-bbdb-43e5-afc4-ed1d3ecdc224} + Akka.TestKit + + + {5deddf90-37f0-48d3-a0b0-a5cbd8a7e377} + Akka + + + {7dbd5c17-5e9d-40c4-9201-d092751532a7} + Akka.TestKit.Xunit2 + + + {54bd0b45-8a46-4194-8c33-ad287cac8fa4} + Akka.Persistence.Cassandra + + + + + + + + + + + + + This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + + + + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraIntegrationSpec.cs b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraIntegrationSpec.cs new file mode 100644 index 00000000000..a72465c63e9 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraIntegrationSpec.cs @@ -0,0 +1,455 @@ +using System; +using Akka.Actor; +using Akka.Configuration; +using Akka.TestKit; +using Akka.Util.Internal; +using Xunit; + +namespace Akka.Persistence.Cassandra.Tests +{ + /// + /// Some integration tests for Cassandra Journal and Snapshot plugins. 
+ /// + public class CassandraIntegrationSpec : Akka.TestKit.Xunit2.TestKit + { + private static readonly Config IntegrationConfig = ConfigurationFactory.ParseString(@" + akka.persistence.journal.plugin = ""cassandra-journal"" + akka.persistence.snapshot-store.plugin = ""cassandra-snapshot-store"" + akka.persistence.publish-plugin-commands = on + akka.test.single-expect-default = 10s + cassandra-journal.partition-size = 5 + cassandra-journal.max-result-size = 3 + "); + + // Static so that each test run gets a different Id number + private static readonly AtomicCounter ActorIdCounter = new AtomicCounter(); + + private readonly string _actorId; + + public CassandraIntegrationSpec() + : base(IntegrationConfig, "CassandraIntegration") + { + TestSetupHelpers.ResetJournalData(Sys); + TestSetupHelpers.ResetSnapshotStoreData(Sys); + + // Increment actor Id with each test that's run + int id = ActorIdCounter.IncrementAndGet(); + _actorId = string.Format("p{0}", id); + } + + [Fact] + public void Cassandra_journal_should_write_and_replay_messages() + { + // Start a persistence actor and write some messages to it + var actor1 = Sys.ActorOf(Props.Create(_actorId)); + WriteAndVerifyMessages(actor1, 1L, 16L); + + // Now start a new instance (same persistence Id) and it should recover with those same messages + var actor2 = Sys.ActorOf(Props.Create(_actorId)); + for (long i = 1L; i <= 16L; i++) + { + string msg = string.Format("a-{0}", i); + ExpectHandled(msg, i, true); + } + + // We should then be able to send that actor another message and have it be persisted + actor2.Tell("b"); + ExpectHandled("b", 17L, false); + } + + [Theory] + [InlineData(true)] + [InlineData(false)] + public void Cassandra_journal_should_not_replay_deleted_messages(bool permanentDelete) + { + // Listen for delete messages on the event stream + TestProbe deleteProbe = CreateTestProbe(); + Sys.EventStream.Subscribe(deleteProbe.Ref, typeof (DeleteMessagesTo)); + + var actor1 = Sys.ActorOf(Props.Create(_actorId)); + WriteAndVerifyMessages(actor1, 1L, 16L); + + // Tell the actor to delete some messages and make sure it's finished + actor1.Tell(new DeleteToCommand(3L, permanentDelete)); + deleteProbe.ExpectMsg(); + + // Start a second copy of the actor and verify it starts replaying from the correct spot + Sys.ActorOf(Props.Create(_actorId)); + for (long i = 4L; i <= 16L; i++) + { + string msg = string.Format("a-{0}", i); + ExpectHandled(msg, i, true); + } + + // Delete some more messages and wait for confirmation + actor1.Tell(new DeleteToCommand(7L, permanentDelete)); + deleteProbe.ExpectMsg(); + + // Start another copy and verify playback again + Sys.ActorOf(Props.Create(_actorId)); + for (long i = 8L; i <= 16L; i++) + { + string msg = string.Format("a-{0}", i); + ExpectHandled(msg, i, true); + } + } + + [Fact] + public void Cassandra_journal_should_replay_message_incrementally() + { + // Write some messages to a Persistent Actor + var actor = Sys.ActorOf(Props.Create(_actorId)); + WriteAndVerifyMessages(actor, 1L, 6L); + + TestProbe probe = CreateTestProbe(); + + // Create a persistent view from the actor that does not do auto-updating + var view = Sys.ActorOf(Props.Create(_actorId + "-view", _actorId, probe.Ref)); + probe.ExpectNoMsg(200); + + // Tell the view to update and verify we get the messages we wrote earlier replayed + view.Tell(new Update(true, 3L)); + probe.ExpectMsg("a-1"); + probe.ExpectMsg("a-2"); + probe.ExpectMsg("a-3"); + probe.ExpectNoMsg(200); + + // Update the view again and verify we get the rest of the 
messages + view.Tell(new Update(true, 3L)); + probe.ExpectMsg("a-4"); + probe.ExpectMsg("a-5"); + probe.ExpectMsg("a-6"); + probe.ExpectNoMsg(200); + } + + [Fact] + public void Persistent_actor_should_recover_from_a_snapshot_with_follow_up_messages() + { + // Write a message, snapshot, then another follow-up message + var actor1 = Sys.ActorOf(Props.Create(_actorId, TestActor)); + actor1.Tell("a"); + ExpectHandled("a", 1, false); + actor1.Tell("snap"); + ExpectMsg("snapped-a-1"); + actor1.Tell("b"); + ExpectHandled("b", 2, false); + + // Start the actor again and verify we get a snapshot, followed by the message that wasn't in the snapshot + var actor2 = Sys.ActorOf(Props.Create(_actorId, TestActor)); + ExpectMsg("offered-a-1"); + ExpectHandled("b", 2, true); + } + + [Fact] + public void Persistent_actor_should_recover_from_a_snapshot_with_follow_up_messages_and_an_upper_bound() + { + // Create an actor and trigger manual recovery so it will accept new messages + var actor1 = Sys.ActorOf(Props.Create(_actorId, TestActor)); + actor1.Tell(new Recover(SnapshotSelectionCriteria.None)); + + // Write a message, snapshot, then write some follow-up messages + actor1.Tell("a"); + ExpectHandled("a", 1, false); + actor1.Tell("snap"); + ExpectMsg("snapped-a-1"); + WriteSameMessageAndVerify(actor1, "a", 2L, 7L); + + // Create another copy of that actor and manually recover to an upper bound (i.e. past state) and verify + // we get the expected messages after the snapshot + var actor2 = Sys.ActorOf(Props.Create(_actorId, TestActor)); + actor2.Tell(new Recover(SnapshotSelectionCriteria.Latest, toSequenceNr: 3L)); + ExpectMsg("offered-a-1"); + ExpectHandled("a", 2, true); + ExpectHandled("a", 3, true); + + // Should continue working after recovery to previous state, but highest sequence number should take into + // account other messages that were written but not replayed + actor2.Tell("d"); + ExpectHandled("d", 8L, false); + } + + [Fact] + public void Persistent_actor_should_recover_from_a_snapshot_without_follow_up_messages_inside_a_partition() + { + // Write a message, then snapshot, no follow-up messages after snapshot + var actor1 = Sys.ActorOf(Props.Create(_actorId, TestActor)); + actor1.Tell("a"); + ExpectHandled("a", 1L, false); + actor1.Tell("snap"); + ExpectMsg("snapped-a-1"); + + // Start another copy and verify we recover with the snapshot + var actor2 = Sys.ActorOf(Props.Create(_actorId, TestActor)); + ExpectMsg("offered-a-1"); + + // Write another message to verify + actor2.Tell("b"); + ExpectHandled("b", 2L, false); + } + + [Fact] + public void Persistent_actor_should_recover_from_a_snapshot_without_follow_up_messages_at_a_partition_boundary_where_next_partition_is_invalid() + { + // Partition size for tests is 5 (see Config above), so write messages up to partition boundary (but don't write any + // messages to the next partition) + var actor1 = Sys.ActorOf(Props.Create(_actorId, TestActor)); + WriteSameMessageAndVerify(actor1, "a", 1L, 5L); + + // Snapshot and verify without any follow-up messages + actor1.Tell("snap"); + ExpectMsg("snapped-a-5"); + + // Create a second copy of that actor and verify it recovers from the snapshot and continues working + var actor2 = Sys.ActorOf(Props.Create(_actorId, TestActor)); + ExpectMsg("offered-a-5"); + actor2.Tell("b"); + ExpectHandled("b", 6L, false); + } + + /// + /// Write messages "a-xxx" where xxx is an index number from start to end and verify that each message returns + /// a Handled response. 
+ /// + private void WriteAndVerifyMessages(IActorRef persistentActor, long start, long end) + { + for (long i = start; i <= end; i++) + { + string msg = string.Format("a-{0}", i); + persistentActor.Tell(msg, TestActor); + ExpectHandled(msg, i, false); + } + } + + /// + /// Writes the same message multiple times and verify that we get a Handled response. + /// + private void WriteSameMessageAndVerify(IActorRef persistentActor, string message, long start, long end) + { + for (long i = start; i <= end; i++) + { + persistentActor.Tell(message, TestActor); + ExpectHandled(message, i, false); + } + } + + private void ExpectHandled(string message, long sequenceNumber, bool isRecovering) + { + object msg = ReceiveOne(); + var handledMsg = Assert.IsType(msg); + Assert.Equal(message, handledMsg.Message); + Assert.Equal(sequenceNumber, handledMsg.SequenceNumber); + Assert.Equal(isRecovering, handledMsg.IsRecovering); + } + + #region Test Messages and Actors + + [Serializable] + public class DeleteToCommand + { + public long SequenceNumber { get; private set; } + public bool Permanent { get; private set; } + + public DeleteToCommand(long sequenceNumber, bool permanent) + { + SequenceNumber = sequenceNumber; + Permanent = permanent; + } + } + + [Serializable] + public class HandledMessage + { + public string Message { get; private set; } + public long SequenceNumber { get; private set; } + public bool IsRecovering { get; private set; } + + public HandledMessage(string message, long sequenceNumber, bool isRecovering) + { + Message = message; + SequenceNumber = sequenceNumber; + IsRecovering = isRecovering; + } + } + + public class PersistentActorA : PersistentActor + { + private readonly string _persistenceId; + + public PersistentActorA(string persistenceId) + { + _persistenceId = persistenceId; + } + + public override string PersistenceId + { + get { return _persistenceId; } + } + + protected override bool ReceiveRecover(object message) + { + if (message is string) + { + var payload = (string) message; + Handle(payload); + return true; + } + + return false; + } + + protected override bool ReceiveCommand(object message) + { + if (message is DeleteToCommand) + { + var delete = (DeleteToCommand) message; + DeleteMessages(delete.SequenceNumber, delete.Permanent); + return true; + } + + if (message is string) + { + var payload = (string) message; + Persist(payload, Handle); + return true; + } + + return false; + } + + private void Handle(string payload) + { + Context.Sender.Tell(new HandledMessage(payload, LastSequenceNr, IsRecovering), Self); + } + } + + public class PersistentActorC : PersistentActor + { + private readonly string _persistenceId; + private readonly IActorRef _probe; + + private string _last; + + public override string PersistenceId + { + get { return _persistenceId; } + } + + public PersistentActorC(string persistenceId, IActorRef probe) + { + _persistenceId = persistenceId; + _probe = probe; + } + + protected override bool ReceiveRecover(object message) + { + if (message is SnapshotOffer) + { + var offer = (SnapshotOffer) message; + _last = (string) offer.Snapshot; + _probe.Tell(string.Format("offered-{0}", _last)); + return true; + } + + if (message is string) + { + var payload = (string) message; + Handle(payload); + return true; + } + + return false; + } + + protected override bool ReceiveCommand(object message) + { + if (message is string) + { + var msg = (string) message; + if (msg == "snap") + SaveSnapshot(_last); + else + Persist(msg, Handle); + + return true; + } + + if (message 
is SaveSnapshotSuccess) + { + _probe.Tell(string.Format("snapped-{0}", _last), Context.Sender); + return true; + } + + if (message is DeleteToCommand) + { + var delete = (DeleteToCommand) message; + DeleteMessages(delete.SequenceNumber, delete.Permanent); + return true; + } + + return false; + } + + private void Handle(string payload) + { + _last = string.Format("{0}-{1}", payload, LastSequenceNr); + _probe.Tell(new HandledMessage(payload, LastSequenceNr, IsRecovering)); + } + } + + public class PersistentActorCWithManualRecovery : PersistentActorC + { + public PersistentActorCWithManualRecovery(string persistenceId, IActorRef probe) + : base(persistenceId, probe) + { + } + + protected override void PreRestart(Exception reason, object message) + { + // Don't do automatic recovery + } + } + + public class ViewA : PersistentView + { + private readonly string _viewId; + private readonly string _persistenceId; + private readonly IActorRef _probe; + + public override string ViewId + { + get { return _viewId; } + } + + public override string PersistenceId + { + get { return _persistenceId; } + } + + public override bool IsAutoUpdate + { + get { return false; } + } + + public override long AutoUpdateReplayMax + { + get { return 0L; } + } + + public ViewA(string viewId, string persistenceId, IActorRef probe) + { + _viewId = viewId; + _persistenceId = persistenceId; + _probe = probe; + } + + protected override bool Receive(object message) + { + // Just forward messages to the test probe + _probe.Tell(message, Context.Sender); + return true; + } + } + + #endregion + } +} diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraJournalSpec.cs b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraJournalSpec.cs new file mode 100644 index 00000000000..8a8da9bf797 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraJournalSpec.cs @@ -0,0 +1,26 @@ +using Akka.Configuration; +using Akka.Persistence.TestKit.Journal; + +namespace Akka.Persistence.Cassandra.Tests +{ + public class CassandraJournalSpec : JournalSpec + { + private static readonly Config JournalConfig = ConfigurationFactory.ParseString(@" + akka.persistence.journal.plugin = ""cassandra-journal"" + akka.test.single-expect-default = 10s + "); + + public CassandraJournalSpec() + : base(JournalConfig, "CassandraJournalSystem") + { + TestSetupHelpers.ResetJournalData(Sys); + Initialize(); + } + + protected override void Dispose(bool disposing) + { + TestSetupHelpers.ResetJournalData(Sys); + base.Dispose(disposing); + } + } +} diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraSnapshotStoreSpec.cs b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraSnapshotStoreSpec.cs new file mode 100644 index 00000000000..ffde5b4749b --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/CassandraSnapshotStoreSpec.cs @@ -0,0 +1,26 @@ +using Akka.Configuration; +using Akka.Persistence.TestKit.Snapshot; + +namespace Akka.Persistence.Cassandra.Tests +{ + public class CassandraSnapshotStoreSpec : SnapshotStoreSpec + { + private static readonly Config SnapshotConfig = ConfigurationFactory.ParseString(@" + akka.persistence.snapshot-store.plugin = ""cassandra-snapshot-store"" + akka.test.single-expect-default = 10s + "); + + public CassandraSnapshotStoreSpec() + : base(SnapshotConfig, "CassandraSnapshotSystem") + { + TestSetupHelpers.ResetSnapshotStoreData(Sys); + Initialize(); + } + + protected override void Dispose(bool disposing) + { 
+ TestSetupHelpers.ResetSnapshotStoreData(Sys); + base.Dispose(disposing); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/Properties/AssemblyInfo.cs b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..5aaeae06581 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/Properties/AssemblyInfo.cs @@ -0,0 +1,18 @@ +using System.Reflection; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Akka.Persistence.Cassandra.Tests")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyProduct("Akka.Persistence.Cassandra.Tests")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("64d0cc80-1160-4ae7-89fe-304252260860")] \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/TestSetupHelpers.cs b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/TestSetupHelpers.cs new file mode 100644 index 00000000000..40e6452a973 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/TestSetupHelpers.cs @@ -0,0 +1,33 @@ +using Akka.Actor; +using Cassandra; + +namespace Akka.Persistence.Cassandra.Tests +{ + /// + /// Some static helper methods for resetting Cassandra between tests or test contexts. 
+ /// + public static class TestSetupHelpers + { + public static void ResetJournalData(ActorSystem sys) + { + // Get or add the extension + var ext = CassandraPersistence.Instance.Apply(sys); + + // Use session to remove keyspace + ISession session = ext.SessionManager.ResolveSession(ext.JournalSettings.SessionKey); + session.DeleteKeyspaceIfExists(ext.JournalSettings.Keyspace); + ext.SessionManager.ReleaseSession(session); + } + + public static void ResetSnapshotStoreData(ActorSystem sys) + { + // Get or add the extension + var ext = CassandraPersistence.Instance.Apply(sys); + + // Use session to remove the keyspace + ISession session = ext.SessionManager.ResolveSession(ext.SnapshotStoreSettings.SessionKey); + session.DeleteKeyspaceIfExists(ext.SnapshotStoreSettings.Keyspace); + ext.SessionManager.ReleaseSession(session); + } + } +} diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/packages.config b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/packages.config new file mode 100644 index 00000000000..1c8896e228a --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra.Tests/packages.config @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/Akka.Persistence.Cassandra.csproj b/src/contrib/persistence/Akka.Persistence.Cassandra/Akka.Persistence.Cassandra.csproj new file mode 100644 index 00000000000..d74db9b03a2 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/Akka.Persistence.Cassandra.csproj @@ -0,0 +1,116 @@ + + + + + Debug + AnyCPU + {54BD0B45-8A46-4194-8C33-AD287CAC8FA4} + Library + Properties + Akka.Persistence.Cassandra + Akka.Persistence.Cassandra + v4.5 + 512 + ..\..\..\ + true + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\..\..\packages\CassandraCSharpDriver.2.5.2\lib\net40\Cassandra.dll + + + ..\..\..\packages\lz4net.1.0.5.93\lib\net40-client\LZ4.dll + + + + + + + + + + + + Properties\SharedAssemblyInfo.cs + + + Code + + + Code + + + + + Code + + + + Code + + + + Code + + + Code + + + + + Code + + + Code + + + + + + + + + + + + {fca84dea-c118-424b-9eb8-34375dfef18a} + Akka.Persistence + + + {5deddf90-37f0-48d3-a0b0-a5cbd8a7e377} + Akka + + + + + + + This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. 
+ + + + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/Akka.Persistence.Cassandra.nuspec b/src/contrib/persistence/Akka.Persistence.Cassandra/Akka.Persistence.Cassandra.nuspec new file mode 100644 index 00000000000..6732708c6d9 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/Akka.Persistence.Cassandra.nuspec @@ -0,0 +1,20 @@ + + + + @project@ + @project@@title@ + @build.number@ + @authors@ + @authors@ + Cassandra Persistence support for Akka.NET + https://github.com/akkadotnet/akka.net/blob/master/LICENSE + https://github.com/akkadotnet/akka.net + https://raw.githubusercontent.com/akkadotnet/akka.net/gh-pages/images/icon-32x32.png + false + @releaseNotes@ + @copyright@ + @tags@ Persistence Cassandra + @dependencies@ + @references@ + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/CassandraExtension.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/CassandraExtension.cs new file mode 100644 index 00000000000..f0374de7a84 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/CassandraExtension.cs @@ -0,0 +1,47 @@ +using System; +using Akka.Actor; +using Akka.Persistence.Cassandra.Journal; +using Akka.Persistence.Cassandra.SessionManagement; +using Akka.Persistence.Cassandra.Snapshot; + +namespace Akka.Persistence.Cassandra +{ + /// + /// An Akka.NET extension for Cassandra persistence. + /// + public class CassandraExtension : IExtension + { + /// + /// The settings for the Cassandra journal. + /// + public CassandraJournalSettings JournalSettings { get; private set; } + + /// + /// The settings for the Cassandra snapshot store. + /// + public CassandraSnapshotStoreSettings SnapshotStoreSettings { get; private set; } + + /// + /// The session manager for resolving session instances. + /// + public IManageSessions SessionManager { get; private set; } + + public CassandraExtension(ExtendedActorSystem system) + { + if (system == null) throw new ArgumentNullException("system"); + + // Initialize fallback configuration defaults + system.Settings.InjectTopLevelFallback(CassandraPersistence.DefaultConfig()); + + // Get or add the session manager + SessionManager = CassandraSession.Instance.Apply(system); + + // Read config + var journalConfig = system.Settings.Config.GetConfig("cassandra-journal"); + JournalSettings = new CassandraJournalSettings(journalConfig); + + var snapshotConfig = system.Settings.Config.GetConfig("cassandra-snapshot-store"); + SnapshotStoreSettings = new CassandraSnapshotStoreSettings(snapshotConfig); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/CassandraPersistence.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/CassandraPersistence.cs new file mode 100644 index 00000000000..2d8305438f7 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/CassandraPersistence.cs @@ -0,0 +1,23 @@ +using Akka.Actor; +using Akka.Configuration; + +namespace Akka.Persistence.Cassandra +{ + /// + /// Extension Id provider for the Cassandra Persistence extension. 
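As a usage sketch mirroring the test helpers earlier in this patch, application code can obtain the extension registered by this provider and borrow the shared session; the ad-hoc work in the middle is a placeholder.

```csharp
using Akka.Actor;
using Akka.Persistence.Cassandra;
using Cassandra;

var system = ActorSystem.Create("demo");

// Get or register the Cassandra persistence extension on the actor system.
CassandraExtension ext = CassandraPersistence.Instance.Apply(system);

// Resolve the ISession the journal is configured to use, run something, release it.
ISession session = ext.SessionManager.ResolveSession(ext.JournalSettings.SessionKey);
// ... execute ad-hoc statements against the same cluster the journal uses ...
ext.SessionManager.ReleaseSession(session);
```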
+ /// + public class CassandraPersistence : ExtensionIdProvider + { + public static readonly CassandraPersistence Instance = new CassandraPersistence(); + + public override CassandraExtension CreateExtension(ExtendedActorSystem system) + { + return new CassandraExtension(system); + } + + public static Config DefaultConfig() + { + return ConfigurationFactory.FromResource("Akka.Persistence.Cassandra.reference.conf"); + } + } +} diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/CassandraSettings.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/CassandraSettings.cs new file mode 100644 index 00000000000..87c424fcbb9 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/CassandraSettings.cs @@ -0,0 +1,77 @@ +using System; +using Akka.Configuration; +using Cassandra; + +namespace Akka.Persistence.Cassandra +{ + /// + /// Abstract class for parsing common settings used by both the Journal and Snapshot store from HOCON configuration. + /// + public abstract class CassandraSettings + { + /// + /// The name (key) of the session to use when resolving an ISession instance. When using default session management, + /// this points at configuration under the "cassandra-sessions" section where the session's configuration is found. + /// + public string SessionKey { get; private set; } + + /// + /// The keyspace to be created/used. + /// + public string Keyspace { get; private set; } + + /// + /// A string to be appended to the CREATE KEYSPACE statement after the WITH clause when the keyspace is + /// automatically created. Use this to define options like replication strategy. + /// + public string KeyspaceCreationOptions { get; private set; } + + /// + /// When true the plugin will automatically try to create the keyspace if it doesn't already exist on start. + /// + public bool KeyspaceAutocreate { get; private set; } + + /// + /// Name of the table to be created/used. + /// + public string Table { get; private set; } + + /// + /// A string to be appended to the CREATE TABLE statement after the WITH clause. Use this to define things + /// like gc_grace_seconds or one of the many other table options. + /// + public string TableCreationProperties { get; private set; } + + /// + /// Consistency level for reads. + /// + public ConsistencyLevel ReadConsistency { get; private set; } + + /// + /// Consistency level for writes. 
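For reference, a hedged configuration sketch using the key names parsed by CassandraSettings below and the plugin paths used in the integration spec above; the values shown are illustrative examples, not the shipped defaults from reference.conf.

```csharp
using Akka.Actor;
using Akka.Configuration;

// Key names come from CassandraSettings (below); values here are examples only.
var config = ConfigurationFactory.ParseString(@"
    akka.persistence.journal.plugin = ""cassandra-journal""
    akka.persistence.snapshot-store.plugin = ""cassandra-snapshot-store""

    cassandra-journal {
        keyspace = ""akka""
        table = ""messages""
        keyspace-autocreate = true
        read-consistency = ""Quorum""
        write-consistency = ""Quorum""
    }
");

var system = ActorSystem.Create("cassandra-demo", config);
```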
+        /// 
+        public ConsistencyLevel WriteConsistency { get; private set; }
+
+        protected CassandraSettings(Config config)
+        {
+            SessionKey = config.GetString("session-key");
+
+            Keyspace = config.GetString("keyspace");
+            KeyspaceCreationOptions = config.GetString("keyspace-creation-options");
+            KeyspaceAutocreate = config.GetBoolean("keyspace-autocreate");
+
+            Table = config.GetString("table");
+            TableCreationProperties = config.GetString("table-creation-properties");
+
+            // Quote keyspace and table if necessary
+            if (config.GetBoolean("use-quoted-identifiers"))
+            {
+                Keyspace = string.Format("\"{0}\"", Keyspace);
+                Table = string.Format("\"{0}\"", Table);
+            }
+
+            ReadConsistency = (ConsistencyLevel) Enum.Parse(typeof(ConsistencyLevel), config.GetString("read-consistency"), true);
+            WriteConsistency = (ConsistencyLevel) Enum.Parse(typeof(ConsistencyLevel), config.GetString("write-consistency"), true);
+        }
+    }
+}
diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/ExtensionMethods.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/ExtensionMethods.cs
new file mode 100644
index 00000000000..e390b4c6c06
--- /dev/null
+++ b/src/contrib/persistence/Akka.Persistence.Cassandra/ExtensionMethods.cs
@@ -0,0 +1,40 @@
+using System;
+using Akka.Actor;
+using Cassandra;
+
+namespace Akka.Persistence.Cassandra
+{
+    /// 
+    /// Extension methods used by the Cassandra persistence plugin.
+    /// 
+    internal static class ExtensionMethods
+    {
+        /// 
+        /// Gets the PersistenceExtension instance registered with the ActorSystem. Throws an InvalidOperationException if not found.
+        /// 
+        internal static PersistenceExtension PersistenceExtension(this ActorSystem system)
+        {
+            var ext = system.GetExtension<PersistenceExtension>();
+            if (ext == null)
+                throw new InvalidOperationException("Persistence extension not found.");
+
+            return ext;
+        }
+
+        /// 
+        /// Converts a Type to a string representation that can be stored in Cassandra.
+        /// 
+        internal static string ToQualifiedString(this Type t)
+        {
+            return string.Format("{0}, {1}", t.FullName, t.Assembly.GetName().Name);
+        }
+
+        /// 
+        /// Prepares a CQL string with format arguments using the session.
+        /// 
+        internal static PreparedStatement PrepareFormat(this ISession session, string cqlFormatString, params object[] args)
+        {
+            return session.Prepare(string.Format(cqlFormatString, args));
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/Journal/CassandraJournal.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/Journal/CassandraJournal.cs
new file mode 100644
index 00000000000..4f9292a77a2
--- /dev/null
+++ b/src/contrib/persistence/Akka.Persistence.Cassandra/Journal/CassandraJournal.cs
@@ -0,0 +1,375 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading.Tasks;
+using Akka.Configuration;
+using Akka.Persistence.Journal;
+using Akka.Serialization;
+using Cassandra;
+
+namespace Akka.Persistence.Cassandra.Journal
+{
+    /// 
+    /// An Akka.NET journal implementation that writes events asynchronously to Cassandra.
+    /// 
+    public class CassandraJournal : AsyncWriteJournal
+    {
+        private const string InvalidPartitionSizeException =
+            "Partition size cannot change after initial table creation. 
(Value at creation: {0}, Currently configured value in Akka configuration: {1})"; + + private static readonly Type PersistentRepresentationType = typeof (IPersistentRepresentation); + + private readonly CassandraExtension _cassandraExtension; + private readonly Serializer _serializer; + private readonly int _maxDeletionBatchSize; + + private ISession _session; + private PreparedStatement _writeMessage; + private PreparedStatement _writeHeader; + private PreparedStatement _selectHeaderSequence; + private PreparedStatement _selectLastMessageSequence; + private PreparedStatement _selectMessages; + private PreparedStatement _writeDeleteMarker; + private PreparedStatement _deleteMessagePermanent; + private PreparedStatement _selectDeletedToSequence; + private PreparedStatement _selectConfigurationValue; + private PreparedStatement _writeConfigurationValue; + + public CassandraJournal() + { + _cassandraExtension = CassandraPersistence.Instance.Apply(Context.System); + _serializer = Context.System.Serialization.FindSerializerForType(PersistentRepresentationType); + + // Use setting from the persistence extension when batch deleting + PersistenceExtension persistence = Context.System.PersistenceExtension(); + _maxDeletionBatchSize = persistence.Settings.Journal.MaxDeletionBatchSize; + } + + protected override void PreStart() + { + base.PreStart(); + + // Create session + CassandraJournalSettings settings = _cassandraExtension.JournalSettings; + _session = _cassandraExtension.SessionManager.ResolveSession(settings.SessionKey); + + // Create keyspace if necessary and always try to create table + if (settings.KeyspaceAutocreate) + _session.Execute(string.Format(JournalStatements.CreateKeyspace, settings.Keyspace, settings.KeyspaceCreationOptions)); + + var fullyQualifiedTableName = string.Format("{0}.{1}", settings.Keyspace, settings.Table); + + string createTable = string.IsNullOrWhiteSpace(settings.TableCreationProperties) + ? 
string.Format(JournalStatements.CreateTable, fullyQualifiedTableName, string.Empty, string.Empty) + : string.Format(JournalStatements.CreateTable, fullyQualifiedTableName, " WITH ", + settings.TableCreationProperties); + _session.Execute(createTable); + + // Prepare some statements against C* + _writeMessage = _session.PrepareFormat(JournalStatements.WriteMessage, fullyQualifiedTableName); + _writeHeader = _session.PrepareFormat(JournalStatements.WriteHeader, fullyQualifiedTableName); + _selectHeaderSequence = _session.PrepareFormat(JournalStatements.SelectHeaderSequence, fullyQualifiedTableName); + _selectLastMessageSequence = _session.PrepareFormat(JournalStatements.SelectLastMessageSequence, fullyQualifiedTableName); + _selectMessages = _session.PrepareFormat(JournalStatements.SelectMessages, fullyQualifiedTableName); + _writeDeleteMarker = _session.PrepareFormat(JournalStatements.WriteDeleteMarker, fullyQualifiedTableName); + _deleteMessagePermanent = _session.PrepareFormat(JournalStatements.DeleteMessagePermanent, fullyQualifiedTableName); + _selectDeletedToSequence = _session.PrepareFormat(JournalStatements.SelectDeletedToSequence, fullyQualifiedTableName); + _selectConfigurationValue = _session.PrepareFormat(JournalStatements.SelectConfigurationValue, fullyQualifiedTableName); + _writeConfigurationValue = _session.PrepareFormat(JournalStatements.WriteConfigurationValue, fullyQualifiedTableName); + + // The partition size can only be set once (the first time the table is created) so see if it's already been set + long partitionSize = GetConfigurationValueOrDefault("partition-size", -1L); + if (partitionSize == -1L) + { + // Persist the partition size specified in the cluster settings + WriteConfigurationValue("partition-size", settings.PartitionSize); + } + else if (partitionSize != settings.PartitionSize) + { + throw new ConfigurationException(string.Format(InvalidPartitionSizeException, partitionSize, settings.PartitionSize)); + } + } + + public override async Task ReplayMessagesAsync(string persistenceId, long fromSequenceNr, long toSequenceNr, long max, + Action replayCallback) + { + long partitionNumber = GetPartitionNumber(fromSequenceNr); + + // A sequence number may have been moved to the next partition if it was part of a batch that was too large + // to write to a single partition + long maxPartitionNumber = GetPartitionNumber(toSequenceNr) + 1L; + long count = 0L; + + while (partitionNumber <= maxPartitionNumber && count < max) + { + // Check for header and deleted to sequence number in parallel + RowSet[] rowSets = await GetHeaderAndDeletedTo(persistenceId, partitionNumber).ConfigureAwait(false); + + // If header doesn't exist, just bail on the non-existent partition + if (rowSets[0].SingleOrDefault() == null) + return; + + // See what's been deleted in the partition and if no record found, just use long's min value + Row deletedToRow = rowSets[1].SingleOrDefault(); + long deletedTo = deletedToRow == null ? 
long.MinValue : deletedToRow.GetValue("sequence_number"); + + // Page through messages in the partition + bool hasNextPage = true; + byte[] pageState = null; + while (count < max && hasNextPage) + { + // Get next page from current partition + IStatement getRows = _selectMessages.Bind(persistenceId, partitionNumber, fromSequenceNr, toSequenceNr) + .SetConsistencyLevel(_cassandraExtension.JournalSettings.ReadConsistency) + .SetPageSize(_cassandraExtension.JournalSettings.MaxResultSize) + .SetPagingState(pageState) + .SetAutoPage(false); + + RowSet messageRows = await _session.ExecuteAsync(getRows).ConfigureAwait(false); + pageState = messageRows.PagingState; + hasNextPage = pageState != null; + IEnumerator messagesEnumerator = + messageRows.Select(row => MapRowToPersistentRepresentation(row, deletedTo)) + .GetEnumerator(); + + // Process page + while (count < max && messagesEnumerator.MoveNext()) + { + replayCallback(messagesEnumerator.Current); + count++; + } + } + + // Go to next partition + partitionNumber++; + } + } + + public override async Task ReadHighestSequenceNrAsync(string persistenceId, long fromSequenceNr) + { + fromSequenceNr = Math.Max(1L, fromSequenceNr); + long partitionNumber = GetPartitionNumber(fromSequenceNr); + long maxSequenceNumber = 0L; + while (true) + { + // Check for header and deleted to sequence number in parallel + RowSet[] rowSets = await GetHeaderAndDeletedTo(persistenceId, partitionNumber).ConfigureAwait(false); + + // If header doesn't exist, just bail on the non-existent partition + if (rowSets[0].SingleOrDefault() == null) + break; + + // See what's been deleted in the partition and if no record found, just use long's min value + Row deletedToRow = rowSets[1].SingleOrDefault(); + long deletedTo = deletedToRow == null ? long.MinValue : deletedToRow.GetValue("sequence_number"); + + // Try to avoid reading possible tombstones by skipping deleted records if higher than the fromSequenceNr provided + long from = Math.Max(fromSequenceNr, deletedTo); + + // Get the last sequence number in the partition, skipping deleted messages + IStatement getLastMessageSequence = _selectLastMessageSequence.Bind(persistenceId, partitionNumber, from) + .SetConsistencyLevel(_cassandraExtension.JournalSettings.ReadConsistency); + RowSet sequenceRows = await _session.ExecuteAsync(getLastMessageSequence).ConfigureAwait(false); + + // If there aren't any non-deleted messages, use the delete marker's value as the max, otherwise, use whatever value was returned + Row sequenceRow = sequenceRows.SingleOrDefault(); + maxSequenceNumber = sequenceRow == null ? 
Math.Max(maxSequenceNumber, deletedTo) : sequenceRow.GetValue("sequence_number"); + + // Go to next partition + partitionNumber++; + } + + return maxSequenceNumber; + } + + protected override Task WriteMessagesAsync(IEnumerable messages) + { + // It's implied by the API/docs that a batch of messages will be for a single persistence id + List messageList = messages.ToList(); + string persistenceId = messageList[0].PersistenceId; + + long seqNr = messageList[0].SequenceNr; + bool writeHeader = IsNewPartition(seqNr); + long partitionNumber = GetPartitionNumber(seqNr); + + if (messageList.Count > 1) + { + // See if this collection of writes would span multiple partitions and if so, move all the writes to the next partition + long lastMessagePartition = GetPartitionNumber(messageList[messageList.Count - 1].SequenceNr); + if (lastMessagePartition != partitionNumber) + { + partitionNumber = lastMessagePartition; + writeHeader = true; + } + } + + // No need for a batch if writing a single message + if (messageList.Count == 1 && writeHeader == false) + { + IPersistentRepresentation message = messageList[0]; + IStatement statement = _writeMessage.Bind(persistenceId, partitionNumber, message.SequenceNr, Serialize(message)) + .SetConsistencyLevel(_cassandraExtension.JournalSettings.WriteConsistency); + return _session.ExecuteAsync(statement); + } + + // Use a batch and add statements for each message + var batch = new BatchStatement(); + foreach (IPersistentRepresentation message in messageList) + { + batch.Add(_writeMessage.Bind(message.PersistenceId, partitionNumber, message.SequenceNr, Serialize(message))); + } + + // Add header if necessary + if (writeHeader) + batch.Add(_writeHeader.Bind(persistenceId, partitionNumber, seqNr)); + + batch.SetConsistencyLevel(_cassandraExtension.JournalSettings.WriteConsistency); + return _session.ExecuteAsync(batch); + } + + protected override async Task DeleteMessagesToAsync(string persistenceId, long toSequenceNr, bool isPermanent) + { + long maxPartitionNumber = GetPartitionNumber(toSequenceNr) + 1L; + long partitionNumber = 0L; + + while (partitionNumber <= maxPartitionNumber) + { + // Check for header and deleted to sequence number in parallel + RowSet[] rowSets = await GetHeaderAndDeletedTo(persistenceId, partitionNumber).ConfigureAwait(false); + + // If header doesn't exist, just bail on the non-existent partition + Row headerRow = rowSets[0].SingleOrDefault(); + if (headerRow == null) + return; + + // Start deleting either from the first sequence number after the last deletion, or the beginning of the partition + Row deletedToRow = rowSets[1].SingleOrDefault(); + long deleteFrom = deletedToRow == null + ? 
headerRow.GetValue("sequence_number") + : deletedToRow.GetValue("sequence_number") + 1L; + + // Nothing to delete if we're going to start higher than the specified sequence number + if (deleteFrom > toSequenceNr) + return; + + // Get the last sequence number in the partition and try to avoid tombstones by skipping deletes + IStatement getLastMessageSequence = _selectLastMessageSequence.Bind(persistenceId, partitionNumber, deleteFrom) + .SetConsistencyLevel(_cassandraExtension.JournalSettings.ReadConsistency); + RowSet lastSequenceRows = await _session.ExecuteAsync(getLastMessageSequence).ConfigureAwait(false); + + // If we have a sequence number, we've got messages to delete still in the partition + Row lastSequenceRow = lastSequenceRows.SingleOrDefault(); + if (lastSequenceRow != null) + { + // Delete either to the end of the partition or to the number specified, whichever comes first + long deleteTo = Math.Min(lastSequenceRow.GetValue("sequence_number"), toSequenceNr); + if (isPermanent == false) + { + IStatement writeMarker = _writeDeleteMarker.Bind(persistenceId, partitionNumber, deleteTo) + .SetConsistencyLevel(_cassandraExtension.JournalSettings.WriteConsistency); + await _session.ExecuteAsync(writeMarker).ConfigureAwait(false); + } + else + { + // Permanently delete using batches in parallel + long batchFrom = deleteFrom; + long batchTo; + var batches = new List(); + do + { + batchTo = Math.Min(batchFrom + _maxDeletionBatchSize - 1L, deleteTo); + + var batch = new BatchStatement(); + for (long seq = batchFrom; seq <= batchTo; seq++) + batch.Add(_deleteMessagePermanent.Bind(persistenceId, partitionNumber, seq)); + + batch.Add(_writeDeleteMarker.Bind(persistenceId, partitionNumber, batchTo)); + batch.SetConsistencyLevel(_cassandraExtension.JournalSettings.WriteConsistency); + + batches.Add(_session.ExecuteAsync(batch)); + batchFrom = batchTo + 1L; + } while (batchTo < deleteTo); + + await Task.WhenAll(batches).ConfigureAwait(false); + } + + // If we've deleted everything we're supposed to, no need to continue + if (deleteTo == toSequenceNr) + return; + } + + // Go to next partition + partitionNumber++; + } + } + + private Task GetHeaderAndDeletedTo(string persistenceId, long partitionNumber) + { + return Task.WhenAll(new[] + { + _selectHeaderSequence.Bind(persistenceId, partitionNumber).SetConsistencyLevel(_cassandraExtension.JournalSettings.ReadConsistency), + _selectDeletedToSequence.Bind(persistenceId, partitionNumber).SetConsistencyLevel(_cassandraExtension.JournalSettings.ReadConsistency) + }.Select(_session.ExecuteAsync)); + } + + private IPersistentRepresentation MapRowToPersistentRepresentation(Row row, long deletedTo) + { + IPersistentRepresentation pr = Deserialize(row.GetValue("message")); + if (pr.SequenceNr <= deletedTo) + pr = pr.Update(pr.SequenceNr, pr.PersistenceId, true, pr.Sender); + + return pr; + } + + private long GetPartitionNumber(long sequenceNumber) + { + return (sequenceNumber - 1L)/_cassandraExtension.JournalSettings.PartitionSize; + } + + private bool IsNewPartition(long sequenceNumber) + { + return (sequenceNumber - 1L)%_cassandraExtension.JournalSettings.PartitionSize == 0L; + } + + private T GetConfigurationValueOrDefault(string key, T defaultValue) + { + IStatement bound = _selectConfigurationValue.Bind(key).SetConsistencyLevel(_cassandraExtension.JournalSettings.ReadConsistency); + RowSet rows = _session.Execute(bound); + Row row = rows.SingleOrDefault(); + if (row == null) + return defaultValue; + + IPersistentRepresentation persistent = 
Deserialize(row.GetValue("message")); + return (T) persistent.Payload; + } + + private void WriteConfigurationValue(string key, T value) + { + var persistent = new Persistent(value); + IStatement bound = _writeConfigurationValue.Bind(key, Serialize(persistent)) + .SetConsistencyLevel(_cassandraExtension.JournalSettings.WriteConsistency); + _session.Execute(bound); + } + + private IPersistentRepresentation Deserialize(byte[] bytes) + { + return (IPersistentRepresentation) _serializer.FromBinary(bytes, PersistentRepresentationType); + } + + private byte[] Serialize(IPersistentRepresentation message) + { + return _serializer.ToBinary(message); + } + + protected override void PostStop() + { + base.PostStop(); + + if (_cassandraExtension != null && _session != null) + { + _cassandraExtension.SessionManager.ReleaseSession(_session); + _session = null; + } + } + } +} diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/Journal/CassandraJournalSettings.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/Journal/CassandraJournalSettings.cs new file mode 100644 index 00000000000..b326191b7ab --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/Journal/CassandraJournalSettings.cs @@ -0,0 +1,27 @@ +using Akka.Configuration; + +namespace Akka.Persistence.Cassandra.Journal +{ + /// + /// Settings for the Cassandra journal implementation, parsed from HOCON configuration. + /// + public class CassandraJournalSettings : CassandraSettings + { + /// + /// The approximate number of rows per partition to use. Cannot be changed after table creation. + /// + public long PartitionSize { get; private set; } + + /// + /// The maximum number of messages to retrieve in one request when replaying messages. + /// + public int MaxResultSize { get; private set; } + + public CassandraJournalSettings(Config config) + : base(config) + { + PartitionSize = config.GetLong("partition-size"); + MaxResultSize = config.GetInt("max-result-size"); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/Journal/JournalStatements.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/Journal/JournalStatements.cs new file mode 100644 index 00000000000..1aa0128a42d --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/Journal/JournalStatements.cs @@ -0,0 +1,63 @@ +namespace Akka.Persistence.Cassandra.Journal +{ + /// + /// CQL strings for use with the CassandraJournal. + /// + internal static class JournalStatements + { + public const string CreateKeyspace = @" + CREATE KEYSPACE IF NOT EXISTS {0} + WITH {1}"; + + public const string CreateTable = @" + CREATE TABLE IF NOT EXISTS {0} ( + persistence_id text, + partition_number bigint, + marker text, + sequence_number bigint, + message blob, + PRIMARY KEY ((persistence_id, partition_number), marker, sequence_number) + ){1}{2}"; + + public const string WriteMessage = @" + INSERT INTO {0} (persistence_id, partition_number, marker, sequence_number, message) + VALUES (?, ?, 'A', ?, ?)"; + + public const string WriteHeader = @" + INSERT INTO {0} (persistence_id, partition_number, marker, sequence_number) + VALUES (?, ?, 'H', ?)"; + + public const string SelectMessages = @" + SELECT message FROM {0} WHERE persistence_id = ? AND partition_number = ? + AND marker = 'A' AND sequence_number >= ? 
AND sequence_number <= ?"; + + public const string WriteDeleteMarker = @" + INSERT INTO {0} (persistence_id, partition_number, marker, sequence_number) + VALUES (?, ?, 'D', ?)"; + + public const string DeleteMessagePermanent = @" + DELETE FROM {0} WHERE persistence_id = ? AND partition_number = ? + AND marker = 'A' AND sequence_number = ?"; + + public const string SelectDeletedToSequence = @" + SELECT sequence_number FROM {0} WHERE persistence_id = ? AND partition_number = ? + AND marker = 'D' ORDER BY marker DESC, sequence_number DESC LIMIT 1"; + + public const string SelectLastMessageSequence = @" + SELECT sequence_number FROM {0} WHERE persistence_id = ? AND partition_number = ? + AND marker = 'A' AND sequence_number >= ? + ORDER BY marker DESC, sequence_number DESC LIMIT 1"; + + public const string SelectHeaderSequence = @" + SELECT sequence_number FROM {0} WHERE persistence_id = ? AND partition_number = ? + AND marker = 'H'"; + + public const string SelectConfigurationValue = @" + SELECT message FROM {0} + WHERE persistence_id = 'akkanet-configuration-values' AND partition_number = 0 AND marker = ?"; + + public const string WriteConfigurationValue = @" + INSERT INTO {0} (persistence_id, partition_number, marker, sequence_number, message) + VALUES ('akkanet-configuration-values', 0, ?, 0, ?)"; + } +} diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/Properties/AssemblyInfo.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..d3781b62daa --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/Properties/AssemblyInfo.cs @@ -0,0 +1,18 @@ +using System.Reflection; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Akka.Persistence.Cassandra")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyProduct("Akka.Persistence.Cassandra")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("3c96b6f4-572a-4559-9487-bb91db490506")] \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/README.md b/src/contrib/persistence/Akka.Persistence.Cassandra/README.md new file mode 100644 index 00000000000..cfa524ba014 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/README.md @@ -0,0 +1,224 @@ +Akka.Persistence.Cassandra +========================== +A replicated journal and snapshot store implementation for Akka.Persistence backed by +[Apache Cassandra](http://planetcassandra.org/). 
+
+**WARNING: The Akka.Persistence.Cassandra plugin is still in beta and the mechanics described below are subject to
+change.**
+
+Quick Start
+-----------
+To activate the journal plugin, add the following line to the actor system configuration file:
+```
+akka.persistence.journal.plugin = "cassandra-journal"
+```
+To activate the snapshot store plugin, add the following line to the actor system configuration file:
+```
+akka.persistence.snapshot-store.plugin = "cassandra-snapshot-store"
+```
+The default configuration will try to connect to a Cassandra cluster running on `127.0.0.1` for persisting messages
+and snapshots. More information on the available configuration options is in the sections below.
+
+Connecting to the Cluster
+-------------------------
+Both the journal and the snapshot store plugins use the [DataStax .NET Driver](https://github.com/datastax/csharp-driver)
+for Cassandra to communicate with the cluster. The driver has an `ISession` object which is used to execute statements
+against the cluster (very similar to a `DbConnection` object in ADO.NET). You can control the creation and
+configuration of these session instance(s) by modifying the configuration under `cassandra-sessions`. Out of the
+box, both the journal and the snapshot store plugin will try to use a session called `default`. You can override
+the settings for that session with the following configuration keys:
+
+- `cassandra-sessions.default.contact-points`: A comma-separated list of contact points in the cluster in the format
+of either `host` or `host:port`. Default value is *`[ "127.0.0.1" ]`*.
+- `cassandra-sessions.default.port`: Default port for contact points in the cluster, used if a contact point is not
+in [host:port] format. Default value is *`9042`*.
+- `cassandra-sessions.default.credentials.username`: The username to log in to Cassandra hosts. No authentication is
+used by default.
+- `cassandra-sessions.default.credentials.password`: The password corresponding to the username. No authentication
+is used by default.
+- `cassandra-sessions.default.ssl`: Boolean value indicating whether to use SSL when connecting to the cluster. No
+default value is set and so SSL is not used by default.
+- `cassandra-sessions.default.compression`: The [type of compression](https://github.com/datastax/csharp-driver/blob/master/src/Cassandra/CompressionType.cs)
+to use when communicating with the cluster. No default value is set and so compression is not used by default.
+
+If you require more advanced configuration of the `ISession` object than the options provided here (for example, to
+use a different session for the journal and snapshot store plugins or to configure the session via code or manage
+it with an IoC container), see the [Advanced Session Management](#advanced-session-management) section below.
+
+Journal
+-------
+### Features
+- All operations of the journal plugin API are fully supported
+- Uses Cassandra in a log-oriented way (i.e. data is only ever inserted but never updated)
+- Uses marker records for permanent deletes to try and avoid the problem of [reading many tombstones](http://www.datastax.com/dev/blog/cassandra-anti-patterns-queues-and-queue-like-datasets)
+  when replaying messages.
+- Messages for a single persistence Id are partitioned across the cluster to avoid unbounded partition
+  growth and support scalability by adding more nodes to the cluster (see the sketch below).
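+
+The partitioning in the last point is driven by the journal's `partition-size` setting (described under
+Configuration below). As a rough illustration only (this mirrors the partition arithmetic the journal uses
+internally; the free-standing `GetPartitionNumber` helper shown here is not part of the plugin's public API):
+```cs
+// With the default partition-size of 5000000, sequence numbers 1..5000000 map to
+// partition 0, 5000001..10000000 map to partition 1, and so on.
+long GetPartitionNumber(long sequenceNumber, long partitionSize)
+{
+    return (sequenceNumber - 1L) / partitionSize;
+}
+```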
+ +### Configuration +As mentioned in the Quick Start section, you can activate the journal plugin by adding the following line to your +actor system configuration file: +``` +akka.persistence.journal.plugin = "cassandra-journal" +``` +You can also override the journal's default settings with the following configuration keys: +- `cassandra-journal.class`: The Type name of the Cassandra journal plugin. Default value is *`Akka.Persistence.Cassandra.Journal.CassandraJournal, Akka.Persistence.Cassandra`*. +- `cassandra-journal.session-key`: The name (key) of the session to use when resolving an `ISession` instance. When +using default session management, this points at a configuration section under `cassandra-sessions` where the +session's configuration is found. Default value is *`default`*. +- `cassandra-journal.use-quoted-identifiers`: Whether or not to quote the table and keyspace names when executing +statements against Cassandra. Default value is *`false`*. +- `cassandra-journal.keyspace`: The keyspace to be created/used by the journal. Default value is *`akkanet`*. +- `cassandra-journal.keyspace-creation-options`: A string to be appended to the `CREATE KEYSPACE` statement after +the `WITH` clause when the keyspace is automatically created. Use this to define options like the replication +strategy. Default value is *`REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }`*. +- `cassandra-journal.keyspace-autocreate`: When true, the journal will automatically try to create the keyspace if +it doesn't already exist on startup. Default value is *`true`*. +- `cassandra-journal.table`: The name of the table to be created/used by the journal. Default value is *`messages`*. +- `cassandra-journal.table-creation-properties`: A string to be appended to the `CREATE TABLE` statement after the +`WITH` clause. Use this to define advanced table options like `gc_grace_seconds` or one of the other many table +options. Default value is *an empty string*. +- `cassandra-journal.partition-size`: The approximate number of message rows to store in a single partition. Cannot +be changed after table creation. Default value is *`5000000`*. +- `cassandra-journal.max-result-size`: The maximum number of messages to retrieve in a single request to Cassandra +when replaying messages. Default value is *`50001`*. +- `cassandra-journal.read-consistency`: The consistency level to use for read operations. Default value is *`Quorum`*. +- `cassandra-journal.write-consistency`: The consistency level to use for write operations. Default value is +*`Quorum`*. + +The default value for read and write consistency levels ensure that persistent actors can read their own writes. +Consider using `LocalQuorum` for both reads and writes if using a Cassandra cluster with multiple datacenters. + +Snapshot Store +-------------- +### Features +- Snapshot IO is done in a fully asynchronous fashion, including deletes (the snapshot store plugin API only +directly specifies synchronous methods for doing deletes) + +### Configuration +As mentioned in the Quick Start section, you can activate the snapshot store plugin by adding the following line +to your actor system configuration file: +``` +akka.persistence.snapshot-store.plugin = "cassandra-snapshot-store" +``` +You can also override the snapshot store's default settings with the following configuration keys: +- `cassandra-snapshot-store.class`: The Type name of the Cassandra snapshot store plugin. 
Default value is +*`Akka.Persistence.Cassandra.Snapshot.CassandraSnapshotStore, Akka.Persistence.Cassandra`*. +- `cassandra-snapshot-store.session-key`: The name (key) of the session to use when resolving an `ISession` +instance. When using default session management, this points at a configuration section under `cassandra-sessions` +where the session's configuration is found. Default value is *`default`*. +- `cassandra-snapshot-store.use-quoted-identifiers`: Whether or not to quote the table and keyspace names when +executing statements against Cassandra. Default value is *`false`*. +- `cassandra-snapshot-store.keyspace`: The keyspace to be created/used by the snapshot store. Default value is +*`akkanet`*. +- `cassandra-snapshot-store.keyspace-creation-options`: A string to be appended to the `CREATE KEYSPACE` statement +after the `WITH` clause when the keyspace is automatically created. Use this to define options like the replication +strategy. Default value is *`REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }`*. +- `cassandra-snapshot-store.keyspace-autocreate`: When true, the snapshot store will automatically try to create +the keyspace if it doesn't already exist on startup. Default value is *`true`*. +- `cassandra-snapshot-store.table`: The name of the table to be created/used by the snapshot store. Default value +is *`snapshots`*. +- `cassandra-snapshot-store.table-creation-properties`: A string to be appended to the `CREATE TABLE` statement +after the `WITH` clause. Use this to define advanced table options like `gc_grace_seconds` or one of the other +many table options. Default value is *an empty string*. +- `cassandra-snapshot-store.max-metadata-result-size`: The maximum number of snapshot metadata instances to +retrieve in a single request when trying to find a snapshot that matches criteria. Default value is *`10`*. +- `cassandra-snapshot-store.read-consistency`: The consistency level to use for read operations. Default value +is *`One`*. +- `cassandra-snapshot-store.write-consistency`: The consistency level to use for write operations. Default value +is *`One`*. + +Consider using `LocalOne` consistency level for both reads and writes if using a Cassandra cluster with multiple +datacenters. + +Advanced Session Management +--------------------------- +In some advanced scenarios, you may want to have more control over how `ISession` instances are created. Some +example scenarios might include: +- to use a different session instance for the journal and snapshot store plugins (i.e. maybe you have more than one +Cassandra cluster and are storing journal messages and snapshots in different clusters) +- to access more advanced configuration options for building the session instance in code using the DataStax +driver's cluster builder API directly +- to use session instances that have already been registered with an IoC container and are being managed there + +If you want more control over how session instances are created or managed, you have two options depending on how +much control you need. + +### Defining multiple session instances in the `cassandra-sessions` section +It is possible to define configuration for more than one session instance under the `cassandra-sessions` section of +your actor system's configuration file. To do this, just create your own section with a unique name/key for the +sub-section. All of the same options listed above in the [Connecting to the Cluster](#connecting-to-the-cluster) +can then be used to configure that session. 
For example, I might define seperate configurations for my journal and +snapshot store plugins like this: +``` +cassandra-sessions { + my-journal-session { + contact-points = [ "10.1.1.1", "10.1.1.2" ] + port = 9042 + credentials { + username = "myusername" + password = "mypassword" + } + } + + my-snapshot-session { + contact-points = [ "10.2.1.1:9142", "10.2.1.2:9142" ] + } +} +``` +I can then tell the journal and snapshot store plugins to use those sessions by overriding each plugin's `session-key` +configuration like this: +``` +cassandra-journal.session-key = "my-journal-session" +cassandra-snapshot-store.session-key = "my-snapshot-session" +``` + +### Controlling session configuration and management with code +You can also override how sessions are created, managed and resolved with your own code. Session management is +done as its own plugin for Akka.NET and a default implementation that uses the `cassandra-sessions` section is +provided out of the box. If you want to provide your own implementation for doing this (for example, to manage +sessions with an IoC container or use the DataStax driver's cluster builder API to do more advanced configuration), +here are the steps you'll need to follow: + +1. Create a class that implements the `IManageSessions` interface from `Akka.Persistence.Cassandra.SessionManagement`. + This interface is simple and just requires that you provide a way for resolving and releasing session instances. For + example: + + ```cs + public class MySessionManager : IManageSessions + { + public override ISession ResolveSession(string key) + { + // Do something here to get the ISession instance (pull from IoC container, etc) + } + + public override ISession ReleaseSession(ISession session) + { + // Do something here to release the session instance if necessary + } + } + ``` +1. Next, you'll need to create an extension id provider class by inheriting from + `ExtensionIdProvider`. This class is responsible for actually providing a copy of your + `IManageSessions` implementation. For example: + + ```cs + public class MySessionExtension : ExtensionIdProvider + { + public override IManageSessions CreateExtension(ExtendedActorSystem system) + { + // Return a copy of your implementation of IManageSessions + return new MySessionManager(); + } + } + ``` +1. Lastly, you'll need to register your extension with the actor system when creating it in your application. For + example: + + ```cs + var actorSystem = ActorSystem.Create("MyApplicationActorSystem"); + var extensionId = new MySessionExtension(); + actorSystem.RegisterExtension(extensionId); + ``` + +The journal and snapshot store plugins will now call your code when resolving or releasing sessions. diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/CassandraSession.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/CassandraSession.cs new file mode 100644 index 00000000000..8ef2e0ddcfe --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/CassandraSession.cs @@ -0,0 +1,17 @@ +using Akka.Actor; + +namespace Akka.Persistence.Cassandra.SessionManagement +{ + /// + /// Extension Id provider for Cassandra Session management extension. 
+ /// + public class CassandraSession : ExtensionIdProvider + { + public static CassandraSession Instance = new CassandraSession(); + + public override IManageSessions CreateExtension(ExtendedActorSystem system) + { + return new DefaultSessionManager(system); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/DefaultSessionManager.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/DefaultSessionManager.cs new file mode 100644 index 00000000000..5b7b3d1b171 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/DefaultSessionManager.cs @@ -0,0 +1,55 @@ +using System; +using System.Collections.Concurrent; +using Akka.Actor; +using Akka.Configuration; +using Cassandra; + +namespace Akka.Persistence.Cassandra.SessionManagement +{ + /// + /// A default session manager implementation that reads configuration from the system's "cassandra-cluster" + /// section and builds ISession instances from that configuration. Caches session instances for reuse. + /// + public class DefaultSessionManager : IManageSessions + { + private readonly Config _sessionConfigs; + private readonly ConcurrentDictionary> _sessionCache; + + public DefaultSessionManager(ExtendedActorSystem system) + { + if (system == null) throw new ArgumentNullException("system"); + + // Read configuration sections + _sessionConfigs = system.Settings.Config.GetConfig("cassandra-sessions"); + + _sessionCache = new ConcurrentDictionary>(); + } + + /// + /// Resolves the session with the key specified. + /// + public ISession ResolveSession(string key) + { + return _sessionCache.GetOrAdd(key, k => new Lazy(() => CreateSession(k))).Value; + } + + /// + /// Releases the session instance. + /// + public void ReleaseSession(ISession session) + { + // No-op since we want session instance to live for actor system's duration + // (TODO: Dispose of session instance if hooks are added to listen for Actor system shutdown?) + } + + private ISession CreateSession(string clusterName) + { + if (_sessionConfigs.HasPath(clusterName) == false) + throw new ConfigurationException(string.Format("Cannot find cluster configuration named '{0}'", clusterName)); + + // Get a cluster builder from the settings, build the cluster, and connect for a session + var clusterSettings = new SessionSettings(_sessionConfigs.GetConfig(clusterName)); + return clusterSettings.Builder.Build().Connect(); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/IManageSessions.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/IManageSessions.cs new file mode 100644 index 00000000000..9aff6fd2396 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/IManageSessions.cs @@ -0,0 +1,22 @@ +using Akka.Actor; +using Cassandra; + +namespace Akka.Persistence.Cassandra.SessionManagement +{ + /// + /// Contract for extension responsible for resolving/releasing Cassandra ISession instances used by the + /// Cassandra Persistence plugin. + /// + public interface IManageSessions : IExtension + { + /// + /// Resolves the session with the key specified. + /// + ISession ResolveSession(string key); + + /// + /// Releases the session instance. 
+ /// + void ReleaseSession(ISession session); + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/SessionSettings.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/SessionSettings.cs new file mode 100644 index 00000000000..4c1cc9c14ef --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/SessionManagement/SessionSettings.cs @@ -0,0 +1,67 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using Akka.Configuration; +using Cassandra; + +namespace Akka.Persistence.Cassandra.SessionManagement +{ + /// + /// Internal class for converting basic session settings in HOCON configuration to a Builder instance from the + /// DataStax driver for Cassandra. + /// + internal class SessionSettings + { + /// + /// A Builder instance with the appropriate configuration settings applied to it. + /// + public Builder Builder { get; private set; } + + public SessionSettings(Config config) + { + if (config == null) throw new ArgumentNullException("config"); + + Builder = Cluster.Builder(); + + // Get IP and port configuration + int port = config.GetInt("port", 9042); + IPEndPoint[] contactPoints = ParseContactPoints(config.GetStringList("contact-points"), port); + Builder.AddContactPoints(contactPoints); + + // Support user/pass authentication + if (config.HasPath("credentials")) + Builder.WithCredentials(config.GetString("credentials.username"), config.GetString("credentials.password")); + + // Support SSL + if (config.GetBoolean("ssl")) + Builder.WithSSL(); + + // Support compression + string compressionTypeConfig = config.GetString("compression"); + if (compressionTypeConfig != null) + { + var compressionType = (CompressionType) Enum.Parse(typeof (CompressionType), compressionTypeConfig, true); + Builder.WithCompression(compressionType); + } + } + + private static IPEndPoint[] ParseContactPoints(IList contactPoints, int port) + { + if (contactPoints == null || contactPoints.Count == 0) + throw new ConfigurationException("List of contact points cannot be empty."); + + return contactPoints.Select(cp => + { + string[] ipAndPort = cp.Split(':'); + if (ipAndPort.Length == 1) + return new IPEndPoint(IPAddress.Parse(ipAndPort[0]), port); + + if (ipAndPort.Length == 2) + return new IPEndPoint(IPAddress.Parse(ipAndPort[0]), int.Parse(ipAndPort[1])); + + throw new ConfigurationException(string.Format("Contact points should have format [host:post] or [host] but found: {0}", cp)); + }).ToArray(); + } + } +} \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/CassandraSnapshotStore.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/CassandraSnapshotStore.cs new file mode 100644 index 00000000000..31bf72efbe6 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/CassandraSnapshotStore.cs @@ -0,0 +1,251 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Akka.Event; +using Akka.Persistence.Snapshot; +using Akka.Serialization; +using Cassandra; + +namespace Akka.Persistence.Cassandra.Snapshot +{ + /// + /// A SnapshotStore implementation for writing snapshots to Cassandra. 
+ /// + public class CassandraSnapshotStore : SnapshotStore + { + private static readonly Type SnapshotType = typeof (Serialization.Snapshot); + + private readonly CassandraExtension _cassandraExtension; + private readonly Serializer _serializer; + private readonly ILoggingAdapter _log; + private readonly bool _publish; + + private ISession _session; + private PreparedStatement _writeSnapshot; + private PreparedStatement _deleteSnapshot; + private PreparedStatement _selectSnapshot; + private PreparedStatement _selectSnapshotMetadata; + + public CassandraSnapshotStore() + { + _cassandraExtension = CassandraPersistence.Instance.Apply(Context.System); + _serializer = Context.System.Serialization.FindSerializerForType(SnapshotType); + _log = Context.System.Log; + + // Here so we can emulate the base class behavior but do deletes async + PersistenceExtension persistence = Context.System.PersistenceExtension(); + _publish = persistence.Settings.Internal.PublishPluginCommands; + } + + protected override void PreStart() + { + base.PreStart(); + + // Get a session to talk to Cassandra with + CassandraSnapshotStoreSettings settings = _cassandraExtension.SnapshotStoreSettings; + _session = _cassandraExtension.SessionManager.ResolveSession(settings.SessionKey); + + // Create the keyspace if necessary and always attempt to create the table + if (settings.KeyspaceAutocreate) + _session.Execute(string.Format(SnapshotStoreStatements.CreateKeyspace, settings.Keyspace, settings.KeyspaceCreationOptions)); + + var fullyQualifiedTableName = string.Format("{0}.{1}", settings.Keyspace, settings.Table); + var createTable = string.IsNullOrWhiteSpace(settings.TableCreationProperties) + ? string.Format(SnapshotStoreStatements.CreateTable, fullyQualifiedTableName, string.Empty, string.Empty) + : string.Format(SnapshotStoreStatements.CreateTable, fullyQualifiedTableName, " AND ", + settings.TableCreationProperties); + + _session.Execute(createTable); + + // Prepare some statements + _writeSnapshot = _session.PrepareFormat(SnapshotStoreStatements.WriteSnapshot, fullyQualifiedTableName); + _deleteSnapshot = _session.PrepareFormat(SnapshotStoreStatements.DeleteSnapshot, fullyQualifiedTableName); + _selectSnapshot = _session.PrepareFormat(SnapshotStoreStatements.SelectSnapshot, fullyQualifiedTableName); + _selectSnapshotMetadata = _session.PrepareFormat(SnapshotStoreStatements.SelectSnapshotMetadata, fullyQualifiedTableName); + } + + protected override bool Receive(object message) + { + // Make deletes async as well, but make sure we still publish like the base class does + if (message is DeleteSnapshot) + { + HandleDeleteAsync((DeleteSnapshot) message, msg => DeleteAsync(msg.Metadata)); + } + else if (message is DeleteSnapshots) + { + HandleDeleteAsync((DeleteSnapshots) message, msg => DeleteAsync(msg.PersistenceId, msg.Criteria)); + } + else + { + return base.Receive(message); + } + + return true; + } + + protected override async Task LoadAsync(string persistenceId, SnapshotSelectionCriteria criteria) + { + bool hasNextPage = true; + byte[] nextPageState = null; + + while (hasNextPage) + { + // Get a page of metadata that match the criteria + IStatement getMetadata = _selectSnapshotMetadata.Bind(persistenceId, criteria.MaxSequenceNr) + .SetConsistencyLevel(_cassandraExtension.SnapshotStoreSettings.ReadConsistency) + .SetPageSize(_cassandraExtension.SnapshotStoreSettings.MaxMetadataResultSize) + .SetPagingState(nextPageState) + .SetAutoPage(false); + RowSet metadataRows = await 
_session.ExecuteAsync(getMetadata).ConfigureAwait(false); + + nextPageState = metadataRows.PagingState; + hasNextPage = nextPageState != null; + IEnumerable page = metadataRows.Select(MapRowToSnapshotMetadata) + .Where(md => md.Timestamp <= criteria.MaxTimeStamp); + + // Try to get the first available snapshot from the page + foreach (SnapshotMetadata md in page) + { + try + { + IStatement getSnapshot = _selectSnapshot.Bind(md.PersistenceId, md.SequenceNr) + .SetConsistencyLevel(_cassandraExtension.SnapshotStoreSettings.ReadConsistency); + RowSet snapshotRows = await _session.ExecuteAsync(getSnapshot).ConfigureAwait(false); + + // If we didn't get a snapshot for some reason, just try the next one + Row snapshotRow = snapshotRows.SingleOrDefault(); + if (snapshotRow == null) + continue; + + // We found a snapshot so create the necessary class and return the result + return new SelectedSnapshot(md, Deserialize(snapshotRow.GetValue("snapshot"))); + } + catch (Exception e) + { + // If there is a problem, just try the next snapshot + _log.Warning("Unexpected exception while retrieveing snapshot {0} for id {1}: {2}", md.SequenceNr, md.PersistenceId, e); + } + } + + // Just try the next page if available + } + + // Out of snapshots that match or none found + return null; + } + + protected override Task SaveAsync(SnapshotMetadata metadata, object snapshot) + { + IStatement bound = _writeSnapshot.Bind(metadata.PersistenceId, metadata.SequenceNr, metadata.Timestamp.Ticks, Serialize(snapshot)) + .SetConsistencyLevel(_cassandraExtension.SnapshotStoreSettings.WriteConsistency); + return _session.ExecuteAsync(bound); + } + + protected Task DeleteAsync(SnapshotMetadata metadata) + { + IStatement bound = _deleteSnapshot.Bind(metadata.PersistenceId, metadata.SequenceNr) + .SetConsistencyLevel(_cassandraExtension.SnapshotStoreSettings.WriteConsistency); + return _session.ExecuteAsync(bound); + } + + protected async Task DeleteAsync(string persistenceId, SnapshotSelectionCriteria criteria) + { + // Use a batch to delete all matching snapshots + var batch = new BatchStatement(); + + bool hasNextPage = true; + byte[] nextPageState = null; + + while (hasNextPage) + { + // Get a page of metadata that match the criteria + IStatement getMetadata = _selectSnapshotMetadata.Bind(persistenceId, criteria.MaxSequenceNr) + .SetConsistencyLevel(_cassandraExtension.SnapshotStoreSettings.ReadConsistency) + .SetPageSize(_cassandraExtension.SnapshotStoreSettings.MaxMetadataResultSize) + .SetPagingState(nextPageState) + .SetAutoPage(false); + RowSet metadataRows = await _session.ExecuteAsync(getMetadata).ConfigureAwait(false); + + nextPageState = metadataRows.PagingState; + hasNextPage = nextPageState != null; + IEnumerable page = metadataRows.Select(MapRowToSnapshotMetadata) + .Where(md => md.Timestamp <= criteria.MaxTimeStamp); + // Add any matching snapshots from the page to the batch + foreach (SnapshotMetadata md in page) + batch.Add(_deleteSnapshot.Bind(md.PersistenceId, md.SequenceNr)); + + // Go to next page if available + } + + if (batch.IsEmpty) + return; + + // Send the batch of deletes + batch.SetConsistencyLevel(_cassandraExtension.SnapshotStoreSettings.WriteConsistency); + await _session.ExecuteAsync(batch).ConfigureAwait(false); + } + + protected override void Saved(SnapshotMetadata metadata) + { + // No op + } + + protected override void Delete(SnapshotMetadata metadata) + { + // Should never get called + throw new NotSupportedException("Deletes are handled async by this snapshot store."); + } + + protected 
override void Delete(string persistenceId, SnapshotSelectionCriteria criteria) + { + // Should never get called + throw new NotSupportedException("Deletes are handled async by this snapshot store."); + } + + protected override void PostStop() + { + base.PostStop(); + + if (_cassandraExtension != null && _session != null) + { + _cassandraExtension.SessionManager.ReleaseSession(_session); + _session = null; + } + } + + private async Task HandleDeleteAsync(T message, Func handler) + { + try + { + // Capture event stream so we can use it after await + EventStream es = Context.System.EventStream; + + // Delete async, then publish if necessary + await handler(message).ConfigureAwait(false); + if (_publish) + es.Publish(message); + } + catch (Exception e) + { + _log.Error(e, "Unexpected error while deleting snapshot(s)."); + } + } + + private object Deserialize(byte[] bytes) + { + return ((Serialization.Snapshot) _serializer.FromBinary(bytes, SnapshotType)).Data; + } + + private byte[] Serialize(object snapshotData) + { + return _serializer.ToBinary(new Serialization.Snapshot(snapshotData)); + } + + private static SnapshotMetadata MapRowToSnapshotMetadata(Row row) + { + return new SnapshotMetadata(row.GetValue("persistence_id"), row.GetValue("sequence_number"), + new DateTime(row.GetValue("timestamp_ticks"))); + } + } +} diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/CassandraSnapshotStoreSettings.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/CassandraSnapshotStoreSettings.cs new file mode 100644 index 00000000000..737957c2434 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/CassandraSnapshotStoreSettings.cs @@ -0,0 +1,22 @@ +using Akka.Configuration; + +namespace Akka.Persistence.Cassandra.Snapshot +{ + /// + /// Settings for the Cassandra snapshot store implementation, parsed from HOCON configuration. + /// + public class CassandraSnapshotStoreSettings : CassandraSettings + { + /// + /// The maximum number of snapshot metadata records to retrieve in a single request when trying to find + /// snapshots that meet criteria. + /// + public int MaxMetadataResultSize { get; private set; } + + public CassandraSnapshotStoreSettings(Config config) + : base(config) + { + MaxMetadataResultSize = config.GetInt("max-metadata-result-size"); + } + } +} diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/SnapshotStoreStatements.cs b/src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/SnapshotStoreStatements.cs new file mode 100644 index 00000000000..7b1998ccd0a --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/Snapshot/SnapshotStoreStatements.cs @@ -0,0 +1,38 @@ +using System; +using Cassandra; + +namespace Akka.Persistence.Cassandra.Snapshot +{ + /// + /// CQL strings used by the CassandraSnapshotStore. + /// + internal static class SnapshotStoreStatements + { + public const string CreateKeyspace = @" + CREATE KEYSPACE IF NOT EXISTS {0} + WITH {1}"; + + public const string CreateTable = @" + CREATE TABLE IF NOT EXISTS {0} ( + persistence_id text, + sequence_number bigint, + timestamp_ticks bigint, + snapshot blob, + PRIMARY KEY (persistence_id, sequence_number) + ) WITH CLUSTERING ORDER BY (sequence_number DESC){1}{2}"; + + public const string WriteSnapshot = @" + INSERT INTO {0} (persistence_id, sequence_number, timestamp_ticks, snapshot) + VALUES (?, ?, ?, ?)"; + + public const string DeleteSnapshot = @" + DELETE FROM {0} WHERE persistence_id = ? 
AND sequence_number = ?"; + + public const string SelectSnapshot = @" + SELECT snapshot FROM {0} WHERE persistence_id = ? AND sequence_number = ?"; + + public const string SelectSnapshotMetadata = @" + SELECT persistence_id, sequence_number, timestamp_ticks FROM {0} + WHERE persistence_id = ? AND sequence_number <= ?"; + } +} diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/packages.config b/src/contrib/persistence/Akka.Persistence.Cassandra/packages.config new file mode 100644 index 00000000000..551f8242368 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/packages.config @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file diff --git a/src/contrib/persistence/Akka.Persistence.Cassandra/reference.conf b/src/contrib/persistence/Akka.Persistence.Cassandra/reference.conf new file mode 100644 index 00000000000..410bc43d756 --- /dev/null +++ b/src/contrib/persistence/Akka.Persistence.Cassandra/reference.conf @@ -0,0 +1,95 @@ +cassandra-sessions { + + # The "default" Cassandra session, used by both the journal and snapshot store if not changed in + # the cassandra-journal and cassandra-snapshot-store configuration sections below + default { + + # Comma-seperated list of contact points in the cluster in the format of either [host] or [host:port] + contact-points = [ "127.0.0.1" ] + + # Default port for contact points in the cluster, used if a contact point is not in [host:port] format + port = 9042 + } +} + +cassandra-journal { + + # Type name of the cassandra journal plugin + class = "Akka.Persistence.Cassandra.Journal.CassandraJournal, Akka.Persistence.Cassandra" + + # The name (key) of the session to use when resolving an ISession instance. When using default session management, + # this points at configuration under the "cassandra-sessions" section where the session's configuration is found. + session-key = "default" + + # Whether or not to quote table and keyspace names when executing statements against Cassandra + use-quoted-identifiers = false + + # The keyspace to be created/used by the journal + keyspace = "akkanet" + + # A string to be appended to the CREATE KEYSPACE statement after the WITH clause when the keyspace is + # automatically created. Use this to define options like replication strategy. + keyspace-creation-options = "REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }" + + # When true the journal will automatically try to create the keyspace if it doesn't already exist on start + keyspace-autocreate = true + + # Name of the table to be created/used by the journal + table = "messages" + + # A string to be appended to the CREATE TABLE statement after the WITH clause. Use this to define things + # like gc_grace_seconds or one of the many other table options. + table-creation-properties = "" + + # The approximate number of rows per partition to use. Cannot be changed after table creation. + partition-size = 5000000 + + # The maximum number of messages to retrieve in one request when replaying messages + max-result-size = 50001 + + # Consistency level for reads + read-consistency = "Quorum" + + # Consistency level for writes + write-consistency = "Quorum" +} + +cassandra-snapshot-store { + + # Type name of the cassandra snapshot store plugin + class = "Akka.Persistence.Cassandra.Snapshot.CassandraSnapshotStore, Akka.Persistence.Cassandra" + + # The name (key) of the session to use when resolving an ISession instance. 
When using default session management, + # this points at configuration under the "cassandra-sessions" section where the session's configuration is found. + session-key = "default" + + # Whether or not to quote table and keyspace names when executing statements against Cassandra + use-quoted-identifiers = false + + # The keyspace to be created/used by the snapshot store + keyspace = "akkanet" + + # A string to be appended to the CREATE KEYSPACE statement after the WITH clause when the keyspace is + # automatically created. Use this to define options like replication strategy. + keyspace-creation-options = "REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }" + + # When true the journal will automatically try to create the keyspace if it doesn't already exist on start + keyspace-autocreate = true + + # Name of the table to be created/used by the snapshot store + table = "snapshots" + + # A string to be appended to the CREATE TABLE statement after the WITH clause. Use this to define things + # like gc_grace_seconds or one of the many other table options. + table-creation-properties = "" + + # The maximum number of snapshot metadata instances to retrieve in a single request when trying to find a + # snapshot that matches the criteria + max-metadata-result-size = 10 + + # Consistency level for reads + read-consistency = "One" + + # Consistency level for writes + write-consistency = "One" +} \ No newline at end of file From 457c2d416da614472ef4e501090f2cb38eaa1ad1 Mon Sep 17 00:00:00 2001 From: rogeralsing Date: Thu, 21 May 2015 18:28:47 +0200 Subject: [PATCH 49/66] Fixes #996 --- src/core/Akka.Tests/Routing/RoutingSpec.cs | 41 ++++++++++++++++++++++ src/core/Akka/Actor/ActorCell.cs | 2 ++ src/core/Akka/Routing/RouterPoolActor.cs | 9 +++-- 3 files changed, 50 insertions(+), 2 deletions(-) diff --git a/src/core/Akka.Tests/Routing/RoutingSpec.cs b/src/core/Akka.Tests/Routing/RoutingSpec.cs index b99afe16680..017f5b033c6 100644 --- a/src/core/Akka.Tests/Routing/RoutingSpec.cs +++ b/src/core/Akka.Tests/Routing/RoutingSpec.cs @@ -289,6 +289,47 @@ public void Router_RemoveRoute_should_remove_existing_routee_and_leave_the_rest( updatedRouter.Routees.Cast().Any(r => ReferenceEquals(r.Actor, blackHole1)).ShouldBe(true); updatedRouter.Routees.Cast().Any(r => ReferenceEquals(r.Actor, blackHole2)).ShouldBe(true); } + + public class RouterSupervisorSpec : AkkaSpec + { + #region Killable actor + + private class KillableActor : ReceiveActor + { + private readonly IActorRef TestActor; + + public KillableActor(IActorRef testActor) + { + TestActor = testActor; + Receive(s => s == "go away", s => { throw new ArgumentException("Goodbye then!"); }); + } + } + + #endregion + + #region Tests + + [Fact] + public void Routers_must_use_provided_supervisor_strategy() + { + var router = Sys.ActorOf(Props.Create(() => new KillableActor(TestActor)) + .WithRouter( + new RoundRobinPool(1, null, new AllForOneStrategy( + exception => + { + TestActor.Tell("supervised"); + return Directive.Stop; + }), + null)), + "router1"); + + router.Tell("go away"); + + ExpectMsg("supervised", TimeSpan.FromSeconds(2)); + } + + #endregion + } } } diff --git a/src/core/Akka/Actor/ActorCell.cs b/src/core/Akka/Actor/ActorCell.cs index 21008ef9be1..e1bf788186a 100644 --- a/src/core/Akka/Actor/ActorCell.cs +++ b/src/core/Akka/Actor/ActorCell.cs @@ -209,6 +209,8 @@ private ActorBase NewActor() { _state = _state.ClearBehaviorStack(); instance = CreateNewActorInstance(); + //TODO: this overwrites any already initiaized supervisor 
strategy + //We should investigate what we can do to handle this better instance.SupervisorStrategyInternal = _props.SupervisorStrategy; //defaults to null - won't affect lazy instantiation unless explicitly set in props }); diff --git a/src/core/Akka/Routing/RouterPoolActor.cs b/src/core/Akka/Routing/RouterPoolActor.cs index 62042c429a9..097684345f2 100644 --- a/src/core/Akka/Routing/RouterPoolActor.cs +++ b/src/core/Akka/Routing/RouterPoolActor.cs @@ -18,7 +18,7 @@ namespace Akka.Routing /// internal class RouterPoolActor : RouterActor { - // private SupervisorStrategy supervisorStrategy; + private readonly SupervisorStrategy _supervisorStrategy; protected Pool Pool { @@ -42,7 +42,12 @@ protected Pool Pool /// The supervisor strategy. public RouterPoolActor(SupervisorStrategy supervisorStrategy) { - SupervisorStrategyInternal = supervisorStrategy; + _supervisorStrategy = supervisorStrategy; + } + + protected override SupervisorStrategy SupervisorStrategy() + { + return _supervisorStrategy; } /// From 617a4ff07c82d702c27368b704f82f87530d9d30 Mon Sep 17 00:00:00 2001 From: rogeralsing Date: Thu, 21 May 2015 21:31:57 +0200 Subject: [PATCH 50/66] Updated the TaskBasedScheduler tests for DedicatedThreadScheduler instead. --- ...uler_ActionScheduler_Cancellation_Tests.cs | 16 ++-- ...cheduler_ActionScheduler_Schedule_Tests.cs | 28 +++--- ...eduler_TellScheduler_Cancellation_Tests.cs | 16 ++-- ...dScheduler_TellScheduler_Schedule_Tests.cs | 24 ++--- .../TaskBasedScheduler_Verify_TimeProvider.cs | 11 ++- .../Scheduler/DedicatedThreadScheduler.cs | 25 +++-- .../Actor/Scheduler/TaskBasedScheduler.cs | 92 ------------------- src/core/Akka/Akka.csproj | 1 - 8 files changed, 65 insertions(+), 148 deletions(-) delete mode 100644 src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs diff --git a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_ActionScheduler_Cancellation_Tests.cs b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_ActionScheduler_Cancellation_Tests.cs index a6056c06243..9279b5cdf37 100644 --- a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_ActionScheduler_Cancellation_Tests.cs +++ b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_ActionScheduler_Cancellation_Tests.cs @@ -1,5 +1,5 @@ //----------------------------------------------------------------------- -// +// // Copyright (C) 2009-2015 Typesafe Inc. 
// Copyright (C) 2013-2015 Akka.NET project // @@ -12,13 +12,13 @@ namespace Akka.Tests.Actor.Scheduler { // ReSharper disable once InconsistentNaming - public class TaskBasedScheduler_ActionScheduler_Cancellation_Tests : AkkaSpec + public class DedicatedThreadScheduler_ActionScheduler_Cancellation_Tests : AkkaSpec { [Fact] public void When_ScheduleOnce_using_canceled_Cancelable_Then_their_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IActionScheduler scheduler = new TaskBasedScheduler(); + IActionScheduler scheduler = new DedicatedThreadScheduler(Sys); var canceled = Cancelable.CreateCanceled(); scheduler.ScheduleOnce(0, () => TestActor.Tell("Test"), canceled); @@ -31,7 +31,7 @@ public void When_ScheduleOnce_using_canceled_Cancelable_Then_their_actions_shoul public void When_ScheduleRepeatedly_using_canceled_Cancelable_Then_their_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IActionScheduler scheduler = new TaskBasedScheduler(); + IActionScheduler scheduler = new DedicatedThreadScheduler(Sys); var canceled = Cancelable.CreateCanceled(); scheduler.ScheduleRepeatedly(0, 100, () => TestActor.Tell("Test1"), canceled); @@ -45,7 +45,7 @@ public void When_ScheduleRepeatedly_using_canceled_Cancelable_Then_their_actions public void When_ScheduleOnce_and_then_canceling_before_they_occur_Then_their_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IActionScheduler scheduler = new TaskBasedScheduler(); + IActionScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelable = new Cancelable(scheduler); scheduler.ScheduleOnce(100, () => TestActor.Tell("Test"), cancelable); @@ -59,7 +59,7 @@ public void When_ScheduleOnce_and_then_canceling_before_they_occur_Then_their_ac public void When_ScheduleRepeatedly_and_then_canceling_before_they_occur_Then_their_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IActionScheduler scheduler = new TaskBasedScheduler(); + IActionScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelable = new Cancelable(scheduler); scheduler.ScheduleRepeatedly(100, 2, () => TestActor.Tell("Test"), cancelable); @@ -73,7 +73,7 @@ public void When_ScheduleRepeatedly_and_then_canceling_before_they_occur_Then_th public void When_canceling_existing_running_repeaters_Then_their_future_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IActionScheduler scheduler = new TaskBasedScheduler(); + IActionScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelable = new Cancelable(scheduler); scheduler.ScheduleRepeatedly(0, 150, () => TestActor.Tell("Test"), cancelable); @@ -88,7 +88,7 @@ public void When_canceling_existing_running_repeaters_Then_their_future_actions_ public void When_canceling_existing_running_repeaters_by_scheduling_the_cancellation_ahead_of_time_Then_their_future_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IActionScheduler scheduler = new TaskBasedScheduler(); + IActionScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelableOdd = new Cancelable(scheduler); scheduler.ScheduleRepeatedly(1, 150, () => TestActor.Tell("Test"), cancelableOdd); diff --git a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_ActionScheduler_Schedule_Tests.cs b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_ActionScheduler_Schedule_Tests.cs index 9cbbbc01711..e9f13542635 100644 --- a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_ActionScheduler_Schedule_Tests.cs +++ 
b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_ActionScheduler_Schedule_Tests.cs @@ -1,5 +1,5 @@ //----------------------------------------------------------------------- -// +// // Copyright (C) 2009-2015 Typesafe Inc. // Copyright (C) 2013-2015 Akka.NET project // @@ -16,14 +16,14 @@ namespace Akka.Tests.Actor.Scheduler { // ReSharper disable once InconsistentNaming - public class TaskBasedScheduler_ActionScheduler_Schedule_Tests : AkkaSpec + public class DedicatedThreadScheduler_ActionScheduler_Schedule_Tests : AkkaSpec { - [Theory(Skip = "Tests that messages are sent with the specified interval, however due to inaccuracy of Task.Dely this often fails. Run this manually if you've made changes to TaskBasedScheduler")] + [Theory] [InlineData(10, 1000)] public void ScheduleRepeatedly_in_milliseconds_Tests_and_verify_the_interval(int initialDelay, int interval) { // Prepare, set up actions to be fired - IActionScheduler scheduler = new TaskBasedScheduler(); + IActionScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelable = new Cancelable(Sys.Scheduler); var receiver = ActorOf(dsl => @@ -67,7 +67,7 @@ public void ScheduleRepeatedly_in_milliseconds_Tests_and_verify_the_interval(int public void ScheduleRepeatedly_in_milliseconds_Tests(int initialDelay, int interval) { // Prepare, set up actions to be fired - IActionScheduler testScheduler = new TaskBasedScheduler(); + IActionScheduler testScheduler = new DedicatedThreadScheduler(Sys); testScheduler.ScheduleRepeatedly(initialDelay, interval, () => TestActor.Tell("Test")); @@ -83,7 +83,7 @@ public void ScheduleRepeatedly_in_milliseconds_Tests(int initialDelay, int inter public void ScheduleRepeatedly_in_TimeSpan_Tests(int initialDelay, int interval) { // Prepare, set up actions to be fired - IActionScheduler testScheduler = new TaskBasedScheduler(); + IActionScheduler testScheduler = new DedicatedThreadScheduler(Sys); testScheduler.ScheduleRepeatedly(TimeSpan.FromMilliseconds(initialDelay), TimeSpan.FromMilliseconds(interval), () => TestActor.Tell("Test")); @@ -98,7 +98,7 @@ public void ScheduleRepeatedly_in_TimeSpan_Tests(int initialDelay, int interval) public void ScheduleOnceTests() { // Prepare, set up actions to be fired - IActionScheduler testScheduler = new TaskBasedScheduler(); + IActionScheduler testScheduler = new DedicatedThreadScheduler(Sys); testScheduler.ScheduleOnce(50, () => TestActor.Tell("Test1")); testScheduler.ScheduleOnce(100, () => TestActor.Tell("Test2")); @@ -116,7 +116,7 @@ public void ScheduleOnceTests() public void When_ScheduleOnce_many_at_the_same_time_Then_all_fires(int[] times) { // Prepare, set up actions to be fired - IActionScheduler scheduler = new TaskBasedScheduler(); + IActionScheduler scheduler = new DedicatedThreadScheduler(Sys); foreach(var time in times) { @@ -141,7 +141,7 @@ public void When_ScheduleOnce_many_at_the_same_time_Then_all_fires(int[] times) [InlineData(-4711)] public void When_ScheduleOnce_with_invalid_delay_Then_exception_is_thrown(int invalidTime) { - IActionScheduler testScheduler = new TaskBasedScheduler(); + IActionScheduler testScheduler = new DedicatedThreadScheduler(Sys); XAssert.Throws(() => testScheduler.ScheduleOnce(invalidTime, () => { }) @@ -153,7 +153,7 @@ public void When_ScheduleOnce_with_invalid_delay_Then_exception_is_thrown(int in [InlineData(-4711)] public void When_ScheduleRepeatedly_with_invalid_delay_Then_exception_is_thrown(int invalidTime) { - IActionScheduler testScheduler = new TaskBasedScheduler(); + IActionScheduler testScheduler = new 
DedicatedThreadScheduler(Sys); XAssert.Throws(() => testScheduler.ScheduleRepeatedly(invalidTime, 100, () => { }) @@ -166,7 +166,7 @@ public void When_ScheduleRepeatedly_with_invalid_delay_Then_exception_is_thrown( [InlineData(-4711)] public void When_ScheduleRepeatedly_with_invalid_interval_Then_exception_is_thrown(int invalidInterval) { - IActionScheduler testScheduler = new TaskBasedScheduler(); + IActionScheduler testScheduler = new DedicatedThreadScheduler(Sys); XAssert.Throws(() => testScheduler.ScheduleRepeatedly(42, invalidInterval, () => { }) @@ -176,7 +176,7 @@ public void When_ScheduleRepeatedly_with_invalid_interval_Then_exception_is_thro [Fact] public void When_ScheduleOnce_with_0_delay_Then_action_is_executed_immediately() { - IActionScheduler testScheduler = new TaskBasedScheduler(); + IActionScheduler testScheduler = new DedicatedThreadScheduler(Sys); var manualResetEvent = new ManualResetEventSlim(); manualResetEvent.IsSet.ShouldBeFalse(); testScheduler.ScheduleOnce(0, () => manualResetEvent.Set()); @@ -187,7 +187,7 @@ public void When_ScheduleOnce_with_0_delay_Then_action_is_executed_immediately() [Fact] public void When_ScheduleRepeatedly_with_0_delay_Then_action_is_executed_immediately() { - IActionScheduler testScheduler = new TaskBasedScheduler(); + IActionScheduler testScheduler = new DedicatedThreadScheduler(Sys); var manualResetEvent = new ManualResetEventSlim(); manualResetEvent.IsSet.ShouldBeFalse(); testScheduler.ScheduleRepeatedly(0, 100, () => manualResetEvent.Set()); @@ -198,7 +198,7 @@ public void When_ScheduleRepeatedly_with_0_delay_Then_action_is_executed_immedia [Fact] public void When_ScheduleRepeatedly_action_crashes_Then_no_more_calls_will_be_scheduled() { - IActionScheduler testScheduler = new TaskBasedScheduler(); + IActionScheduler testScheduler = new DedicatedThreadScheduler(Sys); var timesCalled = 0; testScheduler.ScheduleRepeatedly(0, 10, () => { Interlocked.Increment(ref timesCalled); throw new Exception("Crash"); }); AwaitCondition(() => timesCalled >= 1); diff --git a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_TellScheduler_Cancellation_Tests.cs b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_TellScheduler_Cancellation_Tests.cs index dd5862b8092..088ce04ce3c 100644 --- a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_TellScheduler_Cancellation_Tests.cs +++ b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_TellScheduler_Cancellation_Tests.cs @@ -1,5 +1,5 @@ //----------------------------------------------------------------------- -// +// // Copyright (C) 2009-2015 Typesafe Inc. 
// Copyright (C) 2013-2015 Akka.NET project // @@ -12,13 +12,13 @@ namespace Akka.Tests.Actor.Scheduler { // ReSharper disable once InconsistentNaming - public class TaskBasedScheduler_TellScheduler_Cancellation_Tests : AkkaSpec + public class DedicatedThreadScheduler_TellScheduler_Cancellation_Tests : AkkaSpec { [Fact] public void When_ScheduleTellOnce_using_canceled_Cancelable_Then_their_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - ITellScheduler scheduler = new TaskBasedScheduler(); + ITellScheduler scheduler = new DedicatedThreadScheduler(Sys); var canceled = Cancelable.CreateCanceled(); scheduler.ScheduleTellOnce(0, TestActor, "Test", ActorRefs.NoSender, canceled); @@ -32,7 +32,7 @@ public void When_ScheduleTellOnce_using_canceled_Cancelable_Then_their_actions_s public void When_ScheduleTellRepeatedly_using_canceled_Cancelable_Then_their_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - ITellScheduler scheduler = new TaskBasedScheduler(); + ITellScheduler scheduler = new DedicatedThreadScheduler(Sys); var canceled = Cancelable.CreateCanceled(); scheduler.ScheduleTellRepeatedly(0, 2, TestActor, "Test", ActorRefs.NoSender, canceled); @@ -46,7 +46,7 @@ public void When_ScheduleTellRepeatedly_using_canceled_Cancelable_Then_their_act public void When_ScheduleTellOnce_and_then_canceling_before_they_occur_Then_their_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelable = new Cancelable(scheduler); scheduler.ScheduleTellOnce(100, TestActor, "Test", ActorRefs.NoSender, cancelable); @@ -61,7 +61,7 @@ public void When_ScheduleTellOnce_and_then_canceling_before_they_occur_Then_thei public void When_ScheduleTellRepeatedly_and_then_canceling_before_they_occur_Then_their_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelable = new Cancelable(scheduler); scheduler.ScheduleTellRepeatedly(100, 2, TestActor, "Test", ActorRefs.NoSender, cancelable); @@ -76,7 +76,7 @@ public void When_ScheduleTellRepeatedly_and_then_canceling_before_they_occur_The public void When_canceling_existing_running_repeaters_Then_their_future_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelable = new Cancelable(scheduler); scheduler.ScheduleTellRepeatedly(0, 150, TestActor, "Test", ActorRefs.NoSender, cancelable); @@ -91,7 +91,7 @@ public void When_canceling_existing_running_repeaters_Then_their_future_actions_ public void When_canceling_existing_running_repeaters_by_scheduling_the_cancellation_ahead_of_time_Then_their_future_actions_should_not_be_invoked() { // Prepare, set up actions to be fired - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelableOdd = new Cancelable(scheduler); scheduler.ScheduleTellRepeatedly(1, 150, TestActor, "Test", ActorRefs.NoSender, cancelableOdd); diff --git a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_TellScheduler_Schedule_Tests.cs b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_TellScheduler_Schedule_Tests.cs index 8dabd9e9bd0..90f8f04cc69 100644 --- 
a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_TellScheduler_Schedule_Tests.cs +++ b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_TellScheduler_Schedule_Tests.cs @@ -1,5 +1,5 @@ //----------------------------------------------------------------------- -// +// // Copyright (C) 2009-2015 Typesafe Inc. // Copyright (C) 2013-2015 Akka.NET project // @@ -15,14 +15,14 @@ namespace Akka.Tests.Actor.Scheduler { // ReSharper disable once InconsistentNaming - public class TaskBasedScheduler_TellScheduler_Schedule_Tests : AkkaSpec + public class DedicatedThreadScheduler_TellScheduler_Schedule_Tests : AkkaSpec { - [Theory(Skip = "Tests that messages are sent with the specified interval, however due to inaccuracy of Task.Dely this often fails. Run this manually if you've made changes to TaskBasedScheduler")] + // [Theory(Skip = "Tests that messages are sent with the specified interval, however due to inaccuracy of Task.Dely this often fails. Run this manually if you've made changes to DedicatedThreadScheduler")] [InlineData(10, 1000)] public void ScheduleTellRepeatedly_in_milliseconds_Tests(int initialDelay, int interval) { // Prepare, set up actions to be fired - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); var cancelable = new Cancelable(Sys.Scheduler); var receiver = ActorOf(dsl => @@ -67,7 +67,7 @@ public void ScheduleTellRepeatedly_in_milliseconds_Tests(int initialDelay, int i public void ScheduleTellRepeatedly_TimeSpan_Tests(int initialDelay, int interval) { //Prepare, set up actions to be fired - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); scheduler.ScheduleTellRepeatedly(TimeSpan.FromMilliseconds(initialDelay), TimeSpan.FromMilliseconds(interval), TestActor, "Test", ActorRefs.NoSender); @@ -83,7 +83,7 @@ public void ScheduleTellRepeatedly_TimeSpan_Tests(int initialDelay, int interval public void ScheduleTellOnceTests(int[] times) { // Prepare, set up messages to be sent - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); foreach(var time in times) { @@ -103,7 +103,7 @@ public void ScheduleTellOnceTests(int[] times) public void When_ScheduleTellOnce_many_at_the_same_time_Then_all_fires(int[] times) { // Prepare, set up actions to be fired - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); foreach(var time in times) { @@ -126,7 +126,7 @@ public void When_ScheduleTellOnce_many_at_the_same_time_Then_all_fires(int[] tim [InlineData(-4711)] public void When_ScheduleTellOnce_with_invalid_delay_Then_exception_is_thrown(int invalidTime) { - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); XAssert.Throws(() => scheduler.ScheduleTellOnce(invalidTime, TestActor, "Test", ActorRefs.NoSender) @@ -139,7 +139,7 @@ public void When_ScheduleTellOnce_with_invalid_delay_Then_exception_is_thrown(in [InlineData(-4711)] public void When_ScheduleTellRepeatedly_with_invalid_delay_Then_exception_is_thrown(int invalidTime) { - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); XAssert.Throws(() => scheduler.ScheduleTellRepeatedly(invalidTime, 100, TestActor, "Test", ActorRefs.NoSender) @@ -153,7 +153,7 @@ public void When_ScheduleTellRepeatedly_with_invalid_delay_Then_exception_is_thr [InlineData(-4711)] public void 
When_ScheduleTellRepeatedly_with_invalid_interval_Then_exception_is_thrown(int invalidInterval) { - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); XAssert.Throws(() => scheduler.ScheduleTellRepeatedly(42, invalidInterval, TestActor, "Test", ActorRefs.NoSender) @@ -164,7 +164,7 @@ public void When_ScheduleTellRepeatedly_with_invalid_interval_Then_exception_is_ [Fact] public void When_ScheduleTellOnce_with_0_delay_Then_action_is_executed_immediately() { - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); scheduler.ScheduleTellOnce(0, TestActor, "Test", ActorRefs.NoSender); ExpectMsg("Test"); } @@ -172,7 +172,7 @@ public void When_ScheduleTellOnce_with_0_delay_Then_action_is_executed_immediate [Fact] public void When_ScheduleTellRepeatedly_with_0_delay_Then_action_is_executed_immediately() { - IScheduler scheduler = new TaskBasedScheduler(); + IScheduler scheduler = new DedicatedThreadScheduler(Sys); scheduler.ScheduleTellRepeatedly(0, 60 * 1000, TestActor, "Test", ActorRefs.NoSender); ExpectMsg("Test"); } diff --git a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_Verify_TimeProvider.cs b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_Verify_TimeProvider.cs index 62d442bed06..ac32acd12d5 100644 --- a/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_Verify_TimeProvider.cs +++ b/src/core/Akka.Tests/Actor/Scheduler/TaskBasedScheduler_Verify_TimeProvider.cs @@ -1,5 +1,5 @@ //----------------------------------------------------------------------- -// +// // Copyright (C) 2009-2015 Typesafe Inc. // Copyright (C) 2013-2015 Akka.NET project // @@ -13,13 +13,16 @@ namespace Akka.Tests.Actor.Scheduler { // ReSharper disable once InconsistentNaming - public class TaskBasedScheduler_Verify_TimeProvider + public class DedicatedThreadScheduler_Verify_TimeProvider { [Fact] public void Now_Should_be_accurate() { - ITimeProvider timeProvider = new TaskBasedScheduler(); - Math.Abs((timeProvider.Now - DateTimeOffset.Now).TotalMilliseconds).ShouldBeLessThan(20); + using (var sys = ActorSystem.Create("Foo")) + { + ITimeProvider timeProvider = new DedicatedThreadScheduler(sys); + Math.Abs((timeProvider.Now - DateTimeOffset.Now).TotalMilliseconds).ShouldBeLessThan(20); + } } } } diff --git a/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs b/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs index 2c4733c781d..0ffac036feb 100644 --- a/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs +++ b/src/core/Akka/Actor/Scheduler/DedicatedThreadScheduler.cs @@ -2,12 +2,14 @@ using System.Collections.Concurrent; using System.Collections.Generic; using System.Threading; +using Akka.Event; namespace Akka.Actor { public class DedicatedThreadScheduler : SchedulerBase, IDateTimeOffsetNowTimeProvider { - private readonly ConcurrentQueue _workQueue = new ConcurrentQueue(); + private readonly ConcurrentQueue _workQueue = new ConcurrentQueue(); + private ILoggingAdapter _log; protected override DateTimeOffset TimeNow { get { return DateTimeOffset.Now; } } public override TimeSpan MonotonicClock { get { return Util.MonotonicClock.Elapsed; } } public override TimeSpan HighResMonotonicClock { get { return Util.MonotonicClock.ElapsedHighRes; } } @@ -15,6 +17,7 @@ public class DedicatedThreadScheduler : SchedulerBase, IDateTimeOffsetNowTimePro //TODO: use some more efficient approach to handle future work public DedicatedThreadScheduler(ActorSystem system) { + _log = 
Logging.GetLogger(system, this); var precision = system.Settings.Config.GetTimeSpan("akka.scheduler.tick-duration"); var thread = new Thread(_ => { @@ -105,7 +108,10 @@ private void InternalScheduleOnce(TimeSpan initialDelay, Action action, Cancella action(); } catch (OperationCanceledException) { } - //TODO: Should we log other exceptions? /@hcanber + catch (Exception x) + { + _log.Error(x, "DedicatedThreadScheduler faild to execute action"); + } }; AddWork(initialDelay, executeAction, token); @@ -123,15 +129,16 @@ private void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval try { action(); + if (token.IsCancellationRequested) + return; + + AddWork(interval, executeAction, token); } catch (OperationCanceledException) { } - //TODO: Should we log other exceptions? /@hcanber - - if (token.IsCancellationRequested) - return; - - AddWork(interval, executeAction,token); - + catch (Exception x) + { + _log.Error(x,"DedicatedThreadScheduler faild to execute action"); + } }; AddWork(initialDelay, executeAction, token); diff --git a/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs b/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs deleted file mode 100644 index 612ac9a9e6a..00000000000 --- a/src/core/Akka/Actor/Scheduler/TaskBasedScheduler.cs +++ /dev/null @@ -1,92 +0,0 @@ -//----------------------------------------------------------------------- -// -// Copyright (C) 2009-2015 Typesafe Inc. -// Copyright (C) 2013-2015 Akka.NET project -// -//----------------------------------------------------------------------- - -using System; -using System.Threading; -using System.Threading.Tasks; - -namespace Akka.Actor -{ - /// - /// Class Scheduler. - /// - public class TaskBasedScheduler : SchedulerBase, IDateTimeOffsetNowTimeProvider - { - - protected override DateTimeOffset TimeNow { get { return DateTimeOffset.Now; } } - public override TimeSpan MonotonicClock { get { return Util.MonotonicClock.Elapsed; } } - public override TimeSpan HighResMonotonicClock { get { return Util.MonotonicClock.ElapsedHighRes; } } - - protected override void InternalScheduleTellOnce(TimeSpan delay, ICanTell receiver, object message, IActorRef sender, ICancelable cancelable) - { - var cancellationToken = cancelable == null ? CancellationToken.None : cancelable.Token; - InternalScheduleOnce(delay, () => receiver.Tell(message, sender), cancellationToken); - } - - protected override void InternalScheduleTellRepeatedly(TimeSpan initialDelay, TimeSpan interval, ICanTell receiver, object message, IActorRef sender, ICancelable cancelable) - { - var cancellationToken = cancelable == null ? CancellationToken.None : cancelable.Token; - InternalScheduleRepeatedly(initialDelay, interval, () => receiver.Tell(message, sender), cancellationToken); - } - - protected override void InternalScheduleOnce(TimeSpan delay, Action action, ICancelable cancelable) - { - var cancellationToken = cancelable == null ? CancellationToken.None : cancelable.Token; - InternalScheduleOnce(delay, action, cancellationToken); - } - - protected override void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval, Action action, ICancelable cancelable) - { - var cancellationToken = cancelable == null ? 
CancellationToken.None : cancelable.Token; - InternalScheduleRepeatedly(initialDelay, interval, action, cancellationToken); - } - - - private void InternalScheduleOnce(TimeSpan initialDelay, Action action, CancellationToken token) - { - Task.Delay(initialDelay, token).ContinueWith(t => - { - if (token.IsCancellationRequested) return; - - token.ThrowIfCancellationRequested(); - try - { - action(); - } - catch (OperationCanceledException) { } - //TODO: Should we log other exceptions? /@hcanber - - }, token, TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Current); - } - - - private void InternalScheduleRepeatedly(TimeSpan initialDelay, TimeSpan interval, Action action, CancellationToken token) - { - Action executeAction = null; - executeAction = t => - { - if (token.IsCancellationRequested) return; - try - { - action(); - } - catch (OperationCanceledException) { } - //TODO: Should we log other exceptions? /@hcanber - - if (token.IsCancellationRequested) return; - - Task.Delay(interval, token) - .ContinueWith(executeAction, token, TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Current); - }; - Task.Delay(initialDelay, token) - .ContinueWith(executeAction, token, TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Current); - - } - - } -} - diff --git a/src/core/Akka/Akka.csproj b/src/core/Akka/Akka.csproj index 040eb68e398..8785cf71e94 100644 --- a/src/core/Akka/Akka.csproj +++ b/src/core/Akka/Akka.csproj @@ -151,7 +151,6 @@ - From 600a82c5a3c9ce3c985106fa35a049572fd06428 Mon Sep 17 00:00:00 2001 From: Stefan Sedich Date: Fri, 22 May 2015 08:15:34 +1000 Subject: [PATCH 51/66] Fixing up the tests for the actor system scheduler specs (bitten by R#9 being all fancy) --- src/core/Akka.Tests/Actor/ActorSystemSpec.cs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/core/Akka.Tests/Actor/ActorSystemSpec.cs b/src/core/Akka.Tests/Actor/ActorSystemSpec.cs index 82558b9c451..1d308f28656 100644 --- a/src/core/Akka.Tests/Actor/ActorSystemSpec.cs +++ b/src/core/Akka.Tests/Actor/ActorSystemSpec.cs @@ -170,10 +170,10 @@ public void ScheduleTellRepeatedly(TimeSpan initialDelay, TimeSpan interval, ICa throw new NotImplementedException(); } - public DateTimeOffset Now { get; } - public TimeSpan MonotonicClock { get; } - public TimeSpan HighResMonotonicClock { get; } - public IAdvancedScheduler Advanced { get; } + public DateTimeOffset Now { get; private set; } + public TimeSpan MonotonicClock { get; private set; } + public TimeSpan HighResMonotonicClock { get; private set; } + public IAdvancedScheduler Advanced { get; private set; } } } From 27b727f281d6339176ac9aa09c239414f5fcbcac Mon Sep 17 00:00:00 2001 From: rogeralsing Date: Fri, 22 May 2015 19:18:02 +0200 Subject: [PATCH 52/66] Remove empty file --- src/core/Akka/Akka.csproj | 1 - src/core/Akka/Routing/TailChoppingPool.cs | 17 ----------------- 2 files changed, 18 deletions(-) delete mode 100644 src/core/Akka/Routing/TailChoppingPool.cs diff --git a/src/core/Akka/Akka.csproj b/src/core/Akka/Akka.csproj index 8785cf71e94..aecf494824d 100644 --- a/src/core/Akka/Akka.csproj +++ b/src/core/Akka/Akka.csproj @@ -232,7 +232,6 @@ - diff --git a/src/core/Akka/Routing/TailChoppingPool.cs b/src/core/Akka/Routing/TailChoppingPool.cs deleted file mode 100644 index 30b13cbbbee..00000000000 --- a/src/core/Akka/Routing/TailChoppingPool.cs +++ /dev/null @@ -1,17 +0,0 @@ -//----------------------------------------------------------------------- -// -// Copyright (C) 2009-2015 Typesafe Inc. 
-// Copyright (C) 2013-2015 Akka.NET project -// -//----------------------------------------------------------------------- - -using System; -using Akka.Actor; -using Akka.Configuration; -using Akka.Util; - -namespace Akka.Routing -{ - -} - From ce04a8a561af44e6c2f152eabb2e319301eb2329 Mon Sep 17 00:00:00 2001 From: Dmitry Vakylenko Date: Sat, 23 May 2015 12:26:48 +0300 Subject: [PATCH 53/66] Added ctor overload Added constructor overload for ScatterGatherFirstCompletedPool --- src/core/Akka/Routing/ScatterGatherFirstCompleted.cs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/core/Akka/Routing/ScatterGatherFirstCompleted.cs b/src/core/Akka/Routing/ScatterGatherFirstCompleted.cs index bed6e7462b9..800b080e2a7 100644 --- a/src/core/Akka/Routing/ScatterGatherFirstCompleted.cs +++ b/src/core/Akka/Routing/ScatterGatherFirstCompleted.cs @@ -183,6 +183,15 @@ public ScatterGatherFirstCompletedPool(int nrOfInstances, Resizer resizer, Super _within = within; } + /// + /// + /// The nr of instances. + /// Expect a response within the given timespan + public ScatterGatherFirstCompletedPool(int nrOfInstances, TimeSpan within) : this(nrOfInstances) + { + _within = within; + } + public ScatterGatherFirstCompletedPool(Config config) : base(config) { _within = config.GetTimeSpan("within"); From a3a9fec31372f54c0cb7a2dfe35f7851d68ce23c Mon Sep 17 00:00:00 2001 From: Nikita Tsukanov Date: Sat, 23 May 2015 23:03:34 +0300 Subject: [PATCH 54/66] Replaced DateTime.Now with DateTime.UtcNow/MonotonicClock #846 --- src/core/Akka.Cluster/ClusterMetricsCollector.cs | 2 +- src/core/Akka.Persistence/AtLeastOnceDelivery.cs | 6 +++--- .../Akka.Persistence/Snapshot/SnapshotStore.cs | 2 +- src/core/Akka.Remote/Deadline.cs | 10 +++++----- src/core/Akka.Tests/Actor/ActorSystemSpec.cs | 6 +++--- src/core/Akka.Tests/Actor/InboxSpec.cs | 2 +- .../Actor/ChildrenContainer/Internal/ChildStats.cs | 3 ++- src/core/Akka/Actor/Inbox.Actor.cs | 6 +++--- src/core/Akka/Actor/Inbox.cs | 14 +++++++------- src/core/Akka/Event/LogEvent.cs | 2 +- src/examples/TimeServer/TimeServer/Program.cs | 2 +- 11 files changed, 28 insertions(+), 27 deletions(-) diff --git a/src/core/Akka.Cluster/ClusterMetricsCollector.cs b/src/core/Akka.Cluster/ClusterMetricsCollector.cs index 6d22fdbcbf1..8853b7e201e 100644 --- a/src/core/Akka.Cluster/ClusterMetricsCollector.cs +++ b/src/core/Akka.Cluster/ClusterMetricsCollector.cs @@ -623,7 +623,7 @@ internal static class StandardMetrics public static long NewTimestamp() { - return DateTime.Now.Ticks; + return DateTime.UtcNow.Ticks; } public sealed class SystemMemory diff --git a/src/core/Akka.Persistence/AtLeastOnceDelivery.cs b/src/core/Akka.Persistence/AtLeastOnceDelivery.cs index 940f05f583f..c4f3371bb2b 100644 --- a/src/core/Akka.Persistence/AtLeastOnceDelivery.cs +++ b/src/core/Akka.Persistence/AtLeastOnceDelivery.cs @@ -209,7 +209,7 @@ public void Deliver(ActorPath destination, Func deliveryMessageMap } var deliveryId = NextDeliverySequenceNr(); - var now = IsRecovering ? DateTime.Now - RedeliverInterval : DateTime.Now; + var now = IsRecovering ? 
DateTime.UtcNow - RedeliverInterval : DateTime.UtcNow; var delivery = new Delivery(destination, deliveryMessageMapper(deliveryId), now, attempt: 0); if (IsRecovering) @@ -254,7 +254,7 @@ public AtLeastOnceDeliverySnapshot GetDeliverySnapshot() public void SetDeliverySnapshot(AtLeastOnceDeliverySnapshot snapshot) { _deliverySequenceNr = snapshot.DeliveryId; - var now = DateTime.Now; + var now = DateTime.UtcNow; var unconfirmedDeliveries = snapshot.UnconfirmedDeliveries .Select(u => new KeyValuePair(u.DeliveryId, new Delivery(u.Destination, u.Message, now, 0))); @@ -302,7 +302,7 @@ private void Send(long deliveryId, Delivery delivery, DateTime timestamp) private void RedeliverOverdue() { - var now = DateTime.Now; + var now = DateTime.UtcNow; var deadline = now - RedeliverInterval; var warnings = new List(); diff --git a/src/core/Akka.Persistence/Snapshot/SnapshotStore.cs b/src/core/Akka.Persistence/Snapshot/SnapshotStore.cs index 0dbabfd68e4..5b423d25af2 100644 --- a/src/core/Akka.Persistence/Snapshot/SnapshotStore.cs +++ b/src/core/Akka.Persistence/Snapshot/SnapshotStore.cs @@ -42,7 +42,7 @@ protected override bool Receive(object message) else if (message is SaveSnapshot) { var msg = (SaveSnapshot)message; - var metadata = new SnapshotMetadata(msg.Metadata.PersistenceId, msg.Metadata.SequenceNr, DateTime.Now); + var metadata = new SnapshotMetadata(msg.Metadata.PersistenceId, msg.Metadata.SequenceNr, DateTime.UtcNow); SaveAsync(metadata, msg.Snapshot).ContinueWith(t => !t.IsFaulted ? (object)new SaveSnapshotSuccess(metadata) diff --git a/src/core/Akka.Remote/Deadline.cs b/src/core/Akka.Remote/Deadline.cs index 42846e6accb..dc1817e564e 100644 --- a/src/core/Akka.Remote/Deadline.cs +++ b/src/core/Akka.Remote/Deadline.cs @@ -21,12 +21,12 @@ public Deadline(DateTime when) public bool IsOverdue { - get { return DateTime.Now > When; } + get { return DateTime.UtcNow > When; } } public bool HasTimeLeft { - get { return DateTime.Now < When; } + get { return DateTime.UtcNow < When; } } public DateTime When { get; private set; } @@ -34,7 +34,7 @@ public bool HasTimeLeft /// /// Warning: creates a new instance each time it's used /// - public TimeSpan TimeLeft { get { return When - DateTime.Now; } } + public TimeSpan TimeLeft { get { return When - DateTime.UtcNow; } } #region Overrides @@ -60,13 +60,13 @@ public override int GetHashCode() #region Static members /// - /// Returns a deadline that is due + /// Returns a deadline that is due /// public static Deadline Now { get { - return new Deadline(DateTime.Now); + return new Deadline(DateTime.UtcNow); } } diff --git a/src/core/Akka.Tests/Actor/ActorSystemSpec.cs b/src/core/Akka.Tests/Actor/ActorSystemSpec.cs index 7954ce3ec95..2946188e604 100644 --- a/src/core/Akka.Tests/Actor/ActorSystemSpec.cs +++ b/src/core/Akka.Tests/Actor/ActorSystemSpec.cs @@ -11,6 +11,7 @@ using Xunit; using System; using System.Collections.Generic; +using System.Diagnostics; namespace Akka.Tests.Actor { @@ -54,11 +55,10 @@ public void AnActorSystemShouldBeAllowedToBlockUntilExit() { var actorSystem = ActorSystem .Create(Guid.NewGuid().ToString()); - var startTime = DateTime.UtcNow; + var st = Stopwatch.StartNew(); var asyncShutdownTask = Task.Delay(TimeSpan.FromSeconds(1)).ContinueWith(_ => actorSystem.Shutdown()); actorSystem.AwaitTermination(TimeSpan.FromSeconds(2)).ShouldBeTrue(); - var endTime = DateTime.UtcNow; - Assert.True((endTime - startTime).TotalSeconds >= .9); + Assert.True(st.Elapsed.TotalSeconds >= .9); } [Fact] diff --git a/src/core/Akka.Tests/Actor/InboxSpec.cs 
b/src/core/Akka.Tests/Actor/InboxSpec.cs index bd7ee5e5e3e..56bf16f3edb 100644 --- a/src/core/Akka.Tests/Actor/InboxSpec.cs +++ b/src/core/Akka.Tests/Actor/InboxSpec.cs @@ -138,7 +138,7 @@ public void Inbox_have_a_default_and_custom_timeouts() [Fact] public void Select_WithClient_should_update_Client_and_copy_the_rest_of_the_properties_BUG_427() { - var deadline = new DateTime(1919, 5, 24); + var deadline = new TimeSpan(Sys.Scheduler.MonotonicClock.Ticks/2); //Some point in the past Predicate predicate = o => true; var actorRef = new EmptyLocalActorRef(((ActorSystemImpl)Sys).Provider, new RootActorPath(new Address("akka", "test")), Sys.EventStream); var select = new Select(deadline, predicate, actorRef); diff --git a/src/core/Akka/Actor/ChildrenContainer/Internal/ChildStats.cs b/src/core/Akka/Actor/ChildrenContainer/Internal/ChildStats.cs index 86f2bd0c42d..637d8e68302 100644 --- a/src/core/Akka/Actor/ChildrenContainer/Internal/ChildStats.cs +++ b/src/core/Akka/Actor/ChildrenContainer/Internal/ChildStats.cs @@ -6,6 +6,7 @@ //----------------------------------------------------------------------- using System; +using Akka.Util; namespace Akka.Actor.Internal { @@ -79,7 +80,7 @@ private bool RetriesInWindowOkay(int retries, int windowInMilliseconds) // after a restart and if enough restarts happen during this time, it // denies. Otherwise window closes and the scheme starts over. var retriesDone = _maxNrOfRetriesCount + 1; - var now = DateTime.Now.Ticks; + var now = MonotonicClock.Elapsed.Ticks; long windowStart; if (_restartTimeWindowStartTicks == 0) { diff --git a/src/core/Akka/Actor/Inbox.Actor.cs b/src/core/Akka/Actor/Inbox.Actor.cs index 8274779e29d..8c55d9afffe 100644 --- a/src/core/Akka/Actor/Inbox.Actor.cs +++ b/src/core/Akka/Actor/Inbox.Actor.cs @@ -22,7 +22,7 @@ internal class InboxActor : ActorBase private object _currentMessage; private Select? 
_currentSelect; - private Tuple _currentDeadline; + private Tuple _currentDeadline; private int _size; private ILoggingAdapter _log = Context.GetLogger(); @@ -120,7 +120,7 @@ protected override bool Receive(object message) .With(sw => Context.Unwatch(sw.Target)) .With(() => { - var now = DateTime.Now; + var now = Context.System.Scheduler.MonotonicClock; var overdue = _clientsByTimeout.TakeWhile(q => q.Deadline < now); foreach (var query in overdue) { @@ -169,7 +169,7 @@ protected override bool Receive(object message) { _currentDeadline.Item2.Cancel(); } - var cancelable = Context.System.Scheduler.ScheduleTellOnceCancelable(next.Deadline - DateTime.Now, Self, new Kick(), Self); + var cancelable = Context.System.Scheduler.ScheduleTellOnceCancelable(next.Deadline - Context.System.Scheduler.MonotonicClock, Self, new Kick(), Self); _currentDeadline = Tuple.Create(next.Deadline, cancelable); } diff --git a/src/core/Akka/Actor/Inbox.cs b/src/core/Akka/Actor/Inbox.cs index cd6b87c7d47..04d80198021 100644 --- a/src/core/Akka/Actor/Inbox.cs +++ b/src/core/Akka/Actor/Inbox.cs @@ -16,21 +16,21 @@ namespace Akka.Actor { internal interface IQuery { - DateTime Deadline { get; } + TimeSpan Deadline { get; } IActorRef Client { get; } IQuery WithClient(IActorRef client); } internal struct Get : IQuery { - public Get(DateTime deadline, IActorRef client = null) + public Get(TimeSpan deadline, IActorRef client = null) : this() { Deadline = deadline; Client = client; } - public DateTime Deadline { get; private set; } + public TimeSpan Deadline { get; private set; } public IActorRef Client { get; private set; } public IQuery WithClient(IActorRef client) { @@ -40,7 +40,7 @@ public IQuery WithClient(IActorRef client) internal struct Select : IQuery { - public Select(DateTime deadline, Predicate predicate, IActorRef client = null) + public Select(TimeSpan deadline, Predicate predicate, IActorRef client = null) : this() { Deadline = deadline; @@ -48,7 +48,7 @@ public Select(DateTime deadline, Predicate predicate, IActorRef client = Client = client; } - public DateTime Deadline { get; private set; } + public TimeSpan Deadline { get; private set; } public Predicate Predicate { get; set; } public IActorRef Client { get; private set; } public IQuery WithClient(IActorRef client) @@ -319,7 +319,7 @@ public object ReceiveWhere(Predicate predicate) public object ReceiveWhere(Predicate predicate, TimeSpan timeout) { - var task = Receiver.Ask(new Select(DateTime.Now + timeout, predicate), Timeout.InfiniteTimeSpan); + var task = Receiver.Ask(new Select(_system.Scheduler.MonotonicClock + timeout, predicate), Timeout.InfiniteTimeSpan); return AwaitResult(task, timeout); } @@ -330,7 +330,7 @@ public Task ReceiveAsync() public Task ReceiveAsync(TimeSpan timeout) { - return Receiver.Ask(new Get(DateTime.Now + timeout), Timeout.InfiniteTimeSpan); + return Receiver.Ask(new Get(_system.Scheduler.MonotonicClock + timeout), Timeout.InfiniteTimeSpan); } public void Dispose() diff --git a/src/core/Akka/Event/LogEvent.cs b/src/core/Akka/Event/LogEvent.cs index 65c6c00e8ea..db890fde1de 100644 --- a/src/core/Akka/Event/LogEvent.cs +++ b/src/core/Akka/Event/LogEvent.cs @@ -21,7 +21,7 @@ public abstract class LogEvent : INoSerializationVerificationNeeded /// protected LogEvent() { - Timestamp = DateTime.Now; + Timestamp = DateTime.UtcNow; Thread = Thread.CurrentThread; } diff --git a/src/examples/TimeServer/TimeServer/Program.cs b/src/examples/TimeServer/TimeServer/Program.cs index 7be2a4a6418..2cbd6845d11 100644 --- 
a/src/examples/TimeServer/TimeServer/Program.cs +++ b/src/examples/TimeServer/TimeServer/Program.cs @@ -33,7 +33,7 @@ public void Handle(string message) { if (message.ToLowerInvariant() == "gettime") { - var time =DateTime.Now.ToLongTimeString(); + var time =DateTime.UtcNow.ToLongTimeString(); Sender.Tell(time, Self); } else From f47a609feb8a7222cd734a009d86d07a39a9dd89 Mon Sep 17 00:00:00 2001 From: Stefan Sedich Date: Sun, 24 May 2015 13:32:36 +1000 Subject: [PATCH 55/66] Fix order that the scheduler is initialized --- src/core/Akka/Actor/Internals/ActorSystemImpl.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/core/Akka/Actor/Internals/ActorSystemImpl.cs b/src/core/Akka/Actor/Internals/ActorSystemImpl.cs index a11b8513825..59af3222945 100644 --- a/src/core/Akka/Actor/Internals/ActorSystemImpl.cs +++ b/src/core/Akka/Actor/Internals/ActorSystemImpl.cs @@ -53,9 +53,9 @@ public ActorSystemImpl(string name, Config config) _name = name; ConfigureSettings(config); - ConfigureScheduler(); ConfigureEventStream(); ConfigureProvider(); + ConfigureScheduler(); ConfigureSerialization(); ConfigureMailboxes(); ConfigureDispatchers(); From cdebe96b85c329d56123a6a89f5a4629ea9165a2 Mon Sep 17 00:00:00 2001 From: Sean Gilliam Date: Tue, 26 May 2015 16:15:41 -0500 Subject: [PATCH 56/66] Fixed missing/boilerplate xmldoc comments Fixed missing/boilerplate xmldoc comments to the Akka.Configuration.HOCON classes --- .../Hocon/AkkaConfigurationSection.cs | 35 +++ .../Hocon/CDataConfigurationElement.cs | 25 +- .../Akka/Configuration/Hocon/HoconArray.cs | 41 ++- .../Hocon/HoconConfigurationElement.cs | 20 ++ .../Akka/Configuration/Hocon/HoconLiteral.cs | 40 +++ .../Akka/Configuration/Hocon/HoconObject.cs | 75 ++++++ .../Akka/Configuration/Hocon/HoconParser.cs | 61 ++--- .../Akka/Configuration/Hocon/HoconRoot.cs | 24 +- .../Configuration/Hocon/HoconSubstitution.cs | 50 ++-- .../Akka/Configuration/Hocon/HoconToken.cs | 69 ++--- .../Configuration/Hocon/HoconTokenizer.cs | 248 ++++++++---------- .../Akka/Configuration/Hocon/HoconValue.cs | 136 +++++++++- .../Akka/Configuration/Hocon/IHoconElement.cs | 34 ++- 13 files changed, 593 insertions(+), 265 deletions(-) diff --git a/src/core/Akka/Configuration/Hocon/AkkaConfigurationSection.cs b/src/core/Akka/Configuration/Hocon/AkkaConfigurationSection.cs index 29d646d3f4d..a4fe34e3f6b 100644 --- a/src/core/Akka/Configuration/Hocon/AkkaConfigurationSection.cs +++ b/src/core/Akka/Configuration/Hocon/AkkaConfigurationSection.cs @@ -9,16 +9,51 @@ namespace Akka.Configuration.Hocon { + /// + /// This class represents a custom akka node within a configuration file. + /// + /// + /// + /// + ///
+ /// ...
public class AkkaConfigurationSection : ConfigurationSection { private const string ConfigurationPropertyName = "hocon"; private Config _akkaConfig; + /// + /// Retrieves a Config from the contents of the + /// custom akka node within a configuration file. + /// public Config AkkaConfig { get { return _akkaConfig ?? (_akkaConfig = ConfigurationFactory.ParseString(Hocon.Content)); } } + /// + /// Retrieves the HOCON (Human-Optimized Config Object Notation) + /// configuration string from the custom akka node. + /// + /// + /// + ///
[ConfigurationProperty(ConfigurationPropertyName, IsRequired = true)] public HoconConfigurationElement Hocon { diff --git a/src/core/Akka/Configuration/Hocon/CDataConfigurationElement.cs b/src/core/Akka/Configuration/Hocon/CDataConfigurationElement.cs index 93ca5a5cf11..e2600323545 100644 --- a/src/core/Akka/Configuration/Hocon/CDataConfigurationElement.cs +++ b/src/core/Akka/Configuration/Hocon/CDataConfigurationElement.cs @@ -10,10 +10,34 @@ namespace Akka.Configuration.Hocon { + /// + /// This class represents the base implementation for retrieving text from + /// an XML CDATA node within a configuration file. + /// + /// + /// + /// + ///
public abstract class CDataConfigurationElement : ConfigurationElement { protected const string ContentPropertyName = "content"; + /// + /// Deserializes the text located in a CDATA node of the configuration file. + /// + /// The that reads from the configuration file. + /// true to serialize only the collection key properties; otherwise, false. protected override void DeserializeElement(XmlReader reader, bool serializeCollectionKey) { foreach (ConfigurationProperty configurationProperty in Properties) @@ -34,4 +58,3 @@ protected override void DeserializeElement(XmlReader reader, bool serializeColle } } } - diff --git a/src/core/Akka/Configuration/Hocon/HoconArray.cs b/src/core/Akka/Configuration/Hocon/HoconArray.cs index 7538ca7b53b..9d373e9ed29 100644 --- a/src/core/Akka/Configuration/Hocon/HoconArray.cs +++ b/src/core/Akka/Configuration/Hocon/HoconArray.cs @@ -11,51 +11,68 @@ namespace Akka.Configuration.Hocon { /// - /// Class HoconArray. + /// This class represents an array element in a HOCON (Human-Optimized Config Object Notation) + /// configuration string. + /// + /// akka { + /// cluster { + /// seed-nodes = [ + /// "akka.tcp://ClusterSystem@127.0.0.1:2551", + /// "akka.tcp://ClusterSystem@127.0.0.1:2552"] + /// } + /// } + /// /// public class HoconArray : List, IHoconElement { /// - /// Determines whether this instance is string. + /// Determines whether this element is a string. /// - /// true if this instance is string; otherwise, false. + /// false public bool IsString() { return false; } /// - /// Gets the string. + /// Retrieves the string representation of this element. /// - /// System.String. - /// + /// + /// The string representation of this element. + /// + /// + /// This element is an array. It is not a string. + /// Therefore this method will throw an exception. + /// public string GetString() { throw new NotImplementedException(); } /// - /// Determines whether this instance is array. + /// Determines whether this element is an array. /// - /// true if this instance is array; otherwise, false. + /// true public bool IsArray() { return true; } /// - /// Gets the array. + /// Retrieves a list of elements associated with this element. /// - /// IList<HoconValue>. + /// + /// A list of elements associated with this element. + /// public IList GetArray() { return this; } /// - /// Returns a that represents this instance. + /// Returns a HOCON string representation of this element. /// - /// A that represents this instance. + /// A HOCON string representation of this element. public override string ToString() { return "[" + string.Join(",", this) + "]"; diff --git a/src/core/Akka/Configuration/Hocon/HoconConfigurationElement.cs b/src/core/Akka/Configuration/Hocon/HoconConfigurationElement.cs index d4f0a7c7f24..3b637b79778 100644 --- a/src/core/Akka/Configuration/Hocon/HoconConfigurationElement.cs +++ b/src/core/Akka/Configuration/Hocon/HoconConfigurationElement.cs @@ -9,8 +9,28 @@ namespace Akka.Configuration.Hocon { + /// + /// This class represents a custom HOCON (Human-Optimized Config Object Notation) + /// node within a configuration file. + /// + /// + /// + /// + ///
public class HoconConfigurationElement : CDataConfigurationElement { + /// + /// Gets or sets the HOCON configuration string contained in the hocon node. + /// [ConfigurationProperty(ContentPropertyName, IsRequired = true, IsKey = true)] public string Content { diff --git a/src/core/Akka/Configuration/Hocon/HoconLiteral.cs b/src/core/Akka/Configuration/Hocon/HoconLiteral.cs index e13e893b4d6..cd6a9cb7f02 100644 --- a/src/core/Akka/Configuration/Hocon/HoconLiteral.cs +++ b/src/core/Akka/Configuration/Hocon/HoconLiteral.cs @@ -10,30 +10,70 @@ namespace Akka.Configuration.Hocon { + /// + /// This class represents a string literal element in a HOCON (Human-Optimized Config Object Notation) + /// configuration string. + /// + /// akka { + /// actor { + /// provider = "Akka.Remote.RemoteActorRefProvider, Akka.Remote" + /// } + /// } + /// + /// public class HoconLiteral : IHoconElement { + /// + /// Gets or sets the value of this element. + /// public string Value { get; set; } + /// + /// Determines whether this element is a string. + /// + /// true public bool IsString() { return true; } + /// + /// Retrieves the string representation of this element. + /// + /// The value of this element. public string GetString() { return Value; } + /// + /// Determines whether this element is an array. + /// + /// false public bool IsArray() { return false; } + /// + /// Retrieves a list of elements associated with this element. + /// + /// + /// A list of elements associated with this element. + /// + /// + /// This element is a string literal. It is not an array. + /// Therefore this method will throw an exception. + /// public IList GetArray() { throw new NotImplementedException(); } + /// + /// Returns the string representation of this element. + /// + /// The value of this element. public override string ToString() { return Value; diff --git a/src/core/Akka/Configuration/Hocon/HoconObject.cs b/src/core/Akka/Configuration/Hocon/HoconObject.cs index e0897789fc7..c81f319f87c 100644 --- a/src/core/Akka/Configuration/Hocon/HoconObject.cs +++ b/src/core/Akka/Configuration/Hocon/HoconObject.cs @@ -13,13 +13,37 @@ namespace Akka.Configuration.Hocon { + /// + /// This class represents an object element in a HOCON (Human-Optimized Config Object Notation) + /// configuration string. + /// + /// akka { + /// actor { + /// debug { + /// receive = on + /// autoreceive = on + /// lifecycle = on + /// event-stream = on + /// unhandled = on + /// } + /// } + /// } + /// + /// public class HoconObject : IHoconElement { + /// + /// Initializes a new instance of the class. + /// public HoconObject() { Items = new Dictionary(); } + /// + /// Retrieves the underlying map that contains the barebones + /// object values. + /// [JsonIgnore] public IDictionary Unwrapped { @@ -35,28 +59,63 @@ public IDictionary Unwrapped } } + /// + /// Retrieves the underlying map that this element is based on. + /// public Dictionary Items { get; private set; } + /// + /// Determines whether this element is a string. + /// + /// false public bool IsString() { return false; } + /// + /// Retrieves the string representation of this element. + /// + /// The string representation of this element. + /// + /// This element is an object. It is not a string. + /// Therefore this method will throw an exception. + /// public string GetString() { throw new NotImplementedException(); } + /// + /// Determines whether this element is an array. 
+ /// + /// false public bool IsArray() { return false; } + /// + /// Retrieves a list of elements associated with this element. + /// + /// A list of elements associated with this element. + /// + /// This element is an object. It is not an array. + /// Therefore this method will throw an exception. + /// public IList GetArray() { throw new NotImplementedException(); } + /// + /// Retrieves the value associated with the supplied key. + /// + /// The key associated with the value to retrieve. + /// + /// The value associated with the supplied key or null + /// if they key does not exist. + /// public HoconValue GetKey(string key) { if (Items.ContainsKey(key)) @@ -66,6 +125,13 @@ public HoconValue GetKey(string key) return null; } + /// + /// Retrieves the value associated with the supplied key. + /// If the supplied key is not found, then one is created + /// with a blank value. + /// + /// The key associated with the value to retrieve. + /// The value associated with the supplied key. public HoconValue GetOrCreateKey(string key) { if (Items.ContainsKey(key)) @@ -77,11 +143,20 @@ public HoconValue GetOrCreateKey(string key) return child; } + /// + /// Returns a HOCON string representation of this element. + /// + /// A HOCON string representation of this element. public override string ToString() { return ToString(0); } + /// + /// Returns a HOCON string representation of this element. + /// + /// The number of spaces to indent the string. + /// A HOCON string representation of this element. public string ToString(int indent) { var i = new string(' ', indent*2); diff --git a/src/core/Akka/Configuration/Hocon/HoconParser.cs b/src/core/Akka/Configuration/Hocon/HoconParser.cs index 4b75aa9f815..4baf6f47aae 100644 --- a/src/core/Akka/Configuration/Hocon/HoconParser.cs +++ b/src/core/Akka/Configuration/Hocon/HoconParser.cs @@ -12,41 +12,30 @@ namespace Akka.Configuration.Hocon { /// - /// Class Parser. + /// This class contains methods used to parse HOCON (Human-Optimized Config Object Notation) + /// configuration strings. /// public class Parser { - /// - /// The substitutions - /// private readonly List _substitutions = new List(); - - /// - /// The reader - /// private HoconTokenizer _reader; - - /// - /// The root - /// private HoconValue _root; /// - /// Parses the specified text. + /// Parses the supplied HOCON configuration string into a root element. /// - /// The text. - /// HoconValue. + /// The string that contains a HOCON configuration string. + /// The root element created from the supplied HOCON configuration string. + /// + /// This exception is thrown when an unresolved substitution is encountered. + /// It also occurs when the end of the file has been reached while trying + /// to read a value. + /// public static HoconRoot Parse(string text) { return new Parser().ParseText(text); } - /// - /// Parses the text. - /// - /// The text. - /// HoconValue. - /// Unresolved substitution: + sub.Path private HoconRoot ParseText(string text) { _root = new HoconValue(); @@ -65,11 +54,6 @@ private HoconRoot ParseText(string text) return new HoconRoot(_root, _substitutions); } - /// - /// Parses the object. - /// - /// The owner. - /// if set to true [root]. private void ParseObject(HoconValue owner, bool root) { if (owner.IsObject()) @@ -104,10 +88,6 @@ private void ParseObject(HoconValue owner, bool root) } } - /// - /// Parses the content of the key. - /// - /// The value. 
private void ParseKeyContent(HoconValue value) { while (!_reader.EoF) @@ -136,9 +116,10 @@ private void ParseKeyContent(HoconValue value) } /// - /// Parses the value. + /// Retrieves the next value token from the tokenizer and appends it + /// to the supplied element . /// - /// The owner. + /// The element to append the next token. /// End of file reached while trying to read a value public void ParseValue(HoconValue owner) { @@ -189,10 +170,6 @@ public void ParseValue(HoconValue owner) IgnoreComma(); } - /// - /// Parses the trailing whitespace. - /// - /// The owner. private void ParseTrailingWhitespace(HoconValue owner) { Token ws = _reader.PullSpaceOrTab(); @@ -207,20 +184,15 @@ private void ParseTrailingWhitespace(HoconValue owner) } } - /// - /// Parses the substitution. - /// - /// The value. - /// HoconSubstitution. private static HoconSubstitution ParseSubstitution(string value) { return new HoconSubstitution(value); } /// - /// Parses the array. + /// Retrieves the next array token from the tokenizer. /// - /// HoconArray. + /// An array of elements retrieved from the token. public HoconArray ParseArray() { var arr = new HoconArray(); @@ -235,9 +207,6 @@ public HoconArray ParseArray() return arr; } - /// - /// Ignores the comma. - /// private void IgnoreComma() { if (_reader.IsComma()) //optional end of value diff --git a/src/core/Akka/Configuration/Hocon/HoconRoot.cs b/src/core/Akka/Configuration/Hocon/HoconRoot.cs index a9832f66434..6b34842e9a7 100644 --- a/src/core/Akka/Configuration/Hocon/HoconRoot.cs +++ b/src/core/Akka/Configuration/Hocon/HoconRoot.cs @@ -10,25 +10,47 @@ namespace Akka.Configuration.Hocon { + /// + /// This class represents the root element in a HOCON (Human-Optimized Config Object Notation) + /// configuration string. + /// public class HoconRoot { + /// + /// Initializes a new instance of the class. + /// protected HoconRoot() - { + { } + /// + /// Initializes a new instance of the class. + /// + /// The value to associate with this element. + /// An enumeration of substitutions to associate with this element. public HoconRoot(HoconValue value, IEnumerable substitutions) { Value = value; Substitutions = substitutions; } + /// + /// Initializes a new instance of the class. + /// + /// The value to associate with this element. public HoconRoot(HoconValue value) { Value = value; Substitutions = Enumerable.Empty(); } + /// + /// Retrieves the value associated with this element. + /// public HoconValue Value { get; private set; } + /// + /// Retrieves an enumeration of substitutions associated with this element. + /// public IEnumerable Substitutions { get; private set; } } } diff --git a/src/core/Akka/Configuration/Hocon/HoconSubstitution.cs b/src/core/Akka/Configuration/Hocon/HoconSubstitution.cs index 1d34a9f2cf6..0492b15d43e 100644 --- a/src/core/Akka/Configuration/Hocon/HoconSubstitution.cs +++ b/src/core/Akka/Configuration/Hocon/HoconSubstitution.cs @@ -10,12 +10,26 @@ namespace Akka.Configuration.Hocon { /// - /// HOCON Substitution, e.g. $foo.bar + /// This class represents a substitution element in a HOCON (Human-Optimized Config Object Notation) + /// configuration string. + /// + /// akka { + /// defaultInstances = 10 + /// deployment{ + /// /user/time{ + /// nr-of-instances = $defaultInstances + /// } + /// } + /// } + /// /// public class HoconSubstitution : IHoconElement, IMightBeAHoconObject { + /// + /// Initializes a new instance of the class. 
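// Minimal usage sketch of Parser.Parse and HoconRoot as documented above
// (assumes using Akka.Configuration.Hocon; the config keys are hypothetical):
HoconRoot root = Parser.Parse("akka { loglevel = DEBUG }");
HoconValue akka = root.Value.GetChildObject("akka");
string level = akka.GetChildObject("loglevel").GetString();   // "DEBUG"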
+ /// protected HoconSubstitution() - { + { } /// @@ -30,64 +44,62 @@ public HoconSubstitution(string path) /// /// The full path to the value which should substitute this instance. /// - /// The path. public string Path { get; private set; } /// /// The evaluated value from the Path property /// - /// The resolved value. public HoconValue ResolvedValue { get; set; } /// - /// Determines whether this instance is string. + /// Determines whether this element is a string. /// - /// true if this instance is string; otherwise, false. + /// true if this element is a string; otherwise false public bool IsString() { return ResolvedValue.IsString(); } /// - /// Returns the value of this instance as a string. + /// Retrieves the string representation of this element. /// - /// System.String. + /// The string representation of this element. public string GetString() { return ResolvedValue.GetString(); } /// - /// Determines whether this instance is array. + /// Determines whether this element is an array. /// - /// true if this instance is array; otherwise, false. + /// true if this element is aan array; otherwise false public bool IsArray() { return ResolvedValue.IsArray(); } /// - /// Returns the value of this instance as an array. + /// Retrieves a list of elements associated with this element. /// - /// IList<HoconValue>. + /// A list of elements associated with this element. public IList GetArray() { return ResolvedValue.GetArray(); } /// - /// Determines whether this instance is an HOCON object. + /// Determines whether this element is a HOCON object. /// - /// true if this instance is object; otherwise, false. + /// true if this element is a HOCON object; otherwise false public bool IsObject() { return ResolvedValue != null && ResolvedValue.IsObject(); } /// - /// Returns the value of this instance as an HOCON object. + /// Retrieves the HOCON object representation of this element. /// - /// HoconObject. + /// The HOCON object representation of this element. public HoconObject GetObject() { return ResolvedValue.GetObject(); @@ -96,10 +108,10 @@ public HoconObject GetObject() #region Implicit operators /// - /// Performs an implicit conversion from to . + /// Performs an implicit conversion from to . /// - /// The substitution. - /// The result of the conversion. + /// The HOCON object that contains the substitution. + /// The HOCON object contained in the substitution. public static implicit operator HoconObject(HoconSubstitution substitution) { return substitution.GetObject(); diff --git a/src/core/Akka/Configuration/Hocon/HoconToken.cs b/src/core/Akka/Configuration/Hocon/HoconToken.cs index fda5af55456..0cd1a23db5f 100644 --- a/src/core/Akka/Configuration/Hocon/HoconToken.cs +++ b/src/core/Akka/Configuration/Hocon/HoconToken.cs @@ -8,96 +8,98 @@ namespace Akka.Configuration.Hocon { /// - /// Enum TokenType + /// This enumeration defines the different types of tokens found within + /// a HOCON (Human-Optimized Config Object Notation) configuration string. /// public enum TokenType { /// - /// The comment + /// This token type represents a comment. /// Comment, /// - /// The key + /// This token type represents the key portion of a key-value pair. /// Key, /// - /// The literal value + /// This token type represents the value portion of a key-value pair. /// LiteralValue, /// - /// The assign + /// This token type represents the assignment operator, = or : . /// Assign, /// - /// The object start + /// This token type represents the beginning of an object, { . 
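// Hedged sketch of how a parsed substitution resolves; "defaultInstances" and "akka.test.workers"
// are made-up keys, and ConfigurationFactory.ParseString is the usual higher-level entry point.
Config config = ConfigurationFactory.ParseString(
    "defaultInstances = 10 \n akka.test.workers = ${defaultInstances}");
int workers = config.GetInt("akka.test.workers");   // 10, read through the resolved substitution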
/// ObjectStart, /// - /// The object end + /// This token type represents the end of an object, } . /// ObjectEnd, /// - /// The dot + /// This token type represents a namespace separator, . . /// Dot, /// - /// The eo f + /// This token type represents the end of the configuration string. /// EoF, /// - /// The array start + /// This token type represents the beginning of an array, [ . /// ArrayStart, /// - /// The array end + /// This token type represents the end of an array, ] . /// ArrayEnd, /// - /// The comma + /// This token type represents the separator in an array, , . /// Comma, /// - /// The substitute + /// This token type represents a replacement variable, $foo . /// - Substitute, + Substitute } /// - /// Class Token. + /// This class represents a token within a HOCON (Human-Optimized Config Object Notation) + /// configuration string. /// public class Token { /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// protected Token() { } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// - /// The type. + /// The type of token to associate with. public Token(TokenType type) { Type = type; } /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// - /// The value. + /// The string literal value to associate with this token. public Token(string value) { Type = TokenType.LiteralValue; @@ -105,22 +107,22 @@ public Token(string value) } /// - /// If this instance is a LiteralValue, the Value property holds the string literal. + /// The value associated with this token. If this token is + /// a , then this property + /// holds the string literal. /// - /// The value. public string Value { get; set; } /// - /// The type of the token. + /// The type that represents this token. /// - /// The type. public TokenType Type { get; set; } /// - /// Creates a Key token. + /// Creates a key token with a given . /// - /// The key. - /// Token. + /// The key to associate with this token. + /// A key token with the given key. public static Token Key(string key) { return new Token @@ -131,10 +133,10 @@ public static Token Key(string key) } /// - /// Creates a Substitution token with a given Path + /// Creates a substitution token with a given . /// - /// The path. - /// Token. + /// The path to associate with this token. + /// A substitution token with the given path. public static Token Substitution(string path) { return new Token @@ -145,10 +147,10 @@ public static Token Substitution(string path) } /// - /// Creates a string Literal token. + /// Creates a string literal token with a given . /// - /// The value. - /// Token. + /// The value to associate with this token. + /// A string literal token with the given value. public static Token LiteralValue(string value) { return new Token @@ -159,4 +161,3 @@ public static Token LiteralValue(string value) } } } - diff --git a/src/core/Akka/Configuration/Hocon/HoconTokenizer.cs b/src/core/Akka/Configuration/Hocon/HoconTokenizer.cs index f7c5fd85155..b58e4d5045c 100644 --- a/src/core/Akka/Configuration/Hocon/HoconTokenizer.cs +++ b/src/core/Akka/Configuration/Hocon/HoconTokenizer.cs @@ -13,43 +13,36 @@ namespace Akka.Configuration.Hocon { /// - /// Class Tokenizer. + /// This class contains methods used to tokenize a string. /// public class Tokenizer { - /// - /// The text - /// private readonly string _text; - - /// - /// The index - /// private int _index; /// - /// Initializes a new instance of the class. 
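// Quick sketch of the Token factory helpers documented above (values are hypothetical):
Token key   = Token.Key("loglevel");                      // Type == TokenType.Key
Token value = Token.LiteralValue("DEBUG");                // Type == TokenType.LiteralValue
Token sub   = Token.Substitution("defaults.loglevel");    // Type == TokenType.Substitute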
+ /// Initializes a new instance of the class. /// - /// The text. + /// The string that contains the text to tokenize. public Tokenizer(string text) { this._text = text; } /// - /// Gets a value indicating whether [eof]. + /// A value indicating whether the tokenizer has reached the end of the string. /// - /// true if [eof]; otherwise, false. public bool EoF { get { return _index >= _text.Length; } } /// - /// Matches the specified pattern. + /// Determines whether the given pattern matches the value at the current + /// position of the tokenizer. /// - /// The pattern. - /// true if XXXX, false otherwise. + /// The string that contains the characters to match. + /// true if the pattern matches, otherwise false. public bool Matches(string pattern) { if (pattern.Length + _index > _text.Length) @@ -64,10 +57,13 @@ public bool Matches(string pattern) } /// - /// Takes the specified length. + /// Retrieves a string of the given length from the current position of the tokenizer. /// - /// The length. - /// System.String. + /// The length of the string to return. + /// + /// The string of the given length. If the length exceeds where the + /// current index is located, then null is returned. + /// public string Take(int length) { if (_index + length > _text.Length) @@ -79,10 +75,11 @@ public string Take(int length) } /// - /// Matches the specified patterns. + /// Determines whether any of the given patterns match the value at the current + /// position of the tokenizer. /// - /// The patterns. - /// true if XXXX, false otherwise. + /// The string array that contains the characters to match. + /// true if any one of the patterns match, otherwise false. public bool Matches(params string[] patterns) { foreach (string pattern in patterns) @@ -97,9 +94,9 @@ public bool Matches(params string[] patterns) } /// - /// Peeks this instance. + /// Retrieves the next character in the tokenizer without advancing its position. /// - /// System.Char. + /// The character at the tokenizer's current position. public char Peek() { if (EoF) @@ -109,9 +106,9 @@ public char Peek() } /// - /// Takes this instance. + /// Retrieves the next character in the tokenizer. /// - /// System.Char. + /// The character at the tokenizer's current position. public char Take() { if (EoF) @@ -121,7 +118,7 @@ public char Take() } /// - /// Pulls the whitespace. + /// Advances the tokenizer to the next non-whitespace character. /// public void PullWhitespace() { @@ -134,31 +131,25 @@ public void PullWhitespace() /// - /// Class HoconTokenizer. + /// This class contains methods used to tokenize HOCON (Human-Optimized Config Object Notation) + /// configuration strings. /// public class HoconTokenizer : Tokenizer { - /// - /// The not in unquoted key - /// private const string NotInUnquotedKey = "$\"{}[]:=,#`^?!@*&\\."; - - /// - /// The not in unquoted text - /// private const string NotInUnquotedText = "$\"{}[]:=,#`^?!@*&\\"; /// - /// Initializes a new instance of the class. + /// Initializes a new instance of the class. /// - /// The text. + /// The string that contains the text to tokenize. public HoconTokenizer(string text) : base(text) { } /// - /// Pulls the whitespace and comments. + /// Advances the tokenizer to the next non-whitespace, non-comment token. /// public void PullWhitespaceAndComments() { @@ -173,9 +164,10 @@ public void PullWhitespaceAndComments() } /// - /// Pulls the rest of line. + /// Retrieves the current line from where the current token + /// is located in the string. /// - /// System.String. 
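// Rough sketch of the base Tokenizer primitives documented above, on a throwaway input:
var tokenizer = new Tokenizer("akka.loglevel");
bool matches = tokenizer.Matches("akka");   // true - compares at the current position without advancing
char first   = tokenizer.Peek();            // 'a' - also does not advance
string word  = tokenizer.Take(4);           // "akka" - advances the position by four characters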
+ /// The current line from where the current token is located. public string PullRestOfLine() { var sb = new StringBuilder(); @@ -195,10 +187,12 @@ public string PullRestOfLine() } /// - /// Pulls the next. + /// Retrieves the next token from the string. /// - /// Token. - /// unknown token + /// The next token contained in the string. + /// + /// This exception is thrown when an unknown token is encountered. + /// public Token PullNext() { PullWhitespaceAndComments(); @@ -241,19 +235,15 @@ public Token PullNext() throw new Exception("unknown token"); } - /// - /// Determines whether [is start of quoted key]. - /// - /// true if [is start of quoted key]; otherwise, false. private bool IsStartOfQuotedKey() { return Matches("\""); } /// - /// Pulls the array end. + /// Retrieves a token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullArrayEnd() { Take(); @@ -261,27 +251,27 @@ public Token PullArrayEnd() } /// - /// Determines whether [is array end]. + /// Determines whether the current token matches an token. /// - /// true if [is array end]; otherwise, false. + /// true if the token matches; otherwise, false. public bool IsArrayEnd() { return Matches("]"); } /// - /// Determines whether [is array start]. + /// Determines whether the current token matches an token. /// - /// true if [is array start]; otherwise, false. + /// true if the token matches; otherwise, false. public bool IsArrayStart() { return Matches("["); } /// - /// Pulls the array start. + /// Retrieves a token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullArrayStart() { Take(); @@ -289,9 +279,9 @@ public Token PullArrayStart() } /// - /// Pulls the dot. + /// Retrieves a token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullDot() { Take(); @@ -299,9 +289,9 @@ public Token PullDot() } /// - /// Pulls the comma. + /// Retrieves a token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullComma() { Take(); @@ -309,9 +299,9 @@ public Token PullComma() } /// - /// Pulls the start of object. + /// Retrieves a token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullStartOfObject() { Take(); @@ -319,9 +309,9 @@ public Token PullStartOfObject() } /// - /// Pulls the end of object. + /// Retrieves a token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullEndOfObject() { Take(); @@ -329,9 +319,9 @@ public Token PullEndOfObject() } /// - /// Pulls the assignment. + /// Retrieves a token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullAssignment() { Take(); @@ -339,72 +329,72 @@ public Token PullAssignment() } /// - /// Determines whether this instance is comma. + /// Determines whether the current token matches an token. /// - /// true if this instance is comma; otherwise, false. + /// true if the token matches; otherwise, false. public bool IsComma() { return Matches(","); } /// - /// Determines whether this instance is dot. + /// Determines whether the current token matches an token. /// - /// true if this instance is dot; otherwise, false. + /// true if the token matches; otherwise, false. 
public bool IsDot() { return Matches("."); } /// - /// Determines whether [is object start]. + /// Determines whether the current token matches an token. /// - /// true if [is object start]; otherwise, false. + /// true if the token matches; otherwise, false. public bool IsObjectStart() { return Matches("{"); } /// - /// Determines whether [is end of object]. + /// Determines whether the current token matches an token. /// - /// true if [is end of object]; otherwise, false. + /// true if the token matches; otherwise, false. public bool IsEndOfObject() { return Matches("}"); } /// - /// Determines whether this instance is assignment. + /// Determines whether the current token matches an token. /// - /// true if this instance is assignment; otherwise, false. + /// true if the token matches; otherwise, false. public bool IsAssignment() { return Matches("=", ":"); } /// - /// Determines whether [is start of quoted text]. + /// Determines whether the current token matches the start of a quoted string. /// - /// true if [is start of quoted text]; otherwise, false. + /// true if token matches; otherwise, false. public bool IsStartOfQuotedText() { return Matches("\""); } /// - /// Determines whether [is start of triple quoted text]. + /// Determines whether the current token matches the start of a triple quoted string. /// - /// true if [is start of triple quoted text]; otherwise, false. + /// true if token matches; otherwise, false. public bool IsStartOfTripleQuotedText() { return Matches("\"\"\""); } /// - /// Pulls the comment. + /// Retrieves a token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullComment() { PullRestOfLine(); @@ -412,9 +402,9 @@ public Token PullComment() } /// - /// Pulls the unquoted key. + /// Retrieves an unquoted token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullUnquotedKey() { var sb = new StringBuilder(); @@ -427,36 +417,32 @@ public Token PullUnquotedKey() } /// - /// Determines whether [is unquoted key]. + /// Determines whether the current token is an unquoted key. /// - /// true if [is unquoted key]; otherwise, false. + /// true if token is an unquoted key; otherwise, false. public bool IsUnquotedKey() { return (!EoF && !IsStartOfComment() && !NotInUnquotedKey.Contains(Peek())); } /// - /// Determines whether [is unquoted key start]. + /// Determines whether the current token is the start of an unquoted key. /// - /// true if [is unquoted key start]; otherwise, false. + /// true if token is the start of an unquoted key; otherwise, false. public bool IsUnquotedKeyStart() { return (!EoF && !IsWhitespace() && !IsStartOfComment() && !NotInUnquotedKey.Contains(Peek())); } - /// - /// Determines whether this instance is whitespace. - /// - /// true if this instance is whitespace; otherwise, false. private bool IsWhitespace() { return char.IsWhiteSpace(Peek()); } /// - /// Pulls the triple quoted text. + /// Retrieves a triple quoted token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullTripleQuotedText() { var sb = new StringBuilder(); @@ -478,9 +464,9 @@ public Token PullTripleQuotedText() } /// - /// Pulls the quoted text. + /// Retrieves a quoted token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. 
public Token PullQuotedText() { var sb = new StringBuilder(); @@ -502,9 +488,9 @@ public Token PullQuotedText() } /// - /// Pulls the quoted key. + /// Retrieves a quoted token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullQuotedKey() { var sb = new StringBuilder(); @@ -525,11 +511,6 @@ public Token PullQuotedKey() return Token.Key(sb.ToString()); } - /// - /// Pulls the escape sequence. - /// - /// System.String. - /// private string PullEscapeSequence() { Take(); //consume "\" @@ -561,21 +542,17 @@ private string PullEscapeSequence() } } - /// - /// Determines whether [is start of comment]. - /// - /// true if [is start of comment]; otherwise, false. private bool IsStartOfComment() { return (Matches("#", "//")); } /// - /// Pulls the value. + /// Retrieves a value token from the tokenizer's current position. /// - /// Token. + /// A value token from the tokenizer's current position. /// - /// Expected value: Null literal, Array, Number, Boolean, Quoted Text, Unquoted Text, + /// Expected value: Null literal, Array, Quoted Text, Unquoted Text, /// Triple quoted Text, Object or End of array /// public Token PullValue() @@ -613,22 +590,22 @@ public Token PullValue() } throw new Exception( - "Expected value: Null literal, Array, Number, Boolean, Quoted Text, Unquoted Text, Triple quoted Text, Object or End of array"); + "Expected value: Null literal, Array, Quoted Text, Unquoted Text, Triple quoted Text, Object or End of array"); } /// - /// Determines whether [is substitution start]. + /// Determines whether the current token is the start of a substitution. /// - /// true if [is substitution start]; otherwise, false. + /// true if token is the start of a substitution; otherwise, false. public bool IsSubstitutionStart() { return Matches("${"); } /// - /// Pulls the substitution. + /// Retrieves a token from the tokenizer's current position. /// - /// Token. + /// A token from the tokenizer's current position. public Token PullSubstitution() { var sb = new StringBuilder(); @@ -641,30 +618,19 @@ public Token PullSubstitution() return Token.Substitution(sb.ToString()); } - //public Token PullNextTrailingValue() - //{ - // PullSpaceOrTab(); - - //} - /// - /// Determines whether [is space or tab]. + /// Determines whether the current token is a space or a tab. /// - /// true if [is space or tab]; otherwise, false. + /// true if token is the start of a space or a tab; otherwise, false. public bool IsSpaceOrTab() { return Matches(" ", "\t"); } - //private bool IsStartNumber() - //{ - // return Matches("-", "+") || char.IsDigit(Peek()); - // } - /// - /// Determines whether [is start simple value]. + /// Determines whether the current token is the start of an unquoted string literal. /// - /// true if [is start simple value]; otherwise, false. + /// true if token is the start of an unquoted string literal; otherwise, false. public bool IsStartSimpleValue() { if (IsSpaceOrTab()) @@ -677,9 +643,9 @@ public bool IsStartSimpleValue() } /// - /// Pulls the space or tab. + /// Retrieves the current token, including whitespace and tabs, as a string literal token. /// - /// Token. + /// A token that contains the string literal value. public Token PullSpaceOrTab() { var sb = new StringBuilder(); @@ -690,10 +656,6 @@ public Token PullSpaceOrTab() return Token.LiteralValue(sb.ToString()); } - /// - /// Pulls the unquoted text. - /// - /// Token. 
private Token PullUnquotedText() { var sb = new StringBuilder(); @@ -705,20 +667,19 @@ private Token PullUnquotedText() return Token.LiteralValue(sb.ToString()); } - /// - /// Determines whether [is unquoted text]. - /// - /// true if [is unquoted text]; otherwise, false. private bool IsUnquotedText() { return (!EoF && !IsWhitespace() && !IsStartOfComment() && !NotInUnquotedText.Contains(Peek())); } /// - /// Pulls the simple value. + /// Retrieves the current token as a string literal token. /// - /// Token. - /// No simple value found + /// A token that contains the string literal value. + /// + /// This exception is thrown when the tokenizer cannot find + /// a string literal value from the current token. + /// public Token PullSimpleValue() { if (IsSpaceOrTab()) @@ -730,9 +691,9 @@ public Token PullSimpleValue() } /// - /// Determines whether this instance is value. + /// Determines whether the current token is a value. /// - /// true if this instance is value; otherwise, false. + /// true if the current token is a value; otherwise, false. internal bool IsValue() { if (IsArrayStart()) @@ -752,4 +713,3 @@ internal bool IsValue() } } } - diff --git a/src/core/Akka/Configuration/Hocon/HoconValue.cs b/src/core/Akka/Configuration/Hocon/HoconValue.cs index 35085424e5d..76cf9770cca 100644 --- a/src/core/Akka/Configuration/Hocon/HoconValue.cs +++ b/src/core/Akka/Configuration/Hocon/HoconValue.cs @@ -14,12 +14,12 @@ namespace Akka.Configuration.Hocon { /// - /// Root type of HOCON configuration object - /// + /// This class represents the root type for a HOCON (Human-Optimized Config Object Notation) + /// configuration object. public class HoconValue : IMightBeAHoconObject { /// - /// Default constructor + /// Initializes a new instance of the class. /// public HoconValue() { @@ -42,6 +42,8 @@ public bool IsEmpty /// /// Wraps this into a new object at the specified key. /// + /// The key designated to be the new root element. + /// A with the given key as the root element. public Config AtKey(string key) { var o = new HoconObject(); @@ -52,6 +54,10 @@ public Config AtKey(string key) return new Config(new HoconRoot(r)); } + /// + /// Retrieves the from this . + /// + /// The that represents this . public HoconObject GetObject() { //TODO: merge objects? @@ -64,30 +70,49 @@ public HoconObject GetObject() } /// - /// Determines if this is a + /// Determines if this is a . /// - /// true if this value is a HOCON object, false otherwise. + /// true if this value is a , false otherwise. public bool IsObject() { return GetObject() != null; } + /// + /// Adds the given element to the list of elements inside this . + /// + /// The element to add to the list. public void AppendValue(IHoconElement value) { Values.Add(value); } + /// + /// Clears the list of elements inside this . + /// public void Clear() { Values.Clear(); } + /// + /// Creates a fresh list of elements inside this + /// and adds the given value to the list. + /// + /// The element to add to the list. public void NewValue(IHoconElement value) { Values.Clear(); Values.Add(value); } + /// + /// Determines whether all the elements inside this + /// are a string. + /// + /// + /// trueif all elements inside this are a string; otherwise false. + /// public bool IsString() { return Values.Any() && Values.All(v => v.IsString()); @@ -103,11 +128,24 @@ private string ConcatString() return concat; } + /// + /// Retrieves the child object located at the given key. + /// + /// The key used to retrieve the child object. 
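// Sketch of NewValue/AtKey as documented above: wrapping a single literal into a Config
// under a hypothetical "loglevel" key (assumes using Akka.Configuration).
var value = new HoconValue();
value.NewValue(new HoconLiteral { Value = "DEBUG" });
Config wrapped = value.AtKey("loglevel");
string level = wrapped.GetString("loglevel");   // "DEBUG"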
+ /// The element at the given key. public HoconValue GetChildObject(string key) { return GetObject().GetKey(key); } + /// + /// Retrieves the boolean value from this . + /// + /// The boolean value represented by this . + /// + /// This exception occurs when the doesn't + /// conform to the standard boolean values: "on", "off", "true", or "false" + /// public bool GetBoolean() { string v = GetString(); @@ -126,6 +164,10 @@ public bool GetBoolean() } } + /// + /// Retrieves the string value from this . + /// + /// The string value represented by this . public string GetString() { if (IsString()) @@ -135,76 +177,136 @@ public string GetString() return null; //TODO: throw exception? } + /// + /// Retrieves the decimal value from this . + /// + /// The decimal value represented by this . public decimal GetDecimal() { return decimal.Parse(GetString(), NumberFormatInfo.InvariantInfo); } + /// + /// Retrieves the float value from this . + /// + /// The float value represented by this . public float GetFloat() { return float.Parse(GetString(), NumberFormatInfo.InvariantInfo); } + /// + /// Retrieves the double value from this . + /// + /// The double value represented by this . public double GetDouble() { return double.Parse(GetString(), NumberFormatInfo.InvariantInfo); } + /// + /// Retrieves the long value from this . + /// + /// The long value represented by this . public long GetLong() { return long.Parse(GetString(), NumberFormatInfo.InvariantInfo); } + /// + /// Retrieves the integer value from this . + /// + /// The integer value represented by this . public int GetInt() { return int.Parse(GetString(), NumberFormatInfo.InvariantInfo); } + /// + /// Retrieves the byte value from this . + /// + /// The byte value represented by this . public byte GetByte() { return byte.Parse(GetString(), NumberFormatInfo.InvariantInfo); } + /// + /// Retrieves a list of byte values from this . + /// + /// A list of byte values represented by this . public IList GetByteList() { return GetArray().Select(v => v.GetByte()).ToList(); } + /// + /// Retrieves a list of integer values from this . + /// + /// A list of integer values represented by this . public IList GetIntList() { return GetArray().Select(v => v.GetInt()).ToList(); } + /// + /// Retrieves a list of long values from this . + /// + /// A list of long values represented by this . public IList GetLongList() { return GetArray().Select(v => v.GetLong()).ToList(); } + /// + /// Retrieves a list of boolean values from this . + /// + /// A list of boolean values represented by this . public IList GetBooleanList() { return GetArray().Select(v => v.GetBoolean()).ToList(); } + /// + /// Retrieves a list of float values from this . + /// + /// A list of float values represented by this . public IList GetFloatList() { return GetArray().Select(v => v.GetFloat()).ToList(); } + /// + /// Retrieves a list of double values from this . + /// + /// A list of double values represented by this . public IList GetDoubleList() { return GetArray().Select(v => v.GetDouble()).ToList(); } + /// + /// Retrieves a list of decimal values from this . + /// + /// A list of decimal values represented by this . public IList GetDecimalList() { return GetArray().Select(v => v.GetDecimal()).ToList(); } + /// + /// Retrieves a list of string values from this . + /// + /// A list of string values represented by this . public IList GetStringList() { return GetArray().Select(v => v.GetString()).ToList(); } + /// + /// Retrieves a list of values from this . 
+ /// + /// A list of values represented by this . public IList GetArray() { IEnumerable x = from arr in Values @@ -215,6 +317,12 @@ from e in arr.GetArray() return x.ToList(); } + /// + /// Determines whether this is an array. + /// + /// + /// true if this is an array; otherwise false. + /// public bool IsArray() { return GetArray() != null; @@ -227,6 +335,11 @@ public TimeSpan GetMillisDuration(bool allowInfinite = true) return GetTimeSpan(allowInfinite); } + /// + /// Retrieves the time span value from this . + /// + /// A flag used to set inifinite durations. + /// The time span value represented by this . public TimeSpan GetTimeSpan(bool allowInfinite = true) { string res = GetString(); @@ -272,6 +385,10 @@ private static double ParsePositiveValue(string v) return value; } + /// + /// Retrieves the long value, optionally suffixed with a 'b', from this . + /// + /// The long value represented by this . public long? GetByteSize() { var res = GetString(); @@ -284,11 +401,20 @@ private static double ParsePositiveValue(string v) return long.Parse(res); } + /// + /// Returns a HOCON string representation of this . + /// + /// A HOCON string representation of this . public override string ToString() { return ToString(0); } + /// + /// Returns a HOCON string representation of this . + /// + /// The number of spaces to indent the string. + /// A HOCON string representation of this . public virtual string ToString(int indent) { if (IsString()) diff --git a/src/core/Akka/Configuration/Hocon/IHoconElement.cs b/src/core/Akka/Configuration/Hocon/IHoconElement.cs index 123c899fff9..a935809df2f 100644 --- a/src/core/Akka/Configuration/Hocon/IHoconElement.cs +++ b/src/core/Akka/Configuration/Hocon/IHoconElement.cs @@ -10,22 +10,50 @@ namespace Akka.Configuration.Hocon { /// - /// Marker interface to make it easier to retrieve Hocon objects for substitutions + /// Marker interface to make it easier to retrieve HOCON + /// (Human-Optimized Config Object Notation) objects for + /// substitutions. /// public interface IMightBeAHoconObject { + /// + /// Determines whether this element is a HOCON object. + /// + /// true if this element is a HOCON object; otherwise false bool IsObject(); + /// + /// Retrieves the HOCON object representation of this element. + /// + /// The HOCON object representation of this element. HoconObject GetObject(); } + /// + /// This interface defines the contract needed to implement + /// a HOCON (Human-Optimized Config Object Notation) element. + /// public interface IHoconElement { + /// + /// Determines whether this element is a string. + /// + /// true if this element is a string; otherwise false bool IsString(); + /// + /// Retrieves the string representation of this element. + /// + /// The string representation of this element. string GetString(); - + /// + /// Determines whether this element is an array. + /// + /// true if this element is aan array; otherwise false bool IsArray(); - + /// + /// Retrieves a list of elements associated with this element. + /// + /// A list of elements associated with this element. 
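// Sketch of the typed getters documented above; keys and values are hypothetical, and the
// "10s" duration assumes the standard HOCON time-unit suffixes are supported.
HoconRoot root = Parser.Parse("retries = 5 \n hosts = [a, b] \n max-size = 1024b \n timeout = 10s");
HoconValue cfg = root.Value;
int retries         = cfg.GetChildObject("retries").GetInt();        // 5
IList<string> hosts = cfg.GetChildObject("hosts").GetStringList();   // "a", "b"
long? maxSize       = cfg.GetChildObject("max-size").GetByteSize();  // 1024
TimeSpan timeout    = cfg.GetChildObject("timeout").GetTimeSpan();   // 10 seconds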
IList GetArray(); } } From 9f058cb7b575a92fd160a5d6bf8dc55a6fb0c6b5 Mon Sep 17 00:00:00 2001 From: Sean Gilliam Date: Wed, 27 May 2015 13:35:27 -0500 Subject: [PATCH 57/66] Fixed a few class cross references --- src/core/Akka.Persistence/AtLeastOnceDelivery.cs | 4 ++-- src/core/Akka.TestKit/TestKitBase_AwaitConditions.cs | 2 +- src/core/Akka/Actor/ActorRef.cs | 2 +- src/core/Akka/Actor/IActorContext.cs | 2 +- src/core/Akka/Configuration/Hocon/HoconToken.cs | 2 +- src/core/Akka/Configuration/Hocon/HoconTokenizer.cs | 4 ++-- src/core/Akka/Routing/TailChoppingRoutingLogic.cs | 2 +- 7 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/core/Akka.Persistence/AtLeastOnceDelivery.cs b/src/core/Akka.Persistence/AtLeastOnceDelivery.cs index c4f3371bb2b..dbc2c542b0c 100644 --- a/src/core/Akka.Persistence/AtLeastOnceDelivery.cs +++ b/src/core/Akka.Persistence/AtLeastOnceDelivery.cs @@ -129,7 +129,7 @@ protected MaxUnconfirmedMessagesExceededException(SerializationInfo info, Stream /// Use a method to send a message to a destination. Call the /// method once destination has replied with a confirmation message. The interval between redelivery attempts /// can be defined with . After a number of delivery attempts an - /// message will be sent to . The resending will continue, + /// message will be sent to . The resending will continue, /// but you may choose to cancel resending. /// /// This actor type has state consisting of unconfirmed messages and a sequence number. It doesn't store it by @@ -170,7 +170,7 @@ public void Init() protected int DefaultRedeliveryBurstLimit { get { return Extension.Settings.AtLeastOnceDelivery.RedeliveryBurstLimit; } } /// - /// After this number of delivery attempts a message will be sent to . + /// After this number of delivery attempts a message will be sent to . /// The count is reset after restart. /// public virtual int UnconfirmedDeliveryAttemptsToWarn { get { return DefaultUnconfirmedDeliveryAttemptsToWarn; } } diff --git a/src/core/Akka.TestKit/TestKitBase_AwaitConditions.cs b/src/core/Akka.TestKit/TestKitBase_AwaitConditions.cs index 3fad6fd8699..5033bd02f31 100644 --- a/src/core/Akka.TestKit/TestKitBase_AwaitConditions.cs +++ b/src/core/Akka.TestKit/TestKitBase_AwaitConditions.cs @@ -203,7 +203,7 @@ protected static bool InternalAwaitCondition(Func conditionIsFulfilled, Ti /// /// Action that is called when the timeout expired. /// The parameters conforms to - /// If a is specified, debug messages will be logged using it. If null nothing will be logged + /// If a is specified, debug messages will be logged using it. If null nothing will be logged protected static bool InternalAwaitCondition(Func conditionIsFulfilled, TimeSpan max, TimeSpan? interval, Action fail, ILoggingAdapter logger) { max.EnsureIsPositiveFinite("max"); diff --git a/src/core/Akka/Actor/ActorRef.cs b/src/core/Akka/Actor/ActorRef.cs index 88109ff2bc2..423b3b5f71f 100644 --- a/src/core/Akka/Actor/ActorRef.cs +++ b/src/core/Akka/Actor/ActorRef.cs @@ -42,7 +42,7 @@ internal interface ILocalRef : IActorRefScope { } /// RepointableActorRef (and potentially others) may change their locality at /// runtime, meaning that isLocal might not be stable. RepointableActorRef has /// the feature that it starts out “not fully started” (but you can send to it), - /// which is why features here; it is not improbable that cluster + /// which is why features here; it is not improbable that cluster /// actor refs will have the same behavior. 
/// INTERNAL /// diff --git a/src/core/Akka/Actor/IActorContext.cs b/src/core/Akka/Actor/IActorContext.cs index a3fb0dd86bd..75dd471cc31 100644 --- a/src/core/Akka/Actor/IActorContext.cs +++ b/src/core/Akka/Actor/IActorContext.cs @@ -52,7 +52,7 @@ public interface IActorContext : IActorRefFactory, ICanWatch /// Gets a reference to the to which this actor belongs. /// /// - /// This property is how you can get access to the and other parts + /// This property is how you can get access to the and other parts /// of Akka.NET from within an actor instance. /// /// diff --git a/src/core/Akka/Configuration/Hocon/HoconToken.cs b/src/core/Akka/Configuration/Hocon/HoconToken.cs index 0cd1a23db5f..68bf0d7bbfe 100644 --- a/src/core/Akka/Configuration/Hocon/HoconToken.cs +++ b/src/core/Akka/Configuration/Hocon/HoconToken.cs @@ -121,7 +121,7 @@ public Token(string value) /// /// Creates a key token with a given . /// - /// The key to associate with this token. + /// The key to associate with this token. /// A key token with the given key. public static Token Key(string key) { diff --git a/src/core/Akka/Configuration/Hocon/HoconTokenizer.cs b/src/core/Akka/Configuration/Hocon/HoconTokenizer.cs index b58e4d5045c..bc7be6058b4 100644 --- a/src/core/Akka/Configuration/Hocon/HoconTokenizer.cs +++ b/src/core/Akka/Configuration/Hocon/HoconTokenizer.cs @@ -603,9 +603,9 @@ public bool IsSubstitutionStart() } /// - /// Retrieves a token from the tokenizer's current position. + /// Retrieves a token from the tokenizer's current position. /// - /// A token from the tokenizer's current position. + /// A token from the tokenizer's current position. public Token PullSubstitution() { var sb = new StringBuilder(); diff --git a/src/core/Akka/Routing/TailChoppingRoutingLogic.cs b/src/core/Akka/Routing/TailChoppingRoutingLogic.cs index d3583c81c6e..ee945c71e4e 100644 --- a/src/core/Akka/Routing/TailChoppingRoutingLogic.cs +++ b/src/core/Akka/Routing/TailChoppingRoutingLogic.cs @@ -97,7 +97,7 @@ internal sealed class TailChoppingRoutee : Routee /// The routees to route to. /// The time within which at least one response is expected. /// The duration after which the next routee will be picked. - /// Access to a instance, used to force deadlines. + /// Access to a instance, used to force deadlines. 
public TailChoppingRoutee(Routee[] routees, TimeSpan within, TimeSpan interval, IScheduler scheduler) { _routees = routees; From 929984cddd16664ecaa4b6e6aa1acdd66a17b893 Mon Sep 17 00:00:00 2001 From: rogeralsing Date: Sat, 30 May 2015 18:39:09 +0200 Subject: [PATCH 58/66] ActorCell.DeathWatch InvalidOperationException during AddressTerminated #1011 --- src/core/Akka/Actor/ActorCell.DeathWatch.cs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/core/Akka/Actor/ActorCell.DeathWatch.cs b/src/core/Akka/Actor/ActorCell.DeathWatch.cs index d32f96aafb2..9b3afcc91dd 100644 --- a/src/core/Akka/Actor/ActorCell.DeathWatch.cs +++ b/src/core/Akka/Actor/ActorCell.DeathWatch.cs @@ -210,7 +210,8 @@ protected void AddressTerminated(Address address) // cleanup watchedBy since we know they are dead MaintainAddressTerminatedSubscription(() => { - foreach (var a in _state.GetWatchedBy().Where(a => a.Path.Address == address)) + + foreach (var a in _state.GetWatchedBy().Where(a => a.Path.Address == address).ToList()) { //_watchedBy.Remove(a); _state = _state.RemoveWatchedBy(a); From 1550b5f1dedf9bd53ab2240b9a084ed04906bde9 Mon Sep 17 00:00:00 2001 From: Jeff Doolittle Date: Mon, 1 Jun 2015 11:53:52 -0700 Subject: [PATCH 59/66] closes akkadotnet/akka.net#1020 structuremap dependency injection --- src/Akka.sln | 98 ++++++++++------- src/BasicStructureMapUses/Actors.cs | 57 ++++++++++ src/BasicStructureMapUses/App.config | 26 +++++ .../BasicStructureMapUses.csproj | 87 +++++++++++++++ src/BasicStructureMapUses/Program.cs | 64 +++++++++++ .../Properties/AssemblyInfo.cs | 42 +++++++ src/BasicStructureMapUses/packages.config | 4 + .../Akka.DI.StructureMap.csproj | 84 ++++++++++++++ .../Akka.DI.StructureMap.nuspec | 20 ++++ .../Properties/AssemblyInfo.cs | 26 +++++ .../Akka.DI.StructureMap/Readme.md | 97 +++++++++++++++++ .../StructureMapDependencyResolver.cs | 103 ++++++++++++++++++ .../Akka.DI.StructureMap/packages.config | 4 + 13 files changed, 674 insertions(+), 38 deletions(-) create mode 100644 src/BasicStructureMapUses/Actors.cs create mode 100644 src/BasicStructureMapUses/App.config create mode 100644 src/BasicStructureMapUses/BasicStructureMapUses.csproj create mode 100644 src/BasicStructureMapUses/Program.cs create mode 100644 src/BasicStructureMapUses/Properties/AssemblyInfo.cs create mode 100644 src/BasicStructureMapUses/packages.config create mode 100644 src/contrib/dependencyInjection/Akka.DI.StructureMap/Akka.DI.StructureMap.csproj create mode 100644 src/contrib/dependencyInjection/Akka.DI.StructureMap/Akka.DI.StructureMap.nuspec create mode 100644 src/contrib/dependencyInjection/Akka.DI.StructureMap/Properties/AssemblyInfo.cs create mode 100644 src/contrib/dependencyInjection/Akka.DI.StructureMap/Readme.md create mode 100644 src/contrib/dependencyInjection/Akka.DI.StructureMap/StructureMapDependencyResolver.cs create mode 100644 src/contrib/dependencyInjection/Akka.DI.StructureMap/packages.config diff --git a/src/Akka.sln b/src/Akka.sln index c303e0ff85d..f11e602a31c 100644 --- a/src/Akka.sln +++ b/src/Akka.sln @@ -208,6 +208,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.PostgreSql EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.Persistence.PostgreSql.Tests", "contrib\persistence\Akka.Persistence.PostgreSql.Tests\Akka.Persistence.PostgreSql.Tests.csproj", "{2D1812FD-70C0-43EE-9C25-3980E41F30E1}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Akka.DI.StructureMap", 
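// Why the added .ToList() matters (generic sketch, not Akka code; assumes System.Linq and
// System.Collections.Generic): materializing the filtered sequence first lets the loop walk a
// snapshot, so removing items from the source collection below no longer invalidates the live
// enumerator and throws InvalidOperationException.
var watchers = new List<int> { 1, 2, 3 };
// foreach (var w in watchers.Where(x => x > 1)) { watchers.Remove(w); }        // throws mid-iteration
foreach (var w in watchers.Where(x => x > 1).ToList()) { watchers.Remove(w); }  // safe: snapshot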
"contrib\dependencyInjection\Akka.DI.StructureMap\Akka.DI.StructureMap.csproj", "{34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BasicStructureMapUses", "BasicStructureMapUses\BasicStructureMapUses.csproj", "{13BA1CC1-A431-441D-8B11-3969D2C68A6E}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug Mono|Any CPU = Debug Mono|Any CPU @@ -741,22 +745,6 @@ Global {3B9E6211-9488-4DB5-B714-24248693B38F}.Release Mono|Any CPU.Build.0 = Release|Any CPU {3B9E6211-9488-4DB5-B714-24248693B38F}.Release|Any CPU.ActiveCfg = Release|Any CPU {3B9E6211-9488-4DB5-B714-24248693B38F}.Release|Any CPU.Build.0 = Release|Any CPU - {4B89227B-5AD1-4061-816F-570067C3727F}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU - {4B89227B-5AD1-4061-816F-570067C3727F}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU - {4B89227B-5AD1-4061-816F-570067C3727F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4B89227B-5AD1-4061-816F-570067C3727F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4B89227B-5AD1-4061-816F-570067C3727F}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU - {4B89227B-5AD1-4061-816F-570067C3727F}.Release Mono|Any CPU.Build.0 = Release|Any CPU - {4B89227B-5AD1-4061-816F-570067C3727F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4B89227B-5AD1-4061-816F-570067C3727F}.Release|Any CPU.Build.0 = Release|Any CPU - {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU - {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU - {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU - {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release Mono|Any CPU.Build.0 = Release|Any CPU - {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release|Any CPU.Build.0 = Release|Any CPU {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU {54BD0B45-8A46-4194-8C33-AD287CAC8FA4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU @@ -773,44 +761,76 @@ Global {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Release Mono|Any CPU.Build.0 = Release|Any CPU {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Release|Any CPU.ActiveCfg = Release|Any CPU {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4}.Release|Any CPU.Build.0 = Release|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Release Mono|Any CPU.Build.0 = Release|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4B89227B-5AD1-4061-816F-570067C3727F}.Release|Any CPU.Build.0 = Release|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release Mono|Any CPU.Build.0 = Release|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2D1812FD-70C0-43EE-9C25-3980E41F30E1}.Release|Any CPU.Build.0 = Release|Any CPU + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E}.Release Mono|Any CPU.Build.0 = Release|Any CPU + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E}.Release|Any CPU.Build.0 = Release|Any CPU + {13BA1CC1-A431-441D-8B11-3969D2C68A6E}.Debug Mono|Any CPU.ActiveCfg = Debug|Any CPU + {13BA1CC1-A431-441D-8B11-3969D2C68A6E}.Debug Mono|Any CPU.Build.0 = Debug|Any CPU + {13BA1CC1-A431-441D-8B11-3969D2C68A6E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {13BA1CC1-A431-441D-8B11-3969D2C68A6E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {13BA1CC1-A431-441D-8B11-3969D2C68A6E}.Release Mono|Any CPU.ActiveCfg = Release|Any CPU + {13BA1CC1-A431-441D-8B11-3969D2C68A6E}.Release Mono|Any CPU.Build.0 = Release|Any CPU + {13BA1CC1-A431-441D-8B11-3969D2C68A6E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {13BA1CC1-A431-441D-8B11-3969D2C68A6E}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection GlobalSection(NestedProjects) = preSolution {3AA1E79A-F641-4115-BF84-96ECE552EC95} = {69279534-1DBA-4115-BF8B-03F77FC8125E} + {E814B432-DBC2-4372-92C6-70B9198613BD} = {69279534-1DBA-4115-BF8B-03F77FC8125E} + {45215639-D279-49B4-B11E-0337D1FE9364} = {69279534-1DBA-4115-BF8B-03F77FC8125E} + {D92D674A-7671-4D26-B3C5-ADB7EF220CFD} = {69279534-1DBA-4115-BF8B-03F77FC8125E} + {B555BA52-1911-426B-8018-D303D7228748} = {69279534-1DBA-4115-BF8B-03F77FC8125E} + {E17B704F-3DF0-4723-8E83-43DC8D33997A} = {69279534-1DBA-4115-BF8B-03F77FC8125E} + {DCE4B11E-6A5F-4AC8-A089-037F0B14BFAB} = {69279534-1DBA-4115-BF8B-03F77FC8125E} + {B17C5E75-B5F7-4492-9A97-A0C8DF66EF02} = {69279534-1DBA-4115-BF8B-03F77FC8125E} + {19FD9F6D-7ED2-4DF6-98EE-348027F8D5DA} = {69279534-1DBA-4115-BF8B-03F77FC8125E} + {85D1D513-2F94-404A-A99C-37F2BAD0D7BE} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {819221CB-82B7-43D3-A4DE-E00AF17F2FDF} = {3AA1E79A-F641-4115-BF84-96ECE552EC95} {69AED1DD-02E1-423F-950C-DE6010DFA346} = {3AA1E79A-F641-4115-BF84-96ECE552EC95} {7FD886CC-216B-4324-9AB2-241494598B06} = {3AA1E79A-F641-4115-BF84-96ECE552EC95} - {E814B432-DBC2-4372-92C6-70B9198613BD} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {19993C0B-8F9D-4FE4-B047-45965EA66BA2} = {E814B432-DBC2-4372-92C6-70B9198613BD} {68D89B2A-6770-483E-B7B0-C7EDAE0C2A6D} = {E814B432-DBC2-4372-92C6-70B9198613BD} {F53B3F7C-6422-4AA7-AF87-B838D736DBAB} = {68D89B2A-6770-483E-B7B0-C7EDAE0C2A6D} {B65AAB66-A779-4A2C-AE6B-495C292551AE} = {68D89B2A-6770-483E-B7B0-C7EDAE0C2A6D} - {45215639-D279-49B4-B11E-0337D1FE9364} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {2CEC4A63-31ED-44E8-B1A6-7EEEB190117D} = {45215639-D279-49B4-B11E-0337D1FE9364} - 
{D92D674A-7671-4D26-B3C5-ADB7EF220CFD} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {3007A692-A050-4F12-9187-4AE6C6106A66} = {D92D674A-7671-4D26-B3C5-ADB7EF220CFD} - {B555BA52-1911-426B-8018-D303D7228748} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {93EDACD8-F6BA-43A7-9FD7-D3D4156ADBC0} = {B555BA52-1911-426B-8018-D303D7228748} {14873709-8C2E-485E-BF74-2423227DB59B} = {B555BA52-1911-426B-8018-D303D7228748} {56B7C019-AA7A-4194-AFA5-3D3F1A4B6393} = {B555BA52-1911-426B-8018-D303D7228748} - {E17B704F-3DF0-4723-8E83-43DC8D33997A} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {EB9BEE97-7B2C-44B7-8636-237A8E4171C7} = {E17B704F-3DF0-4723-8E83-43DC8D33997A} {3D7C22A1-E999-4B01-BE94-9C54B31A4FF1} = {E17B704F-3DF0-4723-8E83-43DC8D33997A} - {DCE4B11E-6A5F-4AC8-A089-037F0B14BFAB} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {B428311F-AD87-461D-9573-9004A2596ABA} = {DCE4B11E-6A5F-4AC8-A089-037F0B14BFAB} {D23C0D51-7E21-454B-9C3E-1154A744FF81} = {DCE4B11E-6A5F-4AC8-A089-037F0B14BFAB} - {E5EB3DF5-D017-4B50-B5AA-2B0440DB773D} = {D23C0D51-7E21-454B-9C3E-1154A744FF81} {0BA8B1E8-11DD-4A32-8BB4-99F7AB3E27BB} = {DCE4B11E-6A5F-4AC8-A089-037F0B14BFAB} + {E5EB3DF5-D017-4B50-B5AA-2B0440DB773D} = {D23C0D51-7E21-454B-9C3E-1154A744FF81} {29A08A09-83F6-48D4-A9AE-B4AE314069C4} = {0BA8B1E8-11DD-4A32-8BB4-99F7AB3E27BB} - {B17C5E75-B5F7-4492-9A97-A0C8DF66EF02} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {4022147A-4F95-4A04-BE09-01B7952BBDD9} = {B17C5E75-B5F7-4492-9A97-A0C8DF66EF02} {8C261CA2-3CAD-42FF-B21C-C32E7D718F95} = {B17C5E75-B5F7-4492-9A97-A0C8DF66EF02} - {19FD9F6D-7ED2-4DF6-98EE-348027F8D5DA} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {F09CC498-22E8-4D87-A235-CCE17E629612} = {19FD9F6D-7ED2-4DF6-98EE-348027F8D5DA} {7086D308-1046-40D6-824A-2C379334E599} = {19FD9F6D-7ED2-4DF6-98EE-348027F8D5DA} - {85D1D513-2F94-404A-A99C-37F2BAD0D7BE} = {69279534-1DBA-4115-BF8B-03F77FC8125E} {FBEBD803-AEFC-44B1-A429-6143C3288CA4} = {85D1D513-2F94-404A-A99C-37F2BAD0D7BE} {956F7D26-4505-4A26-86D0-73135BD35A93} = {73108242-625A-4D7B-AA09-63375DBAE464} {D0AC89AC-E6AB-4B16-BFBA-64C675D17ABD} = {01167D3C-49C4-4CDE-9787-C176D139ACDD} @@ -835,36 +855,38 @@ Global {D22C316E-991E-432A-950C-29BC3C45C07B} = {01167D3C-49C4-4CDE-9787-C176D139ACDD} {91746A3F-21C6-4614-B0AB-A59310D75C51} = {01167D3C-49C4-4CDE-9787-C176D139ACDD} {964F0EC5-FBE6-47C5-8AE6-145114D5DB8C} = {01167D3C-49C4-4CDE-9787-C176D139ACDD} + {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5} = {01167D3C-49C4-4CDE-9787-C176D139ACDD} {FFEC736B-EDA3-433C-8564-7C14676601A1} = {588C1513-FAB6-42C3-B6FC-3485F13620CF} + {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} = {588C1513-FAB6-42C3-B6FC-3485F13620CF} + {B1D10183-8FAE-4506-B935-403FCED89BDB} = {588C1513-FAB6-42C3-B6FC-3485F13620CF} + {264C22A4-CAFC-41F6-B82C-4DDC5C196767} = {588C1513-FAB6-42C3-B6FC-3485F13620CF} {104D8378-117D-411F-B921-BFBFBC55B46B} = {FFEC736B-EDA3-433C-8564-7C14676601A1} {A8AA2D7E-3D35-44DF-AF92-80A2C39C1F4D} = {FFEC736B-EDA3-433C-8564-7C14676601A1} {53D9D2F0-4A35-4FCE-980C-8F7F3DCBDB2B} = {FFEC736B-EDA3-433C-8564-7C14676601A1} - {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} = {588C1513-FAB6-42C3-B6FC-3485F13620CF} {11F4D4B8-7E07-4457-ABF2-609B3E7B2649} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} {01003DAF-660B-4C39-9557-1F5E95B405BF} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} {F5697316-A1C6-4B99-84B5-1E7DEB08D669} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} - {B1D10183-8FAE-4506-B935-403FCED89BDB} = {588C1513-FAB6-42C3-B6FC-3485F13620CF} + {BEF84A6F-32C4-4ACF-AFC3-7B5FCA6F209E} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} + {D63223FA-03F5-4B32-A6EC-668F718C0826} = 
{7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} + {7DBD5C17-5E9D-40C4-9201-D092751532A7} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} {760F900D-46A0-4D9F-9CD2-637040FFBD83} = {B1D10183-8FAE-4506-B935-403FCED89BDB} {88854526-09D3-4F15-98F8-060838D958D1} = {B1D10183-8FAE-4506-B935-403FCED89BDB} {FDF09D18-B68E-4B95-B1F6-B89D9C6C3AE9} = {B1D10183-8FAE-4506-B935-403FCED89BDB} {C22A9758-AD7D-4078-BD67-4829BACDFEB6} = {B1D10183-8FAE-4506-B935-403FCED89BDB} - {61862D3B-A8A7-491B-AC38-0F4E31E4576A} = {D1CCD86E-0EF8-473A-979B-25E1235FEA2D} + {D1CCD86E-0EF8-473A-979B-25E1235FEA2D} = {B1D10183-8FAE-4506-B935-403FCED89BDB} + {2065C3A2-8C15-4912-BCF5-AE89E3DDA079} = {B1D10183-8FAE-4506-B935-403FCED89BDB} + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E} = {B1D10183-8FAE-4506-B935-403FCED89BDB} {6CF8FB6A-EA0C-4D8C-BD38-57C41F0619E6} = {D1CCD86E-0EF8-473A-979B-25E1235FEA2D} {4B693CFF-F14F-4924-95A1-5FA896D749D9} = {D1CCD86E-0EF8-473A-979B-25E1235FEA2D} - {D1CCD86E-0EF8-473A-979B-25E1235FEA2D} = {B1D10183-8FAE-4506-B935-403FCED89BDB} + {61862D3B-A8A7-491B-AC38-0F4E31E4576A} = {D1CCD86E-0EF8-473A-979B-25E1235FEA2D} {32EABA12-DDAA-4F2A-B254-85239267D869} = {D1CCD86E-0EF8-473A-979B-25E1235FEA2D} - {2065C3A2-8C15-4912-BCF5-AE89E3DDA079} = {B1D10183-8FAE-4506-B935-403FCED89BDB} - {BEF84A6F-32C4-4ACF-AFC3-7B5FCA6F209E} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} - {D63223FA-03F5-4B32-A6EC-668F718C0826} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} - {264C22A4-CAFC-41F6-B82C-4DDC5C196767} = {588C1513-FAB6-42C3-B6FC-3485F13620CF} + {13BA1CC1-A431-441D-8B11-3969D2C68A6E} = {D1CCD86E-0EF8-473A-979B-25E1235FEA2D} {BAC85686-AFC4-413E-98DC-5ED8F468BC63} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} {5A3C24D7-0D1C-4974-BBB4-22AC792666DE} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} - {7DBD5C17-5E9D-40C4-9201-D092751532A7} = {7625FD95-4B2C-4A5B-BDD5-94B1493FAC8E} - {F0781BEA-5BA0-4AF0-BB15-E3F209B681F5} = {01167D3C-49C4-4CDE-9787-C176D139ACDD} {3B9E6211-9488-4DB5-B714-24248693B38F} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} - {4B89227B-5AD1-4061-816F-570067C3727F} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} - {2D1812FD-70C0-43EE-9C25-3980E41F30E1} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} {54BD0B45-8A46-4194-8C33-AD287CAC8FA4} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} {1FE6CA3D-4996-4A2A-AC0F-76F3BD66B4C4} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} + {4B89227B-5AD1-4061-816F-570067C3727F} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} + {2D1812FD-70C0-43EE-9C25-3980E41F30E1} = {264C22A4-CAFC-41F6-B82C-4DDC5C196767} EndGlobalSection EndGlobal diff --git a/src/BasicStructureMapUses/Actors.cs b/src/BasicStructureMapUses/Actors.cs new file mode 100644 index 00000000000..09b14e616d6 --- /dev/null +++ b/src/BasicStructureMapUses/Actors.cs @@ -0,0 +1,57 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. 
+// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using Akka.Actor; +using Akka.Routing; +using System; +public class AnotherMessage +{ + public string Name { get; set; } + public int Id { get; set; } + + public override string ToString() + { + return string.Format("{0} {1}", Id, Name); + } + + +} +public class TypedActorMessage : IConsistentHashable +{ + public string Name { get; set; } + public int Id { get; set; } + + public override string ToString() + { + return string.Format("{0} {1}", Id, Name); + } + + public object ConsistentHashKey + { + get { return Id; } + } +} +public class TypedWorker : TypedActor, IHandle<TypedActorMessage>, IHandle<AnotherMessage> +{ + public TypedWorker() + { + // + Console.WriteLine("Created {0}", Guid.NewGuid().ToString()); + } + + public void Handle(TypedActorMessage message) + { + Console.WriteLine("{0} received {1}", Self.Path.Name, message); + } + + + public void Handle(AnotherMessage message) + { + Console.WriteLine("{0} received other {1}", Self.Path.Name, message); + } +} + diff --git a/src/BasicStructureMapUses/App.config b/src/BasicStructureMapUses/App.config new file mode 100644 index 00000000000..50ad5acfd55 --- /dev/null +++ b/src/BasicStructureMapUses/App.config @@ -0,0 +1,26 @@ + + +
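The two message types above differ in a way that matters for routing: `TypedActorMessage` carries its own hash key through `IConsistentHashable.ConsistentHashKey`, while `AnotherMessage` does not and has to be wrapped in a `ConsistentHashableEnvelope`, which is exactly what `Program.cs` later in this patch does. A minimal sketch of that distinction, using a hypothetical five-instance pool named "workers" and the types defined in `Actors.cs` above:

```csharp
using Akka.Actor;
using Akka.Routing;

var system = ActorSystem.Create("MySystem");

// Hypothetical pool of the TypedWorker actors defined above.
var workers = system.ActorOf(
    Props.Create<TypedWorker>().WithRouter(new ConsistentHashingPool(5)),
    "workers");

// TypedActorMessage implements IConsistentHashable, so the router hashes on its Id directly.
workers.Tell(new TypedActorMessage { Id = 1, Name = "first" });

// AnotherMessage does not, so the hash key is supplied explicitly via an envelope.
workers.Tell(new ConsistentHashableEnvelope(new AnotherMessage { Id = 1, Name = "first" }, 1));
```

Both messages hash to the same worker because they share the key `1`, which is the behavior the `WithHashPool()` demo below relies on.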
+ + + + + + + + + + + diff --git a/src/BasicStructureMapUses/BasicStructureMapUses.csproj b/src/BasicStructureMapUses/BasicStructureMapUses.csproj new file mode 100644 index 00000000000..b65ad89a484 --- /dev/null +++ b/src/BasicStructureMapUses/BasicStructureMapUses.csproj @@ -0,0 +1,87 @@ + + + + + Debug + AnyCPU + {13BA1CC1-A431-441D-8B11-3969D2C68A6E} + Exe + Properties + BasicStructureMapUses + BasicStructureMapUses + v4.5 + 512 + ..\ + true + + + AnyCPU + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + AnyCPU + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\packages\structuremap.3.1.5.154\lib\net40\StructureMap.dll + True + + + ..\packages\structuremap.3.1.5.154\lib\net40\StructureMap.Net4.dll + True + + + + + + + + + + + + + + + + + {fdf09d18-b68e-4b95-b1f6-b89d9c6c3ae9} + Akka.DI.Core + + + {34e5b4e5-0ed0-4a27-b53a-bfd812d45e1e} + Akka.DI.StructureMap + + + {5deddf90-37f0-48d3-a0b0-a5cbd8a7e377} + Akka + + + + + + + This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + + + + \ No newline at end of file diff --git a/src/BasicStructureMapUses/Program.cs b/src/BasicStructureMapUses/Program.cs new file mode 100644 index 00000000000..afdc56a0116 --- /dev/null +++ b/src/BasicStructureMapUses/Program.cs @@ -0,0 +1,64 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using Akka.Actor; +using Akka.DI.StructureMap; +using Akka.Routing; +using StructureMap; +using System; +using System.Threading.Tasks; +using Akka.DI.Core; +using StructureMap.Pipeline; + +namespace BasicStructureMapUses +{ + class Program + { + static void Main(string[] args) + { + WithHashPool(); + } + + private static void WithHashPool() + { + IContainer container = new Container(cfg => + { + cfg.For().Use().Named("TypedWorker").LifecycleIs(); + }); + + using (var system = ActorSystem.Create("MySystem")) + { + var propsResolver = + new StructureMapDependencyResolver(container, system); + + var router = system.ActorOf(system.DI().Props().WithRouter(FromConfig.Instance), "router1"); + + Task.Delay(500).Wait(); + Console.WriteLine("Sending Messages"); + for (var i = 0; i < 5; i++) + { + for (var j = 0; j < 7; j++) + { + + var msg = new TypedActorMessage { Id = j, Name = Guid.NewGuid().ToString() }; + var ms = new AnotherMessage { Id = j, Name = msg.Name }; + + var envelope = new ConsistentHashableEnvelope(ms, msg.Id); + + router.Tell(msg); + router.Tell(envelope); + + } + } + Console.WriteLine("Hit Enter to exit"); + Console.ReadLine(); + } + + + } + } +} diff --git a/src/BasicStructureMapUses/Properties/AssemblyInfo.cs b/src/BasicStructureMapUses/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..84a3b2e07f7 --- /dev/null +++ b/src/BasicStructureMapUses/Properties/AssemblyInfo.cs @@ -0,0 +1,42 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System.Reflection; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. 
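Written out with its generic type arguments, the `WithHashPool()` method above takes roughly the following shape. This is a sketch only: the registration is assumed to be for `TypedWorker`, and the instance name, StructureMap lifecycle, and HOCON router section used by the original sample are not reproduced here.

```csharp
using System;
using Akka.Actor;
using Akka.DI.Core;
using Akka.DI.StructureMap;
using Akka.Routing;
using StructureMap;

// Assumed reconstruction: register TypedWorker with StructureMap...
IContainer container = new Container(cfg =>
{
    cfg.For<TypedWorker>().Use<TypedWorker>();
});

using (var system = ActorSystem.Create("MySystem"))
{
    // ...attach the resolver so system.DI() can build actors from the container...
    var propsResolver = new StructureMapDependencyResolver(container, system);

    // ...and let a config-defined router (e.g. a consistent-hashing pool) create the workers.
    var router = system.ActorOf(
        system.DI().Props<TypedWorker>().WithRouter(FromConfig.Instance),
        "router1");

    router.Tell(new TypedActorMessage { Id = 1, Name = Guid.NewGuid().ToString() });
}
```

`FromConfig.Instance` defers the router definition to the HOCON in `App.config`, whose contents are not shown in this listing.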
Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("BasicStructureMapUses")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("BasicStructureMapUses")] +[assembly: AssemblyCopyright("Copyright © 2015")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("749952c0-10fe-49f7-9982-0cf1b05f6857")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/src/BasicStructureMapUses/packages.config b/src/BasicStructureMapUses/packages.config new file mode 100644 index 00000000000..0374f33be6e --- /dev/null +++ b/src/BasicStructureMapUses/packages.config @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/src/contrib/dependencyInjection/Akka.DI.StructureMap/Akka.DI.StructureMap.csproj b/src/contrib/dependencyInjection/Akka.DI.StructureMap/Akka.DI.StructureMap.csproj new file mode 100644 index 00000000000..6c4863922dc --- /dev/null +++ b/src/contrib/dependencyInjection/Akka.DI.StructureMap/Akka.DI.StructureMap.csproj @@ -0,0 +1,84 @@ + + + + + Debug + AnyCPU + {34E5B4E5-0ED0-4A27-B53A-BFD812D45E1E} + Library + Properties + Akka.DI.StructureMap + Akka.DI.StructureMap + v4.5 + 512 + ..\..\..\ + true + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\..\..\packages\structuremap.3.1.5.154\lib\net40\StructureMap.dll + True + + + ..\..\..\packages\structuremap.3.1.5.154\lib\net40\StructureMap.Net4.dll + True + + + + + + + + Properties\SharedAssemblyInfo.cs + + + + + + + {5deddf90-37f0-48d3-a0b0-a5cbd8a7e377} + Akka + + + {fdf09d18-b68e-4b95-b1f6-b89d9c6c3ae9} + Akka.DI.Core + + + + + + + + + + + + This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. 
+ + + + \ No newline at end of file diff --git a/src/contrib/dependencyInjection/Akka.DI.StructureMap/Akka.DI.StructureMap.nuspec b/src/contrib/dependencyInjection/Akka.DI.StructureMap/Akka.DI.StructureMap.nuspec new file mode 100644 index 00000000000..c654d073b8f --- /dev/null +++ b/src/contrib/dependencyInjection/Akka.DI.StructureMap/Akka.DI.StructureMap.nuspec @@ -0,0 +1,20 @@ + + + + @project@ + @project@@title@ + @build.number@ + @authors@ + @authors@ + StructureMap Dependency Injection (DI) support for Akka.NET + https://github.com/akkadotnet/akka.net/blob/master/LICENSE + https://github.com/akkadotnet/akka.net + http://getakka.net/images/AkkaNetLogo.Normal.png + false + @releaseNotes@ + @copyright@ + @tags@ DI StructureMap + @dependencies@ + @references@ + + diff --git a/src/contrib/dependencyInjection/Akka.DI.StructureMap/Properties/AssemblyInfo.cs b/src/contrib/dependencyInjection/Akka.DI.StructureMap/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..749e0059e3d --- /dev/null +++ b/src/contrib/dependencyInjection/Akka.DI.StructureMap/Properties/AssemblyInfo.cs @@ -0,0 +1,26 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System.Reflection; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("Akka.DI.StructureMap")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyProduct("Akka.DI.StructureMap")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("05e76709-0e94-4dab-8e28-957cb82118d8")] \ No newline at end of file diff --git a/src/contrib/dependencyInjection/Akka.DI.StructureMap/Readme.md b/src/contrib/dependencyInjection/Akka.DI.StructureMap/Readme.md new file mode 100644 index 00000000000..b0824171d37 --- /dev/null +++ b/src/contrib/dependencyInjection/Akka.DI.StructureMap/Readme.md @@ -0,0 +1,97 @@ +#Akka.DI.StructureMap + +**Actor Producer Extension** backed by the [StructureMap](https://github.com/StructureMap/StructureMap) Dependency Injection Container for the [Akka.NET](https://github.com/akkadotnet/akka.net) framework. + +#What is it? + +**Akka.DI.StructureMap** is an **ActorSystem extension** for the Akka.NET framework that provides an alternative to the basic capabilities of [Props](http://akkadotnet.github.io/wiki/Props) when you have Actors with multiple dependencies. + +If StructureMap is your IoC container of choice, and your actors have dependencies that make using the factory method provided by Props prohibitive and code maintenance is an important concern, then this is the extension for you. + +#How do you use it? + +The best way to understand how to use it is by example. If you are already considering this extension then we will assume that you know how to use the [StructureMap](https://github.com/StructureMap/StructureMap) container.
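Concretely, the kind of actor this extension is aimed at is one whose constructor takes injected dependencies. A sketch of such an actor is shown below; `IWorkerRepository` is a hypothetical application service used only for illustration and is not part of this package:

```csharp
using Akka.Actor;

// Hypothetical service contract registered with StructureMap.
public interface IWorkerRepository
{
    void Save(string item);
}

// An actor whose dependencies are supplied by the container rather than by a Props factory.
public class WorkerWithDependencies : ReceiveActor
{
    private readonly IWorkerRepository _repository;

    public WorkerWithDependencies(IWorkerRepository repository)
    {
        _repository = repository;
        Receive<string>(item => _repository.Save(item));
    }
}
```

The walkthrough that follows uses the simpler `TypedWorker` from the sample project, but the wiring is identical for actors like this one.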
The walkthrough below demonstrates a system using [ConsistentHashing](http://getakka.net/docs/working-with-actors/Routers#consistenthashing) routing along with this extension. + +Start by creating your StructureMap ```Container```, registering your actors and dependencies. + +```csharp +// Setup StructureMap +IContainer container = new Container(cfg => + { + cfg.For().Use(); + cfg.For().Use(); + }); +``` + +Next you have to create your ```ActorSystem``` and inject that system reference along with the container reference into a new instance of the ```StructureMapDependencyResolver```. + +```csharp +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new StructureMapDependencyResolver(container, system); + + // we'll fill in the rest in the following steps +} +``` + +To register the actors with the system, use the ```Akka.Actor.Props Create<TActor>()``` method of the ```IDependencyResolver``` interface implemented by the ```StructureMapDependencyResolver```. + +```csharp +// Register the actors with the system +system.ActorOf(resolver.Create<TypedWorker>(), "Worker1"); +system.ActorOf(resolver.Create<TypedWorker>(), "Worker2"); +``` + +Finally, create your router and a message, and send the message to the router. + +```csharp +// Create the router +IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); + +// Create the message to send +TypedActorMessage message = new TypedActorMessage +{ + Id = 1, + Name = Guid.NewGuid().ToString() +}; + +// Send the message to the router +router.Tell(message); +``` + +The resulting code should look similar to the following: + +```csharp +// Setup StructureMap +IContainer container = new Container(cfg => + { + cfg.For().Use(); + cfg.For().Use(); + }); + +// Create the ActorSystem +using (var system = ActorSystem.Create("MySystem")) +{ + // Create the dependency resolver + IDependencyResolver resolver = new StructureMapDependencyResolver(container, system); + + // Register the actors with the system + system.ActorOf(resolver.Create<TypedWorker>(), "Worker1"); + system.ActorOf(resolver.Create<TypedWorker>(), "Worker2"); + + // Create the router + IActorRef router = system.ActorOf(Props.Empty.WithRouter(new ConsistentHashingGroup(config))); + + // Create the message to send + TypedActorMessage message = new TypedActorMessage + { + Id = 1, + Name = Guid.NewGuid().ToString() + }; + + // Send the message to the router + router.Tell(message); +} +``` + diff --git a/src/contrib/dependencyInjection/Akka.DI.StructureMap/StructureMapDependencyResolver.cs b/src/contrib/dependencyInjection/Akka.DI.StructureMap/StructureMapDependencyResolver.cs new file mode 100644 index 00000000000..5b8798e7721 --- /dev/null +++ b/src/contrib/dependencyInjection/Akka.DI.StructureMap/StructureMapDependencyResolver.cs @@ -0,0 +1,103 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2015 Typesafe Inc. +// Copyright (C) 2013-2015 Akka.NET project +// +//----------------------------------------------------------------------- + +using System; +using System.Collections.Concurrent; +using System.Linq; +using System.Runtime.CompilerServices; +using Akka.Actor; +using Akka.DI.Core; +using StructureMap; + +namespace Akka.DI.StructureMap +{ + /// + /// Provides services to the extension system + /// used to create actors using the StructureMap IoC container.
+ /// + public class StructureMapDependencyResolver : IDependencyResolver + { + private IContainer container; + private ConcurrentDictionary<string, Type> typeCache; + private ActorSystem system; + private ConditionalWeakTable<ActorBase, IContainer> references; + + /// + /// Initializes a new instance of the StructureMapDependencyResolver class. + /// + /// The container used to resolve references + /// The actor system to plug into + /// + /// Either the container or the system was null. + /// + public StructureMapDependencyResolver(IContainer container, ActorSystem system) + { + if (system == null) throw new ArgumentNullException("system"); + if (container == null) throw new ArgumentNullException("container"); + this.container = container; + typeCache = new ConcurrentDictionary<string, Type>(StringComparer.InvariantCultureIgnoreCase); + this.system = system; + this.system.AddDependencyResolver(this); + this.references = new ConditionalWeakTable<ActorBase, IContainer>(); + } + + /// + /// Retrieves an actor's type with the specified name + /// + /// The name of the actor to retrieve + /// The type with the specified actor name + public Type GetType(string actorName) + { + typeCache.TryAdd(actorName, actorName.GetTypeValue()); + + return typeCache[actorName]; + + } + + /// + /// Creates a delegate factory used to create actors based on their type + /// + /// The type of actor that the factory builds + /// A delegate factory used to create actors + public Func<ActorBase> CreateActorFactory(Type actorType) + { + return () => + { + var nestedContainer = container.GetNestedContainer(); + var actor = (ActorBase)nestedContainer.GetInstance(actorType); + references.Add(actor, nestedContainer); + return actor; + }; + } + + /// + /// Used to register the configuration for an actor of the specified type + /// + /// The type of actor the configuration is based on + /// The configuration object for the given actor type + public Props Create<TActor>() where TActor : ActorBase + { + return system.GetExtension<DIExt>().Props(typeof(TActor)); + } + + /// + /// Signals the DI container to release its reference to the actor. + /// + /// The actor to remove from the container + public void Release(ActorBase actor) + { + IContainer nestedContainer; + + if (references.TryGetValue(actor, out nestedContainer)) + { + nestedContainer.Dispose(); + references.Remove(actor); + } + } + } +} diff --git a/src/contrib/dependencyInjection/Akka.DI.StructureMap/packages.config b/src/contrib/dependencyInjection/Akka.DI.StructureMap/packages.config new file mode 100644 index 00000000000..0374f33be6e --- /dev/null +++ b/src/contrib/dependencyInjection/Akka.DI.StructureMap/packages.config @@ -0,0 +1,4 @@ + + + + \ No newline at end of file From 6d59f143eedcc236ae98265bce4b9b24dd9bb964 Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Tue, 2 Jun 2015 14:30:45 -0700 Subject: [PATCH 60/66] close #1025 --- src/core/Akka.Remote/RemoteActorRefProvider.cs | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/src/core/Akka.Remote/RemoteActorRefProvider.cs b/src/core/Akka.Remote/RemoteActorRefProvider.cs index a33684658c7..d166c3b9c06 100644 --- a/src/core/Akka.Remote/RemoteActorRefProvider.cs +++ b/src/core/Akka.Remote/RemoteActorRefProvider.cs @@ -206,7 +206,6 @@ public IInternalActorRef ActorOf(ActorSystemImpl system, Props props, IInternalA path.Elements.ToArray()).
WithUid(path.Uid); var remoteRef = new RemoteActorRef(Transport, localAddress, rpath, supervisor, props, deployment); - remoteRef.Start(); return remoteRef; } catch (Exception ex) @@ -255,23 +254,6 @@ public IActorRef RootGuardianAt(Address address) Deploy.None); } - private IInternalActorRef RemoteActorOf(ActorSystemImpl system, Props props, IInternalActorRef supervisor, - ActorPath path) - { - var scope = (RemoteScope)props.Deploy.Scope; - var d = props.Deploy; - var addr = scope.Address; - - var localAddress = Transport.LocalAddressForRemote(addr); - - var rpath = (new RootActorPath(addr) / "remote" / localAddress.Protocol / localAddress.HostPort() / - path.Elements.ToArray()). - WithUid(path.Uid); - var remoteRef = new RemoteActorRef(Transport, localAddress, rpath, supervisor, props, d); - remoteRef.Start(); - return remoteRef; - } - private IInternalActorRef LocalActorOf(ActorSystemImpl system, Props props, IInternalActorRef supervisor, ActorPath path, bool systemService, Deploy deploy, bool lookupDeploy, bool async) { From b1f85d3fe84fe377c03b3d0ac3f8602df88e094d Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Tue, 2 Jun 2015 15:57:01 -0700 Subject: [PATCH 61/66] revert-interface-change --- src/core/Akka/Event/ILoggingAdapter.cs | 6 ++++++ src/core/Akka/Event/LoggingAdapterBase.cs | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/src/core/Akka/Event/ILoggingAdapter.cs b/src/core/Akka/Event/ILoggingAdapter.cs index 1493f072f5f..43474ab89f0 100644 --- a/src/core/Akka/Event/ILoggingAdapter.cs +++ b/src/core/Akka/Event/ILoggingAdapter.cs @@ -39,6 +39,12 @@ public interface ILoggingAdapter /// The arguments. void Info(string format, params object[] args); + /// Logs a message with the Warning level. + /// The format. + /// The arguments. + [Obsolete("Use Warning instead!")] + void Warn(string format, params object[] args); + /// Logs a message with the Warning level. /// The format. /// The arguments. 
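The interface change above deliberately re-adds `Warn` as an `[Obsolete]` member that forwards to `Warning`, so code written against earlier 1.0.x releases keeps compiling while being nudged toward the newer name. A small usage sketch, illustrative only and not part of the patch:

```csharp
using Akka.Actor;
using Akka.Event;

public class AuditActor : ReceiveActor
{
    private readonly ILoggingAdapter _log = Context.GetLogger();

    public AuditActor()
    {
        Receive<string>(msg =>
        {
            _log.Warning("unexpected string message: {0}", msg); // preferred API

            // Existing callers of Warn still compile; the call is forwarded to Warning
            // and the compiler emits an obsolescence warning instead of an error.
            _log.Warn("unexpected string message: {0}", msg);
        });
    }
}
```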
diff --git a/src/core/Akka/Event/LoggingAdapterBase.cs b/src/core/Akka/Event/LoggingAdapterBase.cs index bc46e1049ed..9f29f0f6f94 100644 --- a/src/core/Akka/Event/LoggingAdapterBase.cs +++ b/src/core/Akka/Event/LoggingAdapterBase.cs @@ -99,6 +99,11 @@ public void Debug(string format, params object[] args) } } + public void Warn(string format, params object[] args) + { + Warning(format, args); + } + public void Warning(string format, params object[] args) { if (!IsWarningEnabled) From e1d37a6cf4cd1e0a6dcc5907d5690370ac6d6b9b Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Tue, 2 Jun 2015 16:23:58 -0700 Subject: [PATCH 62/66] close #1004 - Akka.Persistence now added as dependency to all Akka.Persistence packages --- build.fsx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/build.fsx b/build.fsx index 1c17c8d3690..24127f55e88 100644 --- a/build.fsx +++ b/build.fsx @@ -299,8 +299,7 @@ module Nuget = match project with | "Akka" -> [] | "Akka.Cluster" -> ["Akka.Remote", release.NugetVersion] - | "Akka.Persistence.TestKit" -> ["Akka.Persistence", release.NugetVersion] - | "Akka.Persistence.FSharp" -> ["Akka.Persistence", release.NugetVersion] + | persistence when (persistence.StartsWith("Akka.Persistence.")) -> ["Akka.Persistence", release.NugetVersion] | di when (di.StartsWith("Akka.DI.") && not (di.EndsWith("Core"))) -> ["Akka.DI.Core", release.NugetVersion] | testkit when testkit.StartsWith("Akka.TestKit.") -> ["Akka.TestKit", release.NugetVersion] | _ -> ["Akka", release.NugetVersion] From adc5db0eb072ce5a8851ab8f87e2b6140486584f Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Tue, 2 Jun 2015 16:48:57 -0700 Subject: [PATCH 63/66] added release notes for v1.0.2 release --- RELEASE_NOTES.md | 60 +++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 59 insertions(+), 1 deletion(-) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 5e09caa130d..702445b551f 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -1,4 +1,62 @@ -#### 1.0.2 May 1 2015 +#### 1.0.2 June 2 2015 +**Bugfix release for Akka.NET v1.0.1.** + +Fixes & Changes - Akka.NET Core +* [Routers seem ignore supervision strategy](https://github.com/akkadotnet/akka.net/issues/996) +* [Replaced DateTime.Now with DateTime.UtcNow/MonotonicClock](https://github.com/akkadotnet/akka.net/pull/1009) +* [DedicatedThreadScheduler](https://github.com/akkadotnet/akka.net/pull/1002) +* [Add ability to specify scheduler implementation in configuration](https://github.com/akkadotnet/akka.net/pull/994) +* [Added generic extensions to EventStream subscribe/unsubscribe.](https://github.com/akkadotnet/akka.net/pull/990) +* [Convert null to NoSender.](https://github.com/akkadotnet/akka.net/pull/993) +* [Supervisor strategy bad timeouts](https://github.com/akkadotnet/akka.net/pull/986) +* [Updated Pigeon.conf throughput values](https://github.com/akkadotnet/akka.net/pull/980) +* [Add PipeTo for non-generic Tasks for exception handling](https://github.com/akkadotnet/akka.net/pull/978) + +Fixes & Changes - Akka.NET Dependency Injection +* [Added Extensions methods to ActorSystem and ActorContext to make DI more accessible](https://github.com/akkadotnet/akka.net/pull/966) +* [DIActorProducer fixes](https://github.com/akkadotnet/akka.net/pull/961) +* [closes akkadotnet/akka.net#1020 structuremap dependency injection](https://github.com/akkadotnet/akka.net/pull/1021) + +Fixes & Changes - Akka.Remote and Akka.Cluster +* [Fixing up cluster rejoin behavior](https://github.com/akkadotnet/akka.net/pull/962) +* [Added 
dispatcher fixes for remote and cluster ](https://github.com/akkadotnet/akka.net/pull/983) +* [Fixes to ClusterRouterGroup](https://github.com/akkadotnet/akka.net/pull/953) +* [Two actors are created by remote deploy using Props.WithDeploy](https://github.com/akkadotnet/akka.net/issues/1025) + +Fixes & Changes - Akka.Persistence +* [Renamed GuaranteedDelivery classes to AtLeastOnceDelivery](https://github.com/akkadotnet/akka.net/pull/984) +* [Changes in Akka.Persistence SQL backend](https://github.com/akkadotnet/akka.net/pull/963) +* [PostgreSQL persistence plugin for both event journal and snapshot store](https://github.com/akkadotnet/akka.net/pull/971) +* [Cassandra persistence plugin](https://github.com/akkadotnet/akka.net/pull/995) + +**New Features:** + +**Akka.TestKit.XUnit2** +Akka.NET now has support for [XUnit 2.0](http://xunit.github.io/)! You can install Akka.TestKit.XUnit2 via the NuGet commandline: + +``` +PM> Install-Package Akka.TestKit.XUnit2 +``` + +**Akka.Persistence.PostgreSql** and **Akka.Persistence.Cassandra** +Akka.Persistence now has two additional concrete implementations for PostgreSQL and Cassandra! You can install either of the packages using the following commandline: + +PostgreSQL +``` +PM> Install-Package Akka.Persistence.PostgreSql +``` + +Cassandra +``` +PM> Install-Package Akka.Persistence.Cassandra +``` + +**Akka.DI.StructureMap** +Akka.NET's dependency injection system now supports [StructureMap](http://structuremap.github.io/)! You can install Akka.DI.StructureMap via the NuGet commandline: + +``` +PM> Install-Package Akka.DI.StructureMap +``` #### 1.0.1 Apr 28 2015 From 2c4a433210d6552f4a7a7d1eeaec1eda7f96ddd5 Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Tue, 2 Jun 2015 16:51:19 -0700 Subject: [PATCH 64/66] added link to Akka.Persistence.PostgreSql and Akka.Persistence.Cassandra docs --- RELEASE_NOTES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 702445b551f..00e449a730f 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -41,12 +41,12 @@ PM> Install-Package Akka.TestKit.XUnit2 **Akka.Persistence.PostgreSql** and **Akka.Persistence.Cassandra** Akka.Persistence now has two additional concrete implementations for PostgreSQL and Cassandra! You can install either of the packages using the following commandline: -PostgreSQL +[Akka.Persistence.PostgreSql Configuration Docs](https://github.com/akkadotnet/akka.net/tree/dev/src/contrib/persistence/Akka.Persistence.PostgreSql) ``` PM> Install-Package Akka.Persistence.PostgreSql ``` -Cassandra +[Akka.Persistence.Cassandra Configuration Docs](https://github.com/akkadotnet/akka.net/tree/dev/src/contrib/persistence/Akka.Persistence.Cassandra) ``` PM> Install-Package Akka.Persistence.Cassandra ``` From 4a1784944d5eea9d073b2b7f1eade507c36b8532 Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Tue, 2 Jun 2015 17:06:24 -0700 Subject: [PATCH 65/66] added fix pre-release nuget package #s for Akka.Cluster, Akka.Persistence --- build.fsx | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/build.fsx b/build.fsx index 1c17c8d3690..91dd6b71693 100644 --- a/build.fsx +++ b/build.fsx @@ -40,6 +40,7 @@ let envBuildNumber = System.Environment.GetEnvironmentVariable("BUILD_NUMBER") let buildNumber = if String.IsNullOrWhiteSpace(envBuildNumber) then "0" else envBuildNumber let version = parsedRelease.AssemblyVersion + "." 
+ buildNumber +let preReleaseVersion = version + "-beta" let isUnstableDocs = hasBuildParam "unstable" let isPreRelease = hasBuildParam "nugetprerelease" @@ -299,8 +300,8 @@ module Nuget = match project with | "Akka" -> [] | "Akka.Cluster" -> ["Akka.Remote", release.NugetVersion] - | "Akka.Persistence.TestKit" -> ["Akka.Persistence", release.NugetVersion] - | "Akka.Persistence.FSharp" -> ["Akka.Persistence", release.NugetVersion] + | "Akka.Persistence.TestKit" -> ["Akka.Persistence", preReleaseVersion] + | "Akka.Persistence.FSharp" -> ["Akka.Persistence", preReleaseVersion] | di when (di.StartsWith("Akka.DI.") && not (di.EndsWith("Core"))) -> ["Akka.DI.Core", release.NugetVersion] | testkit when testkit.StartsWith("Akka.TestKit.") -> ["Akka.TestKit", release.NugetVersion] | _ -> ["Akka", release.NugetVersion] @@ -308,8 +309,8 @@ module Nuget = // used to add -pre suffix to pre-release packages let getProjectVersion project = match project with - | "Akka.Cluster" -> release.NugetVersion - | persistence when persistence.StartsWith("Akka.Persistence") -> release.NugetVersion + | "Akka.Cluster" -> preReleaseVersion + | persistence when persistence.StartsWith("Akka.Persistence") -> preReleaseVersion | _ -> release.NugetVersion open Nuget From cfaae977312ae424bf1d16c3e35f0facbe144ee5 Mon Sep 17 00:00:00 2001 From: Aaron Stannard Date: Tue, 2 Jun 2015 17:57:21 -0700 Subject: [PATCH 66/66] fixed issue with build.fsx --- build.fsx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/build.fsx b/build.fsx index 91dd6b71693..0783f8f6c3e 100644 --- a/build.fsx +++ b/build.fsx @@ -300,8 +300,7 @@ module Nuget = match project with | "Akka" -> [] | "Akka.Cluster" -> ["Akka.Remote", release.NugetVersion] - | "Akka.Persistence.TestKit" -> ["Akka.Persistence", preReleaseVersion] - | "Akka.Persistence.FSharp" -> ["Akka.Persistence", preReleaseVersion] + | persistence when (persistence.StartsWith("Akka.Persistence.")) -> ["Akka.Persistence", preReleaseVersion] | di when (di.StartsWith("Akka.DI.") && not (di.EndsWith("Core"))) -> ["Akka.DI.Core", release.NugetVersion] | testkit when testkit.StartsWith("Akka.TestKit.") -> ["Akka.TestKit", release.NugetVersion] | _ -> ["Akka", release.NugetVersion]
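Stepping back to the v1.0.2 notes earlier in this series, the "PipeTo for non-generic Tasks" item is easiest to see in a short sketch. This is illustrative only: `WriteToDiskAsync` is a hypothetical helper, and the sketch assumes the usual Akka.NET convention that a faulted piped task reaches the actor as a `Status.Failure` message.

```csharp
using System;
using System.Threading.Tasks;
using Akka.Actor;

public class WriterActor : ReceiveActor
{
    public WriterActor()
    {
        Receive<string>(path =>
        {
            // A plain (non-generic) Task can now be piped back to the actor instead of
            // being awaited inside the Receive handler.
            WriteToDiskAsync(path).PipeTo(Self);
        });

        // A faulted task surfaces as an ordinary message, so the exception can be handled
        // here (or rethrown to involve the supervisor) without blocking the actor.
        Receive<Status.Failure>(failure =>
            Console.WriteLine("write failed: {0}", failure.Cause.Message));
    }

    private static Task WriteToDiskAsync(string path)
    {
        // Hypothetical I/O stand-in.
        return Task.Delay(TimeSpan.FromMilliseconds(100));
    }
}
```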