diff --git a/src/CommunityToolkit.Datasync.Client/Offline/OfflineDbContext.cs b/src/CommunityToolkit.Datasync.Client/Offline/OfflineDbContext.cs
index 6e9b7cc4..a217ea03 100644
--- a/src/CommunityToolkit.Datasync.Client/Offline/OfflineDbContext.cs
+++ b/src/CommunityToolkit.Datasync.Client/Offline/OfflineDbContext.cs
@@ -99,6 +99,12 @@ public abstract partial class OfflineDbContext : DbContext
     /// </summary>
     internal OperationsQueueManager QueueManager { get; }
 
+    /// <summary>
+    /// An event delegate that allows the app to monitor synchronization events.
+    /// </summary>
+    /// <remarks>This event can be called from background threads.</remarks>
+    public event EventHandler<SynchronizationEventArgs>? SynchronizationProgress;
+
     /// <summary>
     /// Initializes a new instance of the class. The
     /// method will be called to
@@ -561,6 +567,15 @@ public async Task<int> SaveChangesAsync(bool acceptAllChangesOnSuccess, bool add
         return await base.SaveChangesAsync(acceptAllChangesOnSuccess, cancellationToken).ConfigureAwait(false);
     }
 
+    /// <summary>
+    /// Sends a synchronization event to the consumers.
+    /// </summary>
+    /// <param name="eventArgs">The event arguments.</param>
+    internal void SendSynchronizationEvent(SynchronizationEventArgs eventArgs)
+    {
+        SynchronizationProgress?.Invoke(this, eventArgs);
+    }
+
     #region IDisposable
     /// <summary>
     /// Ensure that the context has not been disposed.
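Reviewer note: for anyone wiring this up, a minimal consumer sketch follows. Only `SynchronizationProgress`, `SynchronizationEventType`, and `SynchronizationEventArgs` come from this PR; the `AppDbContext` type, its `Movies` set, and the console reporting are illustrative assumptions.

```csharp
// Minimal consumer sketch; AppDbContext is an assumed OfflineDbContext-derived context with a Movies DbSet.
AppDbContext context = new();

// The event can fire on background threads; marshal to the UI thread before touching UI state.
context.SynchronizationProgress += (sender, args) =>
{
    switch (args.EventType)
    {
        case SynchronizationEventType.PullStarted:
            Console.WriteLine($"Pull started for {args.EntityType?.Name} (query '{args.QueryId}')");
            break;
        case SynchronizationEventType.ItemsFetched:
        case SynchronizationEventType.ItemsCommitted:
            Console.WriteLine($"{args.EventType}: {args.ItemsProcessed}/{args.ItemsTotal}");
            break;
        case SynchronizationEventType.PullEnded:
            Console.WriteLine(args.Exception is null ? "Pull complete" : $"Pull failed: {args.Exception.Message}");
            break;
    }
};

await context.Movies.PullAsync();
```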
diff --git a/src/CommunityToolkit.Datasync.Client/Offline/Operations/PullOperationManager.cs b/src/CommunityToolkit.Datasync.Client/Offline/Operations/PullOperationManager.cs
index 22578310..8341ec6b 100644
--- a/src/CommunityToolkit.Datasync.Client/Offline/Operations/PullOperationManager.cs
+++ b/src/CommunityToolkit.Datasync.Client/Offline/Operations/PullOperationManager.cs
@@ -14,6 +14,7 @@
 using System.Reflection;
 using System.Text.Json;
 using System.Text.Json.Serialization;
+using static CommunityToolkit.Datasync.Client.Offline.Operations.PullOperationManager;
 
 namespace CommunityToolkit.Datasync.Client.Offline.Operations;
 
@@ -53,61 +54,87 @@ public async Task<PullResult> ExecuteAsync(IEnumerable<PullRequest> requests, Pu
         QueueHandler<PullResponse> databaseUpdateQueue = new(1, async pullResponse =>
         {
-            DateTimeOffset lastSynchronization = await DeltaTokenStore.GetDeltaTokenAsync(pullResponse.QueryId, cancellationToken).ConfigureAwait(false);
-            foreach (object item in pullResponse.Items)
+            if (pullResponse.Items.Any())
             {
-                EntityMetadata metadata = EntityResolver.GetEntityMetadata(item, pullResponse.EntityType);
-                object? originalEntity = await context.FindAsync(pullResponse.EntityType, [metadata.Id], cancellationToken).ConfigureAwait(false);
-
-                if (originalEntity is null && !metadata.Deleted)
-                {
-                    _ = context.Add(item);
-                    result.IncrementAdditions();
-                }
-                else if (originalEntity is not null && metadata.Deleted)
+                DateTimeOffset lastSynchronization = await DeltaTokenStore.GetDeltaTokenAsync(pullResponse.QueryId, cancellationToken).ConfigureAwait(false);
+                foreach (object item in pullResponse.Items)
                 {
-                    _ = context.Remove(originalEntity);
-                    result.IncrementDeletions();
-                }
-                else if (originalEntity is not null && !metadata.Deleted)
-                {
-                    // Gather properties marked with [JsonIgnore]
-                    HashSet<string> ignoredProps = pullResponse.EntityType
-                        .GetProperties(BindingFlags.Public | BindingFlags.Instance)
-                        .Where(p => p.IsDefined(typeof(JsonIgnoreAttribute), inherit: true))
-                        .Select(p => p.Name)
-                        .ToHashSet();
-
-                    EntityEntry originalEntry = context.Entry(originalEntity);
-                    EntityEntry newEntry = context.Entry(item);
-
-                    // Only copy properties that are not marked with [JsonIgnore]
-                    foreach (IProperty property in originalEntry.Metadata.GetProperties())
+                    EntityMetadata metadata = EntityResolver.GetEntityMetadata(item, pullResponse.EntityType);
+                    object? originalEntity = await context.FindAsync(pullResponse.EntityType, [metadata.Id], cancellationToken).ConfigureAwait(false);
+
+                    if (originalEntity is null && !metadata.Deleted)
+                    {
+                        _ = context.Add(item);
+                        result.IncrementAdditions();
+                    }
+                    else if (originalEntity is not null && metadata.Deleted)
+                    {
+                        _ = context.Remove(originalEntity);
+                        result.IncrementDeletions();
+                    }
+                    else if (originalEntity is not null && !metadata.Deleted)
                     {
-                        if (!ignoredProps.Contains(property.Name))
+                        // Gather properties marked with [JsonIgnore]
+                        HashSet<string> ignoredProps = pullResponse.EntityType
+                            .GetProperties(BindingFlags.Public | BindingFlags.Instance)
+                            .Where(p => p.IsDefined(typeof(JsonIgnoreAttribute), inherit: true))
+                            .Select(p => p.Name)
+                            .ToHashSet();
+
+                        EntityEntry originalEntry = context.Entry(originalEntity);
+                        EntityEntry newEntry = context.Entry(item);
+
+                        // Only copy properties that are not marked with [JsonIgnore]
+                        foreach (IProperty property in originalEntry.Metadata.GetProperties())
                         {
-                            originalEntry.Property(property.Name).CurrentValue = newEntry.Property(property.Name).CurrentValue;
+                            if (!ignoredProps.Contains(property.Name))
+                            {
+                                originalEntry.Property(property.Name).CurrentValue = newEntry.Property(property.Name).CurrentValue;
+                            }
                         }
+
+                        result.IncrementReplacements();
                     }
 
-                    result.IncrementReplacements();
+                    if (metadata.UpdatedAt > lastSynchronization)
+                    {
+                        lastSynchronization = metadata.UpdatedAt.Value;
+                        bool isAdded = await DeltaTokenStore.SetDeltaTokenAsync(pullResponse.QueryId, metadata.UpdatedAt.Value, cancellationToken).ConfigureAwait(false);
+                        if (isAdded)
+                        {
+                            // Sqlite oddity - you can't add then update; it changes the change type to UPDATE, which then fails.
+                            _ = await context.SaveChangesAsync(true, false, cancellationToken).ConfigureAwait(false);
+                        }
+                    }
                 }
 
-                if (metadata.UpdatedAt > lastSynchronization)
+                if (pullOptions.SaveAfterEveryServiceRequest)
                 {
-                    lastSynchronization = metadata.UpdatedAt.Value;
-                    bool isAdded = await DeltaTokenStore.SetDeltaTokenAsync(pullResponse.QueryId, metadata.UpdatedAt.Value, cancellationToken).ConfigureAwait(false);
-                    if (isAdded)
-                    {
-                        // Sqlite oddity - you can't add then update; it changes the change type to UPDATE, which then fails.
-                        _ = await context.SaveChangesAsync(true, false, cancellationToken).ConfigureAwait(false);
-                    }
+                    _ = await context.SaveChangesAsync(true, false, cancellationToken).ConfigureAwait(false);
                 }
+
+                context.SendSynchronizationEvent(new SynchronizationEventArgs()
+                {
+                    EventType = SynchronizationEventType.ItemsCommitted,
+                    EntityType = pullResponse.EntityType,
+                    ItemsProcessed = pullResponse.TotalItemsProcessed,
+                    ItemsTotal = pullResponse.TotalRequestItems,
+                    QueryId = pullResponse.QueryId
+                });
             }
 
-            if (pullOptions.SaveAfterEveryServiceRequest)
+            if (pullResponse.Completed)
             {
-                _ = await context.SaveChangesAsync(true, false, cancellationToken).ConfigureAwait(false);
+                context.SendSynchronizationEvent(new SynchronizationEventArgs()
+                {
+                    EventType = SynchronizationEventType.PullEnded,
+                    EntityType = pullResponse.EntityType,
+                    ItemsProcessed = pullResponse.TotalItemsProcessed,
+                    ItemsTotal = pullResponse.TotalRequestItems,
+                    QueryId = pullResponse.QueryId,
+                    Exception = pullResponse.Exception,
+                    ServiceResponse = pullResponse.Exception is DatasyncPullException ex ? ex.ServiceResponse : null
+                });
             }
         });
@@ -116,14 +143,34 @@ public async Task<PullResult> ExecuteAsync(IEnumerable<PullRequest> requests, Pu
             Uri endpoint = ExecutableOperation.MakeAbsoluteUri(pullRequest.HttpClient.BaseAddress, pullRequest.Endpoint);
             Uri requestUri = new UriBuilder(endpoint) { Query = pullRequest.QueryDescription.ToODataQueryString() }.Uri;
             Type pageType = typeof(Page<>).MakeGenericType(pullRequest.EntityType);
+            long itemsProcessed = 0;
+            long totalCount = 0;
 
             try
             {
                 bool completed = false;
+
+                // Signal we started the pull operation.
+                context.SendSynchronizationEvent(new SynchronizationEventArgs()
+                {
+                    EventType = SynchronizationEventType.PullStarted,
+                    EntityType = pullRequest.EntityType,
+                    QueryId = pullRequest.QueryId
+                });
                 do
                 {
                     Page<object> page = await GetPageAsync(pullRequest.HttpClient, requestUri, pageType, cancellationToken).ConfigureAwait(false);
-                    databaseUpdateQueue.Enqueue(new PullResponse(pullRequest.EntityType, pullRequest.QueryId, page.Items));
+                    itemsProcessed += page.Items.Count();
+                    totalCount = page.Count ?? totalCount;
+
+                    context.SendSynchronizationEvent(new SynchronizationEventArgs()
+                    {
+                        EventType = SynchronizationEventType.ItemsFetched,
+                        EntityType = pullRequest.EntityType,
+                        ItemsProcessed = itemsProcessed,
+                        ItemsTotal = page.Count ?? 0,
+                        QueryId = pullRequest.QueryId
+                    });
+
                     if (!string.IsNullOrEmpty(page.NextLink))
                     {
                         requestUri = new UriBuilder(endpoint) { Query = page.NextLink }.Uri;
@@ -132,12 +179,15 @@ public async Task<PullResult> ExecuteAsync(IEnumerable<PullRequest> requests, Pu
                     {
                         completed = true;
                     }
+
+                    databaseUpdateQueue.Enqueue(new PullResponse(pullRequest.EntityType, pullRequest.QueryId, page.Items, totalCount, itemsProcessed, completed));
                 }
                 while (!completed);
             }
             catch (DatasyncPullException ex)
             {
                 result.AddFailedRequest(requestUri, ex.ServiceResponse);
+                databaseUpdateQueue.Enqueue(new PullResponse(pullRequest.EntityType, pullRequest.QueryId, [], totalCount, itemsProcessed, true, ex));
             }
         });
@@ -173,6 +223,8 @@ public async Task<PullResult> ExecuteAsync(IEnumerable<PullRequest> requests, Pu
     /// <exception cref="DatasyncException">Thrown on error</exception>
     internal async Task<Page<object>> GetPageAsync(HttpClient client, Uri requestUri, Type pageType, CancellationToken cancellationToken = default)
     {
+        PropertyInfo countPropInfo = pageType.GetProperty("Count")
+            ?? throw new DatasyncException($"Page type '{pageType.Name}' does not have a 'Count' property");
         PropertyInfo itemsPropInfo = pageType.GetProperty("Items")
             ?? throw new DatasyncException($"Page type '{pageType.Name}' does not have an 'Items' property");
         PropertyInfo nextLinkPropInfo = pageType.GetProperty("NextLink")
@@ -193,6 +245,7 @@ internal async Task<Page<object>> GetPageAsync(HttpClient client, Uri requestUri
         return new Page<object>()
         {
+            Count = (long?)countPropInfo.GetValue(result),
             Items = (IEnumerable<object>)itemsPropInfo.GetValue(result)!,
             NextLink = (string?)nextLinkPropInfo.GetValue(result)
         };
@@ -237,6 +290,10 @@ internal static QueryDescription PrepareQueryDescription(QueryDescription source
     /// <param name="EntityType">The type of entity contained within the items.</param>
     /// <param name="QueryId">The query ID for the request.</param>
    /// <param name="Items">The list of items to process.</param>
+    /// <param name="TotalRequestItems">The total number of items in the current pull request.</param>
+    /// <param name="TotalItemsProcessed">The total number of items processed, <paramref name="Items"/> included.</param>
+    /// <param name="Completed">If true, indicates that the pull request is completed.</param>
+    /// <param name="Exception">Indicates an exception occurred during fetching of data.</param>
     [ExcludeFromCodeCoverage]
-    internal record PullResponse(Type EntityType, string QueryId, IEnumerable<object> Items);
+    internal record PullResponse(Type EntityType, string QueryId, IEnumerable<object> Items, long TotalRequestItems, long TotalItemsProcessed, bool Completed, Exception? Exception = null);
 }
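Reviewer note: `ItemsFetched` reports progress per page fetched from the service, `ItemsCommitted` reports progress per page written to the local store, and `ItemsTotal` echoes the page count returned by the server (0 when no count is available). A hedged sketch, not part of the PR, for turning either event into a percentage:

```csharp
// Hedged sketch: convert a progress event into a percentage.
// ItemsTotal is 0 when the server did not return a count, so guard against dividing by zero.
static double? ToPercentage(SynchronizationEventArgs args) =>
    args.ItemsTotal > 0 ? 100.0 * args.ItemsProcessed / args.ItemsTotal : null;
```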
diff --git a/src/CommunityToolkit.Datasync.Client/Offline/OperationsQueue/OperationsQueueManager.cs b/src/CommunityToolkit.Datasync.Client/Offline/OperationsQueue/OperationsQueueManager.cs
index d00c8ac6..ba2211c3 100644
--- a/src/CommunityToolkit.Datasync.Client/Offline/OperationsQueue/OperationsQueueManager.cs
+++ b/src/CommunityToolkit.Datasync.Client/Offline/OperationsQueue/OperationsQueueManager.cs
@@ -270,16 +270,42 @@ internal async Task<PushResult> PushAsync(IEnumerable<Type> entityTypes, PushOpt
         // Determine the list of queued operations in scope.
         List<DatasyncOperation> queuedOperations = await GetQueuedOperationsAsync(entityTypeNames, cancellationToken).ConfigureAwait(false);
+
+        // Signal we started the push operation.
+        this._context.SendSynchronizationEvent(new SynchronizationEventArgs()
+        {
+            EventType = SynchronizationEventType.PushStarted,
+            ItemsTotal = queuedOperations.Count
+        });
+
         if (queuedOperations.Count == 0)
         {
+            // Signal we ended the push operation.
+            this._context.SendSynchronizationEvent(new SynchronizationEventArgs()
+            {
+                EventType = SynchronizationEventType.PushEnded,
+                ItemsProcessed = 0,
+                ItemsTotal = 0
+            });
             return pushResult;
         }
 
+        int nrItemsProcessed = 0;
+
         // Push things in parallel, according to the PushOptions
         QueueHandler<DatasyncOperation> queueHandler = new(pushOptions.ParallelOperations, async operation =>
         {
             ServiceResponse? response = await PushOperationAsync(operation, cancellationToken).ConfigureAwait(false);
             pushResult.AddOperationResult(operation, response);
+            // We can run on multiple threads, so use Interlocked to update the number of items processed.
+            int newItemsProcessed = Interlocked.Increment(ref nrItemsProcessed);
+            this._context.SendSynchronizationEvent(new SynchronizationEventArgs()
+            {
+                EventType = SynchronizationEventType.PushItem,
+                ItemsProcessed = newItemsProcessed,
+                ItemsTotal = queuedOperations.Count,
+                PushOperation = operation,
+            });
         });
 
         // Enqueue and process all the queued operations in scope
@@ -288,6 +314,14 @@ internal async Task<PushResult> PushAsync(IEnumerable<Type> entityTypes, PushOpt
 
         // Save the changes, this time we don't update the queue.
         _ = await this._context.SaveChangesAsync(acceptAllChangesOnSuccess: true, addToQueue: false, cancellationToken).ConfigureAwait(false);
+
+        this._context.SendSynchronizationEvent(new SynchronizationEventArgs()
+        {
+            EventType = SynchronizationEventType.PushEnded,
+            ItemsProcessed = nrItemsProcessed,
+            ItemsTotal = queuedOperations.Count,
+        });
+
         return pushResult;
     }
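Reviewer note: `PushItem` events are raised from the parallel `QueueHandler` workers (hence the `Interlocked.Increment` above), so handlers must tolerate background threads. A hedged consumer sketch that stays thread-safe by funnelling progress through `IProgress<T>`; `statusLabel` is an assumed UI element and not part of this change:

```csharp
// IProgress<T> created on the UI thread marshals callbacks back to that thread,
// so the handler body below never touches UI state from a worker thread.
IProgress<(long done, long total)> progress =
    new Progress<(long done, long total)>(p => statusLabel.Text = $"Pushed {p.done} of {p.total}");

context.SynchronizationProgress += (_, args) =>
{
    if (args.EventType == SynchronizationEventType.PushItem)
    {
        progress.Report((args.ItemsProcessed, args.ItemsTotal));
    }
};
```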
diff --git a/src/CommunityToolkit.Datasync.Client/Offline/SynchronizationEventArgs.cs b/src/CommunityToolkit.Datasync.Client/Offline/SynchronizationEventArgs.cs
new file mode 100644
index 00000000..0c8a763c
--- /dev/null
+++ b/src/CommunityToolkit.Datasync.Client/Offline/SynchronizationEventArgs.cs
@@ -0,0 +1,97 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+// See the LICENSE file in the project root for more information.
+
+namespace CommunityToolkit.Datasync.Client.Offline;
+
+/// <summary>
+/// The list of synchronization events that we support.
+/// </summary>
+public enum SynchronizationEventType
+{
+    /// <summary>
+    /// Pull for the given entity starts.
+    /// </summary>
+    /// <remarks><see cref="SynchronizationEventArgs.ItemsTotal"/> is not yet known here.</remarks>
+    PullStarted,
+
+    /// <summary>
+    /// Occurs when items have been successfully fetched from the server.
+    /// </summary>
+    /// <remarks>This event is raised after a page of entities was successfully fetched from the server, ready to be committed to the data store.</remarks>
+    ItemsFetched,
+
+    /// <summary>
+    /// Occurs when items have been successfully committed to the underlying data store.
+    /// </summary>
+    /// <remarks>This event is raised after a page of entities was successfully committed to the database.</remarks>
+    ItemsCommitted,
+
+    /// <summary>
+    /// Pull for the given entity ended.
+    /// </summary>
+    PullEnded,
+
+    /// <summary>
+    /// Push operation started.
+    /// </summary>
+    PushStarted,
+
+    /// <summary>
+    /// An item was pushed to the server.
+    /// </summary>
+    PushItem,
+
+    /// <summary>
+    /// Push operation ended.
+    /// </summary>
+    PushEnded,
+}
+
+/// <summary>
+/// The event arguments sent when a synchronization event occurs.
+/// </summary>
+public class SynchronizationEventArgs
+{
+    /// <summary>
+    /// The type of event.
+    /// </summary>
+    /// <remarks>
+    /// On pull events, reporting occurs per entity type, with a start/stop per entity type.
+    /// On push events, reporting occurs per push request, which may contain multiple entity types.
+    /// </remarks>
+    public required SynchronizationEventType EventType { get; init; }
+
+    /// <summary>
+    /// The EntityType that is being processed. Not used on push events.
+    /// </summary>
+    public Type? EntityType { get; init; }
+
+    /// <summary>
+    /// When pulling records, the number of items for the given entity that have been processed in the current pull request.
+    /// When pushing records, the total number of items that have been processed in the current push request.
+    /// </summary>
+    public long ItemsProcessed { get; init; } = -1;
+
+    /// <summary>
+    /// When pulling records, the total number of items to pull for the given entity in the current pull request.
+    /// When pushing records, the total number of items that are being pushed in the current push request.
+    /// </summary>
+    public long ItemsTotal { get; init; }
+
+    /// <summary>
+    /// The query ID that is being processed on pull operations. Not used on push events.
+    /// </summary>
+    public string? QueryId { get; init; }
+
+    /// <summary>
+    /// If not null on event type <see cref="SynchronizationEventType.PullEnded"/>, indicates that the pull failed with this exception. Currently not used on push.
+    /// </summary>
+    public Exception? Exception { get; init; }
+
+    /// <summary>
+    /// If a <see cref="DatasyncPullException"/> occurred during server call processing, this property has more detail on the server response. Currently not used on push; use the returned <see cref="PushResult"/> instead.
+    /// </summary>
+    public ServiceResponse? ServiceResponse { get; init; }
+
+    /// <summary>
+    /// The operation that was executed. Not used on pull events.
+    /// </summary>
+    public DatasyncOperation? PushOperation { get; init; }
+}
diff --git a/src/CommunityToolkit.Datasync.Client/Serialization/DateTimeConverter.cs b/src/CommunityToolkit.Datasync.Client/Serialization/DateTimeConverter.cs
index da03fb5e..c5db71d1 100644
--- a/src/CommunityToolkit.Datasync.Client/Serialization/DateTimeConverter.cs
+++ b/src/CommunityToolkit.Datasync.Client/Serialization/DateTimeConverter.cs
@@ -26,7 +26,14 @@ public override DateTime Read(ref Utf8JsonReader reader, Type typeToConvert, Jso
         }
         else
         {
-            return DateTime.Parse(token);
+            // Check if the datetime was 'default'. If so, do not adjust to local time.
+            DateTime utc = DateTime.Parse(token, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal | DateTimeStyles.AssumeUniversal);
+            if (utc == default)
+            {
+                return utc;
+            }
+
+            return utc.ToLocalTime();
         }
     }
diff --git a/src/CommunityToolkit.Datasync.Server.Abstractions/Json/DateTimeConverter.cs b/src/CommunityToolkit.Datasync.Server.Abstractions/Json/DateTimeConverter.cs
index a61d41e7..d5d89180 100644
--- a/src/CommunityToolkit.Datasync.Server.Abstractions/Json/DateTimeConverter.cs
+++ b/src/CommunityToolkit.Datasync.Server.Abstractions/Json/DateTimeConverter.cs
@@ -18,7 +18,16 @@ public class DateTimeConverter : JsonConverter<DateTime>
     /// <inheritdoc />
     public override DateTime Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
-        => DateTime.Parse(reader.GetString() ?? string.Empty);
+    {
+        // Check if the datetime was 'default'. If so, do not adjust to local time.
+        DateTime utc = DateTime.Parse(reader.GetString() ?? "", CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal | DateTimeStyles.AssumeUniversal);
+        if (utc == default)
+        {
+            return utc;
+        }
+
+        return utc.ToLocalTime();
+    }
 
     /// <inheritdoc />
     public override void Write(Utf8JsonWriter writer, DateTime value, JsonSerializerOptions options)
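Reviewer note: both converters now parse with `AssumeUniversal | AdjustToUniversal` and then convert to local time, while leaving `default(DateTime)` untouched. A hedged sketch of the observable behavior, assuming `serializerOptions` has this converter registered and `Entity` is an illustrative model with a single `DateTime UpdatedAt` property (mirroring the tests below):

```csharp
using System.Diagnostics;
using System.Text.Json;

// default(DateTime) survives unchanged instead of being shifted by the local UTC offset.
string json = JsonSerializer.Serialize(new Entity { UpdatedAt = default }, serializerOptions);
Entity roundTripped = JsonSerializer.Deserialize<Entity>(json, serializerOptions)!;
Debug.Assert(roundTripped.UpdatedAt == default);

// A UTC value is parsed as UTC and returned as local time, so it compares equal to its local representation.
DateTime utcValue = new(2021, 8, 21, 14, 35, 20, DateTimeKind.Utc);
json = JsonSerializer.Serialize(new Entity { UpdatedAt = utcValue }, serializerOptions);
roundTripped = JsonSerializer.Deserialize<Entity>(json, serializerOptions)!;
Debug.Assert(roundTripped.UpdatedAt == utcValue.ToLocalTime());
```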
diff --git a/tests/CommunityToolkit.Datasync.Client.Test/Offline/OfflineDbContext_Tests.cs b/tests/CommunityToolkit.Datasync.Client.Test/Offline/OfflineDbContext_Tests.cs
index 7e85ad6f..33db3a48 100644
--- a/tests/CommunityToolkit.Datasync.Client.Test/Offline/OfflineDbContext_Tests.cs
+++ b/tests/CommunityToolkit.Datasync.Client.Test/Offline/OfflineDbContext_Tests.cs
@@ -9,6 +9,7 @@
 using CommunityToolkit.Datasync.Client.Test.Offline.Helpers;
 using CommunityToolkit.Datasync.TestCommon;
 using CommunityToolkit.Datasync.TestCommon.Databases;
+using CommunityToolkit.Datasync.TestCommon.Models;
 using Microsoft.Data.Sqlite;
 using Microsoft.EntityFrameworkCore;
 using System.Net;
@@ -1433,6 +1434,182 @@ public async Task DbSet_PushAsync_Throws_OnNonOfflineDbContext()
     }
     #endregion
 
+    #region SynchronizationProgress
+    [Fact]
+    public async Task SynchronizationProgress_Event_Works()
+    {
+        Page<ClientMovie> page1 = CreatePage(5, 20, "$skip=5");
+        Page<ClientMovie> page2 = CreatePage(5, 20, "$skip=10");
+        Page<ClientMovie> page3 = CreatePage(5, 20, "$skip=15");
+        Page<ClientMovie> page4 = CreatePage(5, 20);
+
+        this.context.Handler.AddResponse(HttpStatusCode.OK, page1);
+        this.context.Handler.AddResponse(HttpStatusCode.OK, page2);
+        this.context.Handler.AddResponse(HttpStatusCode.OK, page3);
+        this.context.Handler.AddResponse(HttpStatusCode.OK, page4);
+
+        bool eventFiredForFetch = false;
+        bool eventFiredForCommit = false;
+        bool eventFiredForStart = false;
+        bool eventFiredForEnd = false;
+        long currentItemsFetched = 0;
+        long currentItemsCommited = 0;
+
+        this.context.SynchronizationProgress += (sender, args) =>
+        {
+            sender.Should().Be(this.context);
+            args.EntityType.Should().Be<ClientMovie>();
+            args.QueryId.Should().Be("CommunityToolkit.Datasync.TestCommon.Databases.ClientMovie");
+            args.Exception.Should().BeNull(); // We don't test exceptions here, so should always be null.
+            args.ServiceResponse.Should().BeNull();
+            switch (args.EventType)
+            {
+                case SynchronizationEventType.ItemsFetched:
+                    currentItemsFetched += 5;
+                    args.ItemsProcessed.Should().Be(currentItemsFetched);
+                    args.ItemsTotal.Should().Be(20);
+                    eventFiredForFetch = true;
+                    break;
+                case SynchronizationEventType.ItemsCommitted:
+                    currentItemsCommited += 5;
+                    args.ItemsProcessed.Should().Be(currentItemsCommited);
+                    args.ItemsTotal.Should().Be(20);
+                    eventFiredForCommit = true;
+                    break;
+                case SynchronizationEventType.PullStarted:
+                    eventFiredForStart.Should().BeFalse("PullStarted event should only fire once");
+                    eventFiredForStart = true;
+                    break;
+                case SynchronizationEventType.PullEnded:
+                    eventFiredForEnd.Should().BeFalse("PullEnded event should only fire once");
+                    eventFiredForEnd = true;
+                    args.ItemsProcessed.Should().Be(20);
+                    args.ItemsTotal.Should().Be(20);
+                    break;
+                default:
+                    Assert.Fail($"Invalid event type: {args.EventType}");
+                    break;
+            }
+        };
+
+        await this.context.Movies.PullAsync();
+
+        eventFiredForStart.Should().BeTrue();
+        eventFiredForFetch.Should().BeTrue();
+        eventFiredForCommit.Should().BeTrue();
+        eventFiredForEnd.Should().BeTrue();
+        currentItemsFetched.Should().Be(20);
+        currentItemsCommited.Should().Be(20);
+    }
+
+    [Fact]
+    public async Task PullAsync_List_FailedRequest_SynchronizationEventWorks()
+    {
+        this.context.Handler.AddResponse(HttpStatusCode.BadRequest);
+
+        bool eventFiredForStart = false;
+        bool eventFiredForEnd = false;
+
+        this.context.SynchronizationProgress += (sender, args) =>
+        {
+            sender.Should().Be(this.context);
+            args.EntityType.Should().Be<ClientMovie>();
+            args.QueryId.Should().Be("CommunityToolkit.Datasync.TestCommon.Databases.ClientMovie");
+            switch (args.EventType)
+            {
+                case SynchronizationEventType.PullStarted:
+                    eventFiredForStart.Should().BeFalse("PullStarted event should only fire once");
+                    eventFiredForStart = true;
+                    args.Exception.Should().BeNull();
+                    args.ServiceResponse.Should().BeNull();
+                    break;
+                case SynchronizationEventType.PullEnded:
+                    eventFiredForEnd.Should().BeFalse("PullEnded event should only fire once");
+                    eventFiredForEnd = true;
+                    args.Exception.Should().NotBeNull();
+                    args.Exception.Should().BeOfType<DatasyncPullException>();
+                    args.ServiceResponse.Should().NotBeNull();
+                    args.ServiceResponse.StatusCode.Should().Be(400);
+                    break;
+                default:
+                    Assert.Fail($"Unexpected event type: {args.EventType}");
+                    break;
+            }
+        };
+
+        PullResult pullResult = await this.context.PullAsync([typeof(ClientMovie)], new PullOptions());
+
+        eventFiredForStart.Should().BeTrue();
+        eventFiredForEnd.Should().BeTrue();
+    }
+
+    [Fact]
+    public async Task SynchronizationProgress_Event_Works_For_Push()
+    {
+        // Add movies for testing
+        (MovieBase movie, string id)[] newMovies =
+        [
+            (TestData.Movies.BlackPanther, Guid.NewGuid().ToString("N")),
+            (TestData.Movies.Dune, Guid.NewGuid().ToString("N")),
+            (TestData.Movies.DrNo, Guid.NewGuid().ToString("N")),
+        ];
+
+        foreach ((MovieBase movie, string id) in newMovies)
+        {
+            this.context.Movies.Add(new(movie) { Id = id });
+            ClientMovie responseMovie = new(movie) { Id = id, UpdatedAt = DateTimeOffset.UtcNow, Version = Guid.NewGuid().ToString() };
+            this.context.Handler.AddResponseContent(DatasyncSerializer.Serialize(responseMovie), HttpStatusCode.Created);
+            this.context.SaveChanges();
+        }
+
+        bool eventFiredForItem = false;
+        bool eventFiredForStart = false;
+        bool eventFiredForEnd = false;
+        int[] itemsProcessedReported = new int[newMovies.Length]; // Due to multithreading, we can't guarantee the order of items processed, so register the arrival of each separately.
+
+        this.context.SynchronizationProgress += (sender, args) =>
+        {
+            sender.Should().Be(this.context);
+            args.Exception.Should().BeNull();
+            args.ServiceResponse.Should().BeNull();
+            args.ItemsTotal.Should().Be(newMovies.Length);
+            switch (args.EventType)
+            {
+                case SynchronizationEventType.PushItem:
+                    args.ItemsTotal.Should().Be(newMovies.Length);
+                    args.ItemsProcessed.Should().BeInRange(1, newMovies.Length);
+                    int prevProcessed = Interlocked.Exchange(ref itemsProcessedReported[args.ItemsProcessed - 1], 1);
+                    prevProcessed.Should().Be(0, "Each item should only be reported once");
+                    args.PushOperation.Should().NotBeNull();
+                    args.PushOperation.ItemId.Should().Be(newMovies[args.ItemsProcessed - 1].id);
+                    eventFiredForItem = true;
+                    break;
+                case SynchronizationEventType.PushStarted:
+                    eventFiredForStart.Should().BeFalse("PushStarted event should only fire once");
+                    eventFiredForStart = true;
+                    break;
+                case SynchronizationEventType.PushEnded:
+                    eventFiredForEnd.Should().BeFalse("PushEnded event should only fire once");
+                    eventFiredForEnd = true;
+                    args.ItemsProcessed.Should().Be(newMovies.Length);
+                    itemsProcessedReported.Should().OnlyContain(x => x == 1, "All items should be reported as processed");
+                    args.PushOperation.Should().BeNull();
+                    break;
+                default:
+                    Assert.Fail($"Invalid event type: {args.EventType}");
+                    break;
+            }
+        };
+
+        PushResult results = await this.context.Movies.PushAsync();
+
+        eventFiredForStart.Should().BeTrue();
+        eventFiredForItem.Should().BeTrue();
+        eventFiredForEnd.Should().BeTrue();
+    }
+
+    #endregion
+
     public class NotOfflineDbContext : DbContext
     {
         public NotOfflineDbContext() : base()
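Reviewer note: the pull tests above rely on a `CreatePage` helper that is not part of this diff. A hypothetical approximation is sketched below purely to make the tests readable in isolation; the real helper in the test class may differ.

```csharp
// Hypothetical sketch of a paging helper: 'count' fresh items, an overall $count value,
// and an optional nextLink query string for the following page.
private static Page<ClientMovie> CreatePage(int count, long totalCount, string? nextLink = null) => new()
{
    Items = Enumerable.Range(0, count)
        .Select(_ => new ClientMovie(TestData.Movies.BlackPanther) { Id = Guid.NewGuid().ToString("N") })
        .ToList(),
    Count = totalCount,
    NextLink = nextLink
};
```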
diff --git a/tests/CommunityToolkit.Datasync.Client.Test/Serialization/DateTimeConverter_Tests.cs b/tests/CommunityToolkit.Datasync.Client.Test/Serialization/DateTimeConverter_Tests.cs
index a7f05332..c33737c1 100644
--- a/tests/CommunityToolkit.Datasync.Client.Test/Serialization/DateTimeConverter_Tests.cs
+++ b/tests/CommunityToolkit.Datasync.Client.Test/Serialization/DateTimeConverter_Tests.cs
@@ -75,6 +75,52 @@ public void Converter_HandlesNullDateInInput(string culture)
         });
     }
 
+    [Theory]
+    [MemberData(nameof(Locales))]
+    public void Converter_Roundtrip_Consistent_Default(string culture)
+    {
+        DateTime value = default;
+
+        TestWithCulture(culture, () =>
+        {
+            Entity entity = new() { UpdatedAt = value };
+            string serialized = JsonSerializer.Serialize(entity, SerializerOptions);
+            Entity deserialized = JsonSerializer.Deserialize<Entity>(serialized, SerializerOptions);
+            Assert.Equal(deserialized.UpdatedAt, value);
+        });
+    }
+
+    [Theory]
+    [MemberData(nameof(Locales))]
+    public void Converter_Roundtrip_Consistent_Local(string culture)
+    {
+        DateTime value = new(2021, 8, 21, 12, 30, 15, 123, DateTimeKind.Local);
+
+        TestWithCulture(culture, () =>
+        {
+            Entity entity = new() { UpdatedAt = value };
+            string serialized = JsonSerializer.Serialize(entity, SerializerOptions);
+            Entity deserialized = JsonSerializer.Deserialize<Entity>(serialized, SerializerOptions);
+            Assert.Equal(deserialized.UpdatedAt, value);
+        });
+    }
+
+    [Theory]
+    [MemberData(nameof(Locales))]
+    public void Converter_Roundtrip_Consistent_Utc(string culture)
+    {
+        DateTime value = new(2021, 8, 21, 14, 35, 20, 12, DateTimeKind.Utc);
+
+        TestWithCulture(culture, () =>
+        {
+            Entity entity = new() { UpdatedAt = value };
+            string serialized = JsonSerializer.Serialize(entity, SerializerOptions);
+            Entity deserialized = JsonSerializer.Deserialize<Entity>(serialized, SerializerOptions);
+            // Roundtrip will convert to local time, DateTimeKind is not preserved.
+            Assert.Equal(deserialized.UpdatedAt, value.ToLocalTime());
+        });
+    }
+
     #region Models
     public class Entity
     {
diff --git a/tests/CommunityToolkit.Datasync.Client.Test/Threading/QueueHandler_Tests.cs b/tests/CommunityToolkit.Datasync.Client.Test/Threading/QueueHandler_Tests.cs
index b38d04a6..b408f972 100644
--- a/tests/CommunityToolkit.Datasync.Client.Test/Threading/QueueHandler_Tests.cs
+++ b/tests/CommunityToolkit.Datasync.Client.Test/Threading/QueueHandler_Tests.cs
@@ -23,7 +23,7 @@ public async Task QueueHandler_WithThreads_Enqueue(int nThreads)
         {
             accId.Enqueue(el);
             accTh.Enqueue(Environment.CurrentManagedThreadId);
-            Thread.Sleep(1000);
+            Thread.Sleep(2000);
             return Task.CompletedTask;
         });
         DateTimeOffset startTime = DateTimeOffset.Now;
@@ -49,7 +49,7 @@ public async Task QueueHandler_WithThreads_Enqueue(int nThreads)
         accTh.AsEnumerable().Distinct().Should().HaveCount(nThreads);
         // This just makes sure that the amount of time is "of the right order of magnitude" since CI systems
         // are notoriously bad at correct timings. We just don't want it to be 10x the expected time.
-        (endTime - startTime).TotalSeconds.Should().BeLessThanOrEqualTo((nElements / nThreads) + 5);
+        (endTime - startTime).TotalSeconds.Should().BeLessThanOrEqualTo(2 * (nElements / nThreads) + 5);
     }
 
     [Theory(Timeout = 30000)]
diff --git a/tests/CommunityToolkit.Datasync.Server.Abstractions.Test/Json/DateTimeConverter_Tests.cs b/tests/CommunityToolkit.Datasync.Server.Abstractions.Test/Json/DateTimeConverter_Tests.cs
index e40b6814..5bb68230 100644
--- a/tests/CommunityToolkit.Datasync.Server.Abstractions.Test/Json/DateTimeConverter_Tests.cs
+++ b/tests/CommunityToolkit.Datasync.Server.Abstractions.Test/Json/DateTimeConverter_Tests.cs
@@ -69,6 +69,52 @@ public void Converter_ThrowsOnNullDateInInput()
         act.Should().Throw();
     }
 
+    [Theory]
+    [MemberData(nameof(Locales))]
+    public void Converter_Roundtrip_Consistent_Default(string culture)
+    {
+        DateTime value = default;
+
+        TestWithCulture(culture, () =>
+        {
+            Entity entity = new() { UpdatedAt = value };
+            string serialized = JsonSerializer.Serialize(entity, SerializerOptions);
+            Entity deserialized = JsonSerializer.Deserialize<Entity>(serialized, SerializerOptions);
+            Assert.Equal(deserialized.UpdatedAt, value);
+        });
+    }
+
+    [Theory]
+    [MemberData(nameof(Locales))]
+    public void Converter_Roundtrip_Consistent_Local(string culture)
+    {
+        DateTime value = new(2021, 8, 21, 12, 30, 15, 123, DateTimeKind.Local);
+
+        TestWithCulture(culture, () =>
+        {
+            Entity entity = new() { UpdatedAt = value };
+            string serialized = JsonSerializer.Serialize(entity, SerializerOptions);
+            Entity deserialized = JsonSerializer.Deserialize<Entity>(serialized, SerializerOptions);
+            Assert.Equal(deserialized.UpdatedAt, value);
+        });
+    }
+
+    [Theory]
+    [MemberData(nameof(Locales))]
+    public void Converter_Roundtrip_Consistent_Utc(string culture)
+    {
+        DateTime value = new(2021, 8, 21, 14, 35, 20, 12, DateTimeKind.Utc);
+
+        TestWithCulture(culture, () =>
+        {
+            Entity entity = new() { UpdatedAt = value };
+            string serialized = JsonSerializer.Serialize(entity, SerializerOptions);
+            Entity deserialized = JsonSerializer.Deserialize<Entity>(serialized, SerializerOptions);
+            // Roundtrip will convert to local time, DateTimeKind is not preserved.
+            Assert.Equal(deserialized.UpdatedAt, value.ToLocalTime());
+        });
+    }
+
     #region Models
     public class Entity
     {
diff --git a/tests/CommunityToolkit.Datasync.TestCommon/TestData/Movies.cs b/tests/CommunityToolkit.Datasync.TestCommon/TestData/Movies.cs
index c6b136c5..f39e65d7 100644
--- a/tests/CommunityToolkit.Datasync.TestCommon/TestData/Movies.cs
+++ b/tests/CommunityToolkit.Datasync.TestCommon/TestData/Movies.cs
@@ -20,6 +20,26 @@ public static class Movies
         Year = 2018
     };
 
+    public static readonly MovieBase Dune = new()
+    {
+        BestPictureWinner = false,
+        Duration = 155,
+        Rating = MovieRating.PG13,
+        ReleaseDate = new DateOnly(2021, 10, 22),
+        Title = "Dune",
+        Year = 2021
+    };
+
+    public static readonly MovieBase DrNo = new()
+    {
+        BestPictureWinner = false,
+        Duration = 110,
+        Rating = MovieRating.PG,
+        ReleaseDate = new DateOnly(1962, 5, 8),
+        Title = "Dr. No",
+        Year = 1962
+    };
+
     /// <summary>
     /// Counts the number of items in the list that match the predicate.
     /// </summary>
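Reviewer note: to close the loop, a hedged sketch of surfacing pull failures from the new event data, matching what `PullAsync_List_FailedRequest_SynchronizationEventWorks` asserts. Here `context` is an `OfflineDbContext`-derived instance and `logger` is an assumed `ILogger`; neither is defined by this PR.

```csharp
// PullEnded carries both the exception and, for DatasyncPullException, the service response.
context.SynchronizationProgress += (_, args) =>
{
    if (args.EventType == SynchronizationEventType.PullEnded && args.Exception is not null)
    {
        logger.LogWarning(args.Exception,
            "Pull for {Entity} (query '{QueryId}') failed with HTTP {Status}",
            args.EntityType?.Name, args.QueryId, args.ServiceResponse?.StatusCode);
    }
};
```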