Random wanderings through Microsoft Azure esp. PaaS plumbing, the IoT bits, AI on Micro controllers, AI on Edge Devices, .NET nanoFramework, .NET Core on *nix and ML.NET+ONNX
When writing payload formatters, the Visual Studio 2022 syntax highlighting is really useful for spotting syntax errors, and with the “Downlink Payload Formatter Test Harness” application payload formatters can be executed and debugged before being deployed with Azure Storage Explorer.
namespace PayloadFormatter
{
    using System;
    using System.Collections.Generic;

    using Newtonsoft.Json.Linq;

    public interface IFormatterUplink
    {
        public JObject Evaluate(IDictionary<string, string> properties, string application, string terminalId, DateTime timestamp, JObject payloadJson, string payloadText, byte[] payloadBytes);
    }
    ...
}
The Myriota uplink packet payload is only 20 bytes long, so it is very unlikely that the payloadText and payloadJson parameters would ever be populated; I removed them from the interface. The uplink message handler interface has been updated, and the code that converted (where possible) the payload bytes to text and then to JSON has been deleted.
namespace PayloadFormatter
{
    using System;
    using System.Collections.Generic;

    using Newtonsoft.Json.Linq;

    public interface IFormatterUplink
    {
        public JObject Evaluate(IDictionary<string, string> properties, string application, string terminalId, DateTime timestamp, byte[] payloadBytes);
    }
    ...
}
All of the sample payload formatters have been updated to reflect the updated parameters. The sample Tracker.cs payload formatter unpacks a message from a Myriota Dev Kit running the Tracker sample and returns an Azure IoT Central compatible location telemetry payload.
/*
myriota tracker payload format

typedef struct {
    uint16_t sequence_number;
    int32_t latitude;   // scaled by 1e7, e.g. -891234567 (south 89.1234567)
    int32_t longitude;  // scaled by 1e7, e.g. 1791234567 (east 179.1234567)
    uint32_t time;      // epoch timestamp of last fix
} __attribute__((packed)) tracker_message;
*/
using System;
using System.Collections.Generic;
using System.Globalization;

using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

public class FormatterUplink : PayloadFormatter.IFormatterUplink
{
    public JObject Evaluate(IDictionary<string, string> properties, string application, string terminalId, DateTime timestamp, byte[] payloadBytes)
    {
        JObject telemetryEvent = new JObject();

        if (payloadBytes is null)
        {
            return telemetryEvent;
        }

        telemetryEvent.Add("SequenceNumber", BitConverter.ToUInt16(payloadBytes));

        JObject location = new JObject();

        // Latitude and longitude are scaled by 1e7 in the tracker_message struct
        double latitude = BitConverter.ToInt32(payloadBytes, 2) / 10000000.0;
        location.Add("lat", latitude);

        double longitude = BitConverter.ToInt32(payloadBytes, 6) / 10000000.0;
        location.Add("lon", longitude);
        location.Add("alt", 0);

        telemetryEvent.Add("DeviceLocation", location);

        UInt32 packetTimestamp = BitConverter.ToUInt32(payloadBytes, 10);
        DateTime fixAtUtc = DateTime.UnixEpoch.AddSeconds(packetTimestamp);

        telemetryEvent.Add("FixAtUtc", fixAtUtc);

        properties.Add("iothub-creation-time-utc", fixAtUtc.ToString("s", CultureInfo.InvariantCulture));

        return telemetryEvent;
    }
}
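A payload formatter can be exercised in a console test harness before being uploaded; a minimal sketch (the payload bytes and terminal ID are made up):

using System;
using System.Collections.Generic;

using Newtonsoft.Json.Linq;

// Sketch: exercising the uplink formatter in a console test harness.
// The 14 payload bytes (matching tracker_message) and terminal ID are made up.
var formatter = new FormatterUplink();

byte[] payloadBytes = new byte[] { 0x01, 0x00, 0xD2, 0x02, 0x96, 0x49, 0x07, 0x09, 0x59, 0x25, 0x6A, 0x64, 0xBE, 0x64 };

JObject telemetryEvent = formatter.Evaluate(new Dictionary<string, string>(), "tracker", "0000000000000000", DateTime.UtcNow, payloadBytes);

Console.WriteLine(telemetryEvent.ToString());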
If a message payload is text or JSON it can still be converted in the payload formatter.
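A minimal sketch of that conversion, assuming a UTF-8 encoded payload:

using System.Text;

using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

// Sketch: recover text, then JSON, from the raw payload bytes.
string payloadText = string.Empty;
JObject payloadJson = null;

try
{
    // UTF8Encoding configured to throw on invalid byte sequences
    payloadText = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true).GetString(payloadBytes);

    payloadJson = JObject.Parse(payloadText);
}
catch (ArgumentException)
{
    // Payload is not valid UTF-8 text
}
catch (JsonReaderException)
{
    // Payload is text but not valid JSON
}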
public static void Main(string[] args)
{
    var builder = WebApplication.CreateBuilder(args);

    // Add services to the container.
    builder.Services.AddApplicationInsightsTelemetry();

    builder.Services.AddTransient<IDapperContext>(s => new DapperContext(builder.Configuration));

    builder.Services.AddControllers();

    builder.Services.AddSingleton<IConnectionMultiplexer>(s => ConnectionMultiplexer.Connect(builder.Configuration.GetConnectionString("Redis")));

    var app = builder.Build();

    // Configure the HTTP request pipeline.
    app.UseHttpsRedirection();

    app.MapControllers();

    app.Run();
}
I trialled the initial versions of my Redis project with Memurai on my development machine, then configured an Azure Cache for Redis instance. I then load tested the project with several Azure App Service clients, and there was a significant improvement in response times.
[ApiController]
[Route("api/[controller]")]
public class StockItemsController : ControllerBase
{
    private const int StockItemSearchMaximumRowsToReturn = 15;
    private readonly TimeSpan StockItemListExpiration = new TimeSpan(0, 5, 0);

    private const string sqlCommandText = @"SELECT [StockItemID] as ""ID"", [StockItemName] as ""Name"", [RecommendedRetailPrice], [TaxRate] FROM [Warehouse].[StockItems]";
    //private const string sqlCommandText = @"SELECT [StockItemID] as ""ID"", [StockItemName] as ""Name"", [RecommendedRetailPrice], [TaxRate] FROM [Warehouse].[StockItems]; WAITFOR DELAY '00:00:02'";

    private readonly ILogger<StockItemsController> logger;
    private readonly IDbConnection dbConnection;
    private readonly IDatabase redisCache;

    public StockItemsController(ILogger<StockItemsController> logger, IDapperContext dapperContext, IConnectionMultiplexer connectionMultiplexer)
    {
        this.logger = logger;
        this.dbConnection = dapperContext.ConnectionCreate();
        this.redisCache = connectionMultiplexer.GetDatabase();
    }

    [HttpGet]
    public async Task<ActionResult<IEnumerable<Model.StockItemListDtoV1>>> Get()
    {
        var cached = await redisCache.StringGetAsync("StockItems");
        if (cached.HasValue)
        {
            return Content(cached, "application/json");
        }

        var stockItems = await dbConnection.QueryWithRetryAsync<Model.StockItemListDtoV1>(sql: sqlCommandText, commandType: CommandType.Text);

#if SERIALISER_SOURCE_GENERATION
        string json = JsonSerializer.Serialize(stockItems, typeof(List<Model.StockItemListDtoV1>), Model.StockItemListDtoV1GenerationContext.Default);
#else
        string json = JsonSerializer.Serialize(stockItems);
#endif

        await redisCache.StringSetAsync("StockItems", json, expiry: StockItemListExpiration);

        return Content(json, "application/json");
    }

    ...

    [HttpDelete()]
    public async Task<ActionResult> ListCacheDelete()
    {
        await redisCache.KeyDeleteAsync("StockItems");

        logger.LogInformation("StockItems list removed");

        return this.Ok();
    }
}
public class StockItemListDtoV1
{
    public int Id { get; set; }
    public string Name { get; set; }
    public decimal RecommendedRetailPrice { get; set; }
    public decimal TaxRate { get; set; }
}

[JsonSourceGenerationOptions(PropertyNamingPolicy = JsonKnownNamingPolicy.CamelCase)]
[JsonSerializable(typeof(List<StockItemListDtoV1>))]
public partial class StockItemListDtoV1GenerationContext : JsonSerializerContext
{
}
The cost of constructing the serialiser may be higher, but the cost of performing serialisation with it is much smaller.
I used Telerik Fiddler to empty the cache, then loaded the StockItems list 10 times (more tests would improve the quality of the results). The first trial was with the “conventional” serialiser.
The average time for the conventional serialiser was 0.028562 seconds.
The average time for the source generated version was 0.030546 seconds. But, if the initial compilation step is ignored, the average duration dropped to 0.000223 seconds, a significant improvement.
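For a rough local comparison (a sketch, not the Fiddler-based approach above; stockItems is assumed to be a populated List&lt;Model.StockItemListDtoV1&gt;):

using System;
using System.Diagnostics;
using System.Text.Json;

// Rough sketch: time one serialisation with each approach.
// stockItems is assumed to be a populated List<Model.StockItemListDtoV1>.
var stopwatch = Stopwatch.StartNew();
string conventionalJson = JsonSerializer.Serialize(stockItems);
stopwatch.Stop();
Console.WriteLine($"Conventional: {stopwatch.Elapsed.TotalSeconds:F6} sec");

stopwatch.Restart();
string generatedJson = JsonSerializer.Serialize(stockItems, typeof(List<Model.StockItemListDtoV1>), Model.StockItemListDtoV1GenerationContext.Default);
stopwatch.Stop();
Console.WriteLine($"Source generated: {stopwatch.Elapsed.TotalSeconds:F6} sec");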
I have found that putting the C/C++ structure for the uplink payload at the top of the converter is really helpful.
/*
myriota tracker payload format

typedef struct {
    uint16_t sequence_number;
    int32_t latitude;   // scaled by 1e7, e.g. -891234567 (south 89.1234567)
    int32_t longitude;  // scaled by 1e7, e.g. 1791234567 (east 179.1234567)
    uint32_t time;      // epoch timestamp of last fix
} __attribute__((packed)) tracker_message;
*/
using System;
using System.Collections.Generic;
using System.Globalization;

using Newtonsoft.Json;
using Newtonsoft.Json.Linq;

public class FormatterUplink : PayloadFormatter.IFormatterUplink
{
    public JObject Evaluate(IDictionary<string, string> properties, string application, string terminalId, DateTime timestamp, JObject payloadJson, string payloadText, byte[] payloadBytes)
    {
        JObject telemetryEvent = new JObject();

        telemetryEvent.Add("SequenceNumber", BitConverter.ToUInt16(payloadBytes));

        double latitude = BitConverter.ToInt32(payloadBytes, 2) / 10000000.0;
        telemetryEvent.Add("Latitude", latitude);

        double longitude = BitConverter.ToInt32(payloadBytes, 6) / 10000000.0;
        telemetryEvent.Add("Longitude", longitude);

        UInt32 packetTimestamp = BitConverter.ToUInt32(payloadBytes, 10);
        DateTime lastFix = DateTime.UnixEpoch.AddSeconds(packetTimestamp);

        properties.Add("iothub-creation-time-utc", lastFix.ToString("s", CultureInfo.InvariantCulture));

        return telemetryEvent;
    }
}
The sample Tracker.cs payload formatter unpacks a message from a Myriota Dev Kit running the Tracker sample and returns an Azure IoT Central compatible location telemetry payload.
BEWARE: I think the Azure IoT Central Position lat, lon + alt values might be case sensitive.
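For reference, the location telemetry built by the Tracker.cs formatter looks roughly like this (the values are made-up examples); note the lower-case lat, lon and alt keys:

{
    "SequenceNumber": 1,
    "DeviceLocation": {
        "lat": -43.5321,
        "lon": 172.6362,
        "alt": 0
    }
}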
Azure IoT Explorer displaying Tracker.cs payload formatter output
Myriota Destination configuration application name URL configuration
namespace devMobile.IoT.MyriotaAzureIoTConnector.Connector.Models
{
    using System;
    using System.Collections.Generic;

    public class UplinkPayloadQueueDto
    {
        public string Application { get; set; }
        public string EndpointRef { get; set; }
        public DateTime PayloadReceivedAtUtc { get; set; }
        public DateTime PayloadArrivedAtUtc { get; set; }
        public QueueData Data { get; set; }
        public string Id { get; set; }
        public Uri CertificateUrl { get; set; }
        public string Signature { get; set; }
    }

    public class QueueData
    {
        public List<QueuePacket> Packets { get; set; }
    }

    public class QueuePacket
    {
        public string TerminalId { get; set; }
        public DateTime Timestamp { get; set; }
        public string Value { get; set; }
    }
}
A pair of Azure Blob Storage containers is used to store the uplink/downlink (coming soon) formatter files. The compiled payload formatters are cached with Uplink/Downlink + Application (from the UplinkPayloadQueueDto) as the key.
Azure Storage Explorer displaying uplink payload formatters
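A minimal sketch of the caching approach, assuming an in-memory ConcurrentDictionary and a hypothetical CompileFormatterAsync helper (not the connector’s actual implementation):

using System.Collections.Concurrent;
using System.Threading.Tasks;

// Sketch: compiled formatters cached with direction + application as the key.
// CompileFormatterAsync is a hypothetical helper that loads the formatter .cs
// file from blob storage and compiles it.
private readonly ConcurrentDictionary<string, PayloadFormatter.IFormatterUplink> uplinkFormatterCache = new();

public async Task<PayloadFormatter.IFormatterUplink> UplinkFormatterGetAsync(string application)
{
    string key = $"Uplink-{application}";

    if (uplinkFormatterCache.TryGetValue(key, out PayloadFormatter.IFormatterUplink formatter))
    {
        return formatter;
    }

    formatter = await CompileFormatterAsync(application);

    uplinkFormatterCache.TryAdd(key, formatter);

    return formatter;
}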
The default uplink and downlink formatters used when there is no payload formatter for “Application” are configured in the application settings.
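The shape of that configuration might look something like this (the key names are illustrative assumptions, not the connector’s actual settings):

{
    "PayloadFormatters": {
        "UplinkDefault": "Uplink.cs",
        "DownlinkDefault": "Downlink.cs"
    }
}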
namespace devMobile.IoT.myriotaAzureIoTConnector.myriota.UplinkWebhook.Models
{
    public class UplinkPayloadWebDto
    {
        public string EndpointRef { get; set; }
        public long Timestamp { get; set; }
        public string Data { get; set; } // Embedded JSON ?
        public string Id { get; set; }
        public string CertificateUrl { get; set; }
        public string Signature { get; set; }
    }
}
The UplinkWebhook controller “automagically” deserialises the message, then in code the embedded JSON is deserialised and “unpacked”. Finally, the processed message is inserted into an Azure Storage queue.
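A minimal sketch of that flow, assuming an injected Azure Storage QueueClient (queueClient); the unpacking and queue message shape are simplified illustrations, not the connector’s actual code:

using System.Threading.Tasks;

using Azure.Storage.Queues;
using Microsoft.AspNetCore.Mvc;
using Newtonsoft.Json;

[HttpPost]
public async Task<IActionResult> Post([FromBody] Models.UplinkPayloadWebDto payloadWeb)
{
    // ASP.NET Core model binding has already deserialised the outer message.
    // The Data property contains embedded JSON which is deserialised "by hand".
    var queueData = JsonConvert.DeserializeObject<Models.QueueData>(payloadWeb.Data);

    // The processed message is inserted into an Azure Storage queue for the
    // uplink message processor.
    await queueClient.SendMessageAsync(JsonConvert.SerializeObject(queueData));

    return this.Ok();
}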
For a couple of weeks a Myriota Developer Toolkit has been sitting under my desk, and today I got some time to set up a device, register it, then upload some data.
ASP.NET Core Identity roles can also have individual claims, but with the authorisation model of the legacy application I work on this functionality hasn’t been useful. We use role-based authentication with a few user claims to minimise the size of our JSON Web Tokens (JWTs).
Visual Studio 2022 ASP.NET Core Web Application template options
I tried to minimise the modifications to the application. I added EnableRetryOnFailure, made some changes to namespaces etc. I also added support for email address confirmation with SendGrid and an “authentication” link to the navbar in _Layout.cshtml.
While exploring some of the functionality of MiniProfiler, some third-party examples caught my attention.
using (SqlConnection connection = new SqlConnection(@"Data Source=...; Initial Catalog=SyncDB; Trusted_Connection=Yes"))
{
    using (ProfiledDbConnection profiledDbConnection = new ProfiledDbConnection(connection, MiniProfiler.Current))
    {
        if (profiledDbConnection.State != System.Data.ConnectionState.Open)
            profiledDbConnection.Open();

        using (SqlCommand command = new SqlCommand("Select * From Authors", connection))
        {
            using (ProfiledDbCommand profiledDbCommand = new ProfiledDbCommand(command, connection, MiniProfiler.Current))
            {
                var data = profiledDbCommand.ExecuteReader();
                //Write code here to populate the list of Authors
            }
        }
    }
}
“Inspired” by code like this, my first attempt to retrieve a list of stock items didn’t look right.
[HttpGet("AdoProfiledOtt")]
public async Task<ActionResult<IEnumerable<Model.StockItemListDtoV1>>> GetAdoProfiledOtt()
{
List<Model.StockItemListDtoV1> response = new List<Model.StockItemListDtoV1>();
using (SqlConnection connection = new SqlConnection(configuration.GetConnectionString("default")))
{
using (ProfiledDbConnection profiledDbConnection = new ProfiledDbConnection(connection, MiniProfiler.Current))
{
await profiledDbConnection.OpenAsync();
using (SqlCommand command = new SqlCommand(sqlCommandText, connection))
{
using (ProfiledDbCommand profiledDbCommand = new ProfiledDbCommand(command, profiledDbConnection, MiniProfiler.Current))
{
using (SqlDataReader reader = await command.ExecuteReaderAsync())
{
using (ProfiledDbDataReader profiledDbDataReader = new ProfiledDbDataReader(reader, MiniProfiler.Current))
{
var rowParser = profiledDbDataReader.GetRowParser<Model.StockItemListDtoV1>();
while (await profiledDbDataReader.ReadAsync())
{
response.Add(rowParser(profiledDbDataReader));
}
}
}
}
}
await profiledDbConnection.CloseAsync();
}
}
}
/// <summary>
/// Initializes a new instance of the <see cref="ProfiledDbDataReader"/> class (with <see cref="CommandBehavior.Default"/>).
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="profiler">The profiler.</param>
public ProfiledDbDataReader(DbDataReader reader, IDbProfiler profiler) : this(reader, CommandBehavior.Default, profiler) { }

/// <summary>
/// Initializes a new instance of the <see cref="ProfiledDbDataReader"/> class.
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="behavior">The behavior specified during command execution.</param>
/// <param name="profiler">The profiler.</param>
public ProfiledDbDataReader(DbDataReader reader, CommandBehavior behavior, IDbProfiler? profiler)
{
    WrappedReader = reader;
    Behavior = behavior;
    _profiler = profiler;
}

...

/// <summary>
/// The <see cref="DbDataReader"/> that is being used.
/// </summary>
public DbDataReader WrappedReader { get; }

/// <inheritdoc cref="DbDataReader.Dispose(bool)"/>
protected override void Dispose(bool disposing)
{
    // reader can be null when we're not profiling, but we've inherited from ProfiledDbCommand and are returning
    // an unwrapped reader from the base command
    WrappedReader?.Dispose();
    base.Dispose(disposing);
}
Another “using” is not required, as the ProfiledDbDataReader “automagically” disposes the SqlDataReader. This was my final version of profiling the System.Data.SqlClient code to retrieve a list of stock items.
[HttpGet("AdoProfiled")]
public async Task<ActionResult<IEnumerable<Model.StockItemListDtoV1>>> GetProfiledAdo()
{
List<Model.StockItemListDtoV1> response = new List<Model.StockItemListDtoV1>();
using (ProfiledDbConnection profiledDbConnection = new ProfiledDbConnection((SqlConnection)dapperContext.ConnectionCreate(), MiniProfiler.Current))
{
await profiledDbConnection.OpenAsync();
using (ProfiledDbCommand profiledDbCommand = new ProfiledDbCommand(new SqlCommand(sqlCommandText), profiledDbConnection, MiniProfiler.Current))
{
DbDataReader reader = await profiledDbCommand.ExecuteReaderAsync();
using (ProfiledDbDataReader profiledDbDataReader = new ProfiledDbDataReader(reader, MiniProfiler.Current))
{
var rowParser = profiledDbDataReader.GetRowParser<Model.StockItemListDtoV1>();
while (await profiledDbDataReader.ReadAsync())
{
response.Add(rowParser(profiledDbDataReader));
}
}
}
}
return this.Ok(response);
}
The ProfiledDbDataReader.cs implementation was “sparse”, and when loading a longer list of stock items there were some ReadAsync calls which took a bit longer.
/// <summary>
/// The profiled database data reader.
/// </summary>
public class ProfiledDbDataReader : DbDataReader
{
    private readonly IDbProfiler? _profiler;

    /// <summary>
    /// Initializes a new instance of the <see cref="ProfiledDbDataReader"/> class (with <see cref="CommandBehavior.Default"/>).
    /// </summary>
    /// <param name="reader">The reader.</param>
    /// <param name="profiler">The profiler.</param>
    public ProfiledDbDataReader(DbDataReader reader, IDbProfiler profiler) : this(reader, CommandBehavior.Default, profiler) { }

    /// <summary>
    /// Initializes a new instance of the <see cref="ProfiledDbDataReader"/> class.
    /// </summary>
    /// <param name="reader">The reader.</param>
    /// <param name="behavior">The behavior specified during command execution.</param>
    /// <param name="profiler">The profiler.</param>
    public ProfiledDbDataReader(DbDataReader reader, CommandBehavior behavior, IDbProfiler? profiler)
    {
        WrappedReader = reader;
        Behavior = behavior;
        _profiler = profiler;
    }

    /// <summary>Gets the behavior specified during command execution.</summary>
    public CommandBehavior Behavior { get; }

    /// <inheritdoc cref="DbDataReader.Depth"/>
    public override int Depth => WrappedReader.Depth;

    /// <inheritdoc cref="DbDataReader.FieldCount"/>
    public override int FieldCount => WrappedReader.FieldCount;

    /// <inheritdoc cref="DbDataReader.HasRows"/>
    public override bool HasRows => WrappedReader.HasRows;

    /// <inheritdoc cref="DbDataReader.IsClosed"/>
    public override bool IsClosed => WrappedReader.IsClosed;

    /// <inheritdoc cref="DbDataReader.RecordsAffected"/>
    public override int RecordsAffected => WrappedReader.RecordsAffected;

    /// <summary>
    /// The <see cref="DbDataReader"/> that is being used.
    /// </summary>
    public DbDataReader WrappedReader { get; }

    /// <inheritdoc cref="DbDataReader.this[string]"/>
    public override object this[string name] => WrappedReader[name];

    /// <inheritdoc cref="DbDataReader.this[int]"/>
    public override object this[int ordinal] => WrappedReader[ordinal];

    ...

    /// <inheritdoc cref="DbDataReader.GetString(int)"/>
    public override string GetString(int ordinal) => WrappedReader.GetString(ordinal);

    /// <inheritdoc cref="DbDataReader.GetValue(int)"/>
    public override object GetValue(int ordinal) => WrappedReader.GetValue(ordinal);

    /// <inheritdoc cref="DbDataReader.GetValues(object[])"/>
    public override int GetValues(object[] values) => WrappedReader.GetValues(values);

    /// <inheritdoc cref="DbDataReader.IsDBNull(int)"/>
    public override bool IsDBNull(int ordinal) => WrappedReader.IsDBNull(ordinal);

    /// <inheritdoc cref="DbDataReader.IsDBNullAsync(int, CancellationToken)"/>
    public override Task<bool> IsDBNullAsync(int ordinal, CancellationToken cancellationToken) => WrappedReader.IsDBNullAsync(ordinal, cancellationToken);

    /// <inheritdoc cref="DbDataReader.NextResult()"/>
    public override bool NextResult() => WrappedReader.NextResult();

    /// <inheritdoc cref="DbDataReader.NextResultAsync(CancellationToken)"/>
    public override Task<bool> NextResultAsync(CancellationToken cancellationToken) => WrappedReader.NextResultAsync(cancellationToken);

    /// <inheritdoc cref="DbDataReader.Read()"/>
    public override bool Read() => WrappedReader.Read();

    /// <inheritdoc cref="DbDataReader.ReadAsync(CancellationToken)"/>
    public override Task<bool> ReadAsync(CancellationToken cancellationToken) => WrappedReader.ReadAsync(cancellationToken);

    /// <inheritdoc cref="DbDataReader.Close()"/>
    public override void Close()
    {
        // reader can be null when we're not profiling, but we've inherited from ProfiledDbCommand and are returning
        // an unwrapped reader from the base command
        WrappedReader?.Close();
        _profiler?.ReaderFinish(this);
    }

    /// <inheritdoc cref="DbDataReader.GetSchemaTable()"/>
    public override DataTable? GetSchemaTable() => WrappedReader.GetSchemaTable();

    /// <inheritdoc cref="DbDataReader.Dispose(bool)"/>
    protected override void Dispose(bool disposing)
    {
        // reader can be null when we're not profiling, but we've inherited from ProfiledDbCommand and are returning
        // an unwrapped reader from the base command
        WrappedReader?.Dispose();
        base.Dispose(disposing);
    }
}
In the [HttpGet("DapperProfiledQueryMultipleStep")] method I wrapped ReadAsync in a step, and could see in the profiling that every so often a call took significantly longer.
using (MiniProfiler.Current.Step("invoiceSummaryLine.ReadAsync"))
{
    response.InvoiceLines = await invoiceSummary.ReadAsync<Model.InvoiceLineSummaryListDtoV1>();
}
I did consider modifying ProfiledDbDataReader.cs to add some instrumentation to the Read… and Get… methods, but the authors of MiniProfiler are way, way smarter than me so there must be a reason why they didn’t.
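For illustration only, instrumenting ReadAsync might look something like this (a sketch, not MiniProfiler code; the per-row timing overhead may well be the reason it isn’t done):

// Hypothetical override in ProfiledDbDataReader - wraps each ReadAsync call in a
// MiniProfiler step. NOT the library's implementation; timing every row read
// would add overhead to the hot path.
public override async Task<bool> ReadAsync(CancellationToken cancellationToken)
{
    using (MiniProfiler.Current.Step("ReadAsync"))
    {
        return await WrappedReader.ReadAsync(cancellationToken);
    }
}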
After several unsuccessful attempts at updating the NuGet packages, I started again from scratch.
The code wouldn’t compile, so I started fixing issues (the first couple of attempts were very “hacky”). The UseDatabaseErrorPage method was from EF Core, so it was commented out. The UseBrowserLink method was from the Browser Link support, which I decided not to use, etc.
...
namespace CustomIdentityProviderSample
{
    public class Startup
    {
        public Startup(IHostingEnvironment env)
        {
            var builder = new ConfigurationBuilder()
                .SetBasePath(env.ContentRootPath)
                .AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
                .AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true);

            if (env.IsDevelopment())
            {
                // For more details on using the user secret store see https://go.microsoft.com/fwlink/?LinkID=532709
                builder.AddUserSecrets<Startup>();
            }

            builder.AddEnvironmentVariables();
            Configuration = builder.Build();
        }

        public IConfigurationRoot Configuration { get; }

        // This method gets called by the runtime. Use this method to add services to the container.
        public void ConfigureServices(IServiceCollection services)
        {
            // Add identity types
            services.AddIdentity<ApplicationUser, ApplicationRole>()
                .AddDefaultTokenProviders();

            // Identity Services
            services.AddTransient<IUserStore<ApplicationUser>, CustomUserStore>();
            services.AddTransient<IRoleStore<ApplicationRole>, CustomRoleStore>();

            string connectionString = Configuration.GetConnectionString("DefaultConnection");
            services.AddTransient<SqlConnection>(e => new SqlConnection(connectionString));
            services.AddTransient<DapperUsersTable>();

            services.AddMvc();

            // Add application services.
            services.AddTransient<IEmailSender, AuthMessageSender>();
            services.AddTransient<ISmsSender, AuthMessageSender>();
        }

        // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
        public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory)
        {
            // loggerFactory.AddConsole(Configuration.GetSection("Logging"));
            // loggerFactory.AddDebug();

            if (env.IsDevelopment())
            {
                app.UseDeveloperExceptionPage();
                // app.UseDatabaseErrorPage(); BHL
                // app.UseBrowserLink(); BHL
            }
            else
            {
                app.UseExceptionHandler("/Home/Error");
            }

            app.UseStaticFiles();

            app.UseRouting(); // BHL

            // app.UseIdentity(); BHL
            app.UseAuthentication();
            app.UseAuthorization();

            // Add external authentication middleware below. To configure them please see https://go.microsoft.com/fwlink/?LinkID=532715
            app.UseMvc(routes =>
            {
                routes.MapRoute(
                    name: "default",
                    template: "{controller=Home}/{action=Index}/{id?}");
            });
        }
    }
}
using Microsoft.AspNetCore.Identity;

using System;
using System.Threading.Tasks;
using System.Threading;
using System.Collections.Generic;

namespace CustomIdentityProviderSample.CustomProvider
{
    /// <summary>
    /// This store is only partially implemented. It supports user creation and find methods.
    /// </summary>
    public class CustomUserStore : IUserStore<ApplicationUser>,
        IUserPasswordStore<ApplicationUser>,
        IUserPhoneNumberStore<ApplicationUser>,
        IUserTwoFactorStore<ApplicationUser>,
        IUserLoginStore<ApplicationUser>
    {
        private readonly DapperUsersTable _usersTable;

        public CustomUserStore(DapperUsersTable usersTable)
        {
            _usersTable = usersTable;
        }

        public Task AddLoginAsync(ApplicationUser user, UserLoginInfo login, CancellationToken cancellationToken)
        {
            throw new NotImplementedException();
        }

        public async Task<IdentityResult> CreateAsync(ApplicationUser user,
            CancellationToken cancellationToken = default(CancellationToken))
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (user == null) throw new ArgumentNullException(nameof(user));

            return await _usersTable.CreateAsync(user);
        }

        public async Task<IdentityResult> DeleteAsync(ApplicationUser user,
            CancellationToken cancellationToken = default(CancellationToken))
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (user == null) throw new ArgumentNullException(nameof(user));

            return await _usersTable.DeleteAsync(user);
        }

        public async Task<ApplicationUser> FindByIdAsync(string userId,
            CancellationToken cancellationToken = default(CancellationToken))
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (userId == null) throw new ArgumentNullException(nameof(userId));

            Guid idGuid;
            if (!Guid.TryParse(userId, out idGuid))
            {
                throw new ArgumentException("Not a valid Guid id", nameof(userId));
            }

            return await _usersTable.FindByIdAsync(idGuid);
        }

        public Task<ApplicationUser> FindByLoginAsync(string loginProvider, string providerKey, CancellationToken cancellationToken)
        {
            throw new NotImplementedException();
        }

        public async Task<ApplicationUser> FindByNameAsync(string userName,
            CancellationToken cancellationToken = default(CancellationToken))
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (userName == null) throw new ArgumentNullException(nameof(userName));

            return await _usersTable.FindByNameAsync(userName);
        }

        public async Task<IList<UserLoginInfo>> GetLoginsAsync(ApplicationUser user, CancellationToken cancellationToken)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (user == null) throw new ArgumentNullException(nameof(user));

            return await _usersTable.GetLoginsAsync(user.Id);
        }

        ...
    }
}
This also required extensions to DapperUsersTable.cs.
public async Task<IdentityResult> UpdateAsync(ApplicationUser user)
{
    string sql = "UPDATE dbo.AspNetUsers " + // BHL
        "SET [Id] = @Id, [Email]= @Email, [EmailConfirmed] = @EmailConfirmed, [PasswordHash] = @PasswordHash, [UserName] = @UserName " +
        "WHERE Id = @Id;";

    int rows = await _connection.ExecuteAsync(sql, new { user.Id, user.Email, user.EmailConfirmed, user.PasswordHash, user.UserName });

    if (rows == 1)
    {
        return IdentityResult.Success;
    }

    return IdentityResult.Failed(new IdentityError { Description = $"Could not update user {user.Email}." });
}
After many failed attempts my very nasty Custom Storage Provider refresh works (with many warnings and messages). I now understand how they work well enough that I am going to start again from scratch.
The note on the wiki page, “For LazyCache v2+ users, you should consider switching away from LazyCache to IDistributedCache. More information at #59”, caught my attention.
I have written other posts about caching Dapper query results with the Dapper Extension Library, which worked well but had some configuration limitations. I also have posts about off-loading read-only workloads with Azure SQL active geo-replication or SQL Data Sync for Azure, which worked well in some scenarios but had limitations (performance and operational costs).
I explored the in-memory implementation (AddDistributedMemoryCache) on my development machine and found “tinkering” with the configuration options had little impact on the performance of my trivial sample application.
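A registration sketch for the two implementations (the DISTRIBUTED_CACHE_MEMORY symbol and “default” connection string name are assumptions; the table name matches the DDL below):

// Registration sketch: in-memory cache for development, SQL Server-backed cache
// for the trial. The DISTRIBUTED_CACHE_MEMORY symbol and "default" connection
// string name are assumptions for illustration.
#if DISTRIBUTED_CACHE_MEMORY
builder.Services.AddDistributedMemoryCache();
#else
builder.Services.AddDistributedSqlServerCache(options =>
{
    options.ConnectionString = builder.Configuration.GetConnectionString("default");
    options.SchemaName = "dbo";
    options.TableName = "StockItemsCache";
});
#endif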
CREATE TABLE [dbo].[StockItemsCache](
    [Id] [nvarchar](449) NOT NULL,
    [Value] [varbinary](max) NOT NULL,
    [ExpiresAtTime] [datetimeoffset](7) NOT NULL,
    [SlidingExpirationInSeconds] [bigint] NULL,
    [AbsoluteExpiration] [datetimeoffset](7) NULL,
    PRIMARY KEY CLUSTERED
    (
        [Id] ASC
    ) WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY]
) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]
GO
The table used to store the data wasn’t very complex, and I could view the data associated with a cache key in SQL Server Management Studio.
SQL Server Management Studio displaying cache table contents
One of the applications I work on uses a complex SQL Server stored procedure to load reference data (updated daily), and being able to purge the cache at the end of this process, like the sketch below, might be useful. For a geographically distributed application, putting the Azure SQL instance “closer” to the application’s users might be worth considering.
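A purge could be as simple as removing the key, mirroring the Redis ListCacheDelete method earlier (a sketch):

// Sketch: purging the cached list at the end of the reference data load,
// mirroring the Redis ListCacheDelete method shown earlier.
[HttpDelete]
public async Task<ActionResult> ListCacheDelete()
{
    await distributedCache.RemoveAsync("StockItems");

    return this.Ok();
}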
[HttpGet]
public async Task<ActionResult<IEnumerable<Model.StockItemListDtoV1>>> Get()
{
    var utcNow = DateTime.UtcNow;

    var cached = await distributedCache.GetAsync("StockItems");
    if (cached != null)
    {
#if SERIALISATION_JSON
        return this.Ok(JsonSerializer.Deserialize<List<Model.StockItemListDtoV1>>(cached));
#endif
#if SERIALISATION_MESSAGE_PACK
        return this.Ok(MessagePackSerializer.Deserialize<List<Model.StockItemListDtoV1>>(cached));
#endif
    }

    var stockItems = await dbConnection.QueryWithRetryAsync<Model.StockItemListDtoV1>(sql: sqlCommandText, commandType: CommandType.Text);

#if SERIALISATION_JSON
    await distributedCache.SetAsync("StockItems", JsonSerializer.SerializeToUtf8Bytes(stockItems), new DistributedCacheEntryOptions()
#endif
#if SERIALISATION_MESSAGE_PACK
    await distributedCache.SetAsync("StockItems", MessagePackSerializer.Serialize(stockItems), new DistributedCacheEntryOptions()
#endif
    {
        AbsoluteExpiration = new DateTime(utcNow.Year, utcNow.Month, DateTime.DaysInMonth(utcNow.Year, utcNow.Month), StockItemListAbsoluteExpiration.Hours, StockItemListAbsoluteExpiration.Minutes, StockItemListAbsoluteExpiration.Seconds)
    });

    return this.Ok(stockItems);
}

[HttpGet("NoLoad")]
public async Task<ActionResult<IEnumerable<Model.StockItemListDtoV1>>> GetNoLoad()
{
    var cached = await distributedCache.GetAsync("StockItems");
    if (cached == null)
    {
        return this.NoContent();
    }

#if SERIALISATION_JSON
    return this.Ok(JsonSerializer.Deserialize<List<Model.StockItemListDtoV1>>(cached));
#endif
#if SERIALISATION_MESSAGE_PACK
    return this.Ok(MessagePackSerializer.Deserialize<List<Model.StockItemListDtoV1>>(cached));
#endif
}
In my test environment the JSON payload for a list of stock items was a bit “chunky” at 25K bytes, so I added compile-time configurable support for the MessagePack library. This significantly reduced the size of the payload, LZ4Block (5K bytes) and LZ4BlockArray (5.2K bytes), which should reduce network traffic.
Assuming the overheads of JSON and MessagePack serialisation are similar, and given the much smaller MessagePack payload, I would most probably use MessagePack with LZ4BlockArray compression (for improved compatibility with other implementations).
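A sketch of enabling LZ4BlockArray compression with the MessagePack-CSharp options API (stockItems is assumed to be the list loaded from the database):

using System.Collections.Generic;

using MessagePack;

// Sketch: MessagePack serialisation with LZ4BlockArray compression enabled.
MessagePackSerializerOptions messagePackOptions = MessagePackSerializerOptions.Standard.WithCompression(MessagePackCompression.Lz4BlockArray);

byte[] compressed = MessagePackSerializer.Serialize(stockItems, messagePackOptions);

List<Model.StockItemListDtoV1> stockItemsDeserialised = MessagePackSerializer.Deserialize<List<Model.StockItemListDtoV1>>(compressed, messagePackOptions);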